Initial commit - production deployment

This commit is contained in:
2026-01-21 17:17:16 +01:00
commit c23d00dd92
2289 changed files with 638440 additions and 0 deletions

1
services/external/app/api/__init__.py vendored Normal file
View File

@@ -0,0 +1 @@
# services/external/app/api/__init__.py

237
services/external/app/api/audit.py vendored Normal file
View File

@@ -0,0 +1,237 @@
# services/external/app/api/audit.py
"""
Audit Logs API - Retrieve audit trail for external service
"""
from fastapi import APIRouter, Depends, HTTPException, Query, Path, status
from typing import Optional, Dict, Any
from uuid import UUID
from datetime import datetime
import structlog
from sqlalchemy import select, func, and_
from sqlalchemy.ext.asyncio import AsyncSession
from app.models import AuditLog
from shared.auth.decorators import get_current_user_dep
from shared.auth.access_control import require_user_role
from shared.routing import RouteBuilder
from shared.models.audit_log_schemas import (
AuditLogResponse,
AuditLogListResponse,
AuditLogStatsResponse
)
from app.core.database import database_manager
route_builder = RouteBuilder('external')
router = APIRouter(tags=["audit-logs"])
logger = structlog.get_logger()
async def get_db():
    """Yield a request-scoped AsyncSession from the shared database manager."""
    async with database_manager.get_session() as db_session:
        yield db_session
@router.get(
    route_builder.build_base_route("audit-logs"),
    response_model=AuditLogListResponse
)
@require_user_role(['admin', 'owner'])
async def get_audit_logs(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    start_date: Optional[datetime] = Query(None, description="Filter logs from this date"),
    end_date: Optional[datetime] = Query(None, description="Filter logs until this date"),
    user_id: Optional[UUID] = Query(None, description="Filter by user ID"),
    action: Optional[str] = Query(None, description="Filter by action type"),
    resource_type: Optional[str] = Query(None, description="Filter by resource type"),
    severity: Optional[str] = Query(None, description="Filter by severity level"),
    search: Optional[str] = Query(None, description="Search in description field"),
    limit: int = Query(100, ge=1, le=1000, description="Number of records to return"),
    offset: int = Query(0, ge=0, description="Number of records to skip"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """
    Return a filtered, paginated page of the tenant's audit trail,
    newest entries first.

    Restricted to callers holding the 'admin' or 'owner' role. Unexpected
    failures are logged and surfaced as a 500 response.
    """
    try:
        logger.info(
            "Retrieving audit logs",
            tenant_id=tenant_id,
            user_id=current_user.get("user_id"),
            filters={
                "start_date": start_date,
                "end_date": end_date,
                "action": action,
                "resource_type": resource_type,
                "severity": severity
            }
        )
        # Tenant scoping is mandatory; every other predicate is opt-in.
        conditions = [AuditLog.tenant_id == tenant_id]
        if start_date:
            conditions.append(AuditLog.created_at >= start_date)
        if end_date:
            conditions.append(AuditLog.created_at <= end_date)
        # The plain equality filters all share one shape.
        for column, wanted in (
            (AuditLog.user_id, user_id),
            (AuditLog.action, action),
            (AuditLog.resource_type, resource_type),
            (AuditLog.severity, severity),
        ):
            if wanted:
                conditions.append(column == wanted)
        if search:
            conditions.append(AuditLog.description.ilike(f"%{search}%"))
        # Total number of matching rows (independent of pagination).
        count_res = await db.execute(
            select(func.count()).select_from(AuditLog).where(and_(*conditions))
        )
        total = count_res.scalar() or 0
        # One page of results, newest first.
        page_stmt = (
            select(AuditLog)
            .where(and_(*conditions))
            .order_by(AuditLog.created_at.desc())
            .limit(limit)
            .offset(offset)
        )
        page_res = await db.execute(page_stmt)
        records = page_res.scalars().all()
        items = [AuditLogResponse.from_orm(record) for record in records]
        logger.info(
            "Successfully retrieved audit logs",
            tenant_id=tenant_id,
            total=total,
            returned=len(items)
        )
        return AuditLogListResponse(
            items=items,
            total=total,
            limit=limit,
            offset=offset,
            has_more=(offset + len(items)) < total
        )
    except Exception as e:
        logger.error(
            "Failed to retrieve audit logs",
            error=str(e),
            tenant_id=tenant_id
        )
        raise HTTPException(
            status_code=500,
            detail=f"Failed to retrieve audit logs: {str(e)}"
        )
@router.get(
    route_builder.build_base_route("audit-logs/stats"),
    response_model=AuditLogStatsResponse
)
@require_user_role(['admin', 'owner'])
async def get_audit_log_stats(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    start_date: Optional[datetime] = Query(None, description="Filter logs from this date"),
    end_date: Optional[datetime] = Query(None, description="Filter logs until this date"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """
    Summarize the tenant's audit trail: total events, per-action /
    per-severity / per-resource-type breakdowns, and the covered date range.

    Restricted to callers holding the 'admin' or 'owner' role.
    """
    try:
        logger.info(
            "Retrieving audit log statistics",
            tenant_id=tenant_id,
            user_id=current_user.get("user_id")
        )
        # Shared WHERE clause: tenant plus the optional date window.
        conditions = [AuditLog.tenant_id == tenant_id]
        if start_date:
            conditions.append(AuditLog.created_at >= start_date)
        if end_date:
            conditions.append(AuditLog.created_at <= end_date)

        async def _grouped_counts(column):
            # One GROUP BY round-trip -> {column value: row count}.
            res = await db.execute(
                select(column, func.count().label('count'))
                .where(and_(*conditions))
                .group_by(column)
            )
            return {key: n for key, n in res}

        # Total events
        count_res = await db.execute(
            select(func.count()).select_from(AuditLog).where(and_(*conditions))
        )
        total_events = count_res.scalar() or 0
        events_by_action = await _grouped_counts(AuditLog.action)
        events_by_severity = await _grouped_counts(AuditLog.severity)
        events_by_resource_type = await _grouped_counts(AuditLog.resource_type)
        # Oldest and newest timestamps among the matching rows.
        span_res = await db.execute(
            select(
                func.min(AuditLog.created_at).label('min_date'),
                func.max(AuditLog.created_at).label('max_date')
            )
            .where(and_(*conditions))
        )
        span = span_res.one()
        logger.info(
            "Successfully retrieved audit log statistics",
            tenant_id=tenant_id,
            total_events=total_events
        )
        return AuditLogStatsResponse(
            total_events=total_events,
            events_by_action=events_by_action,
            events_by_severity=events_by_severity,
            events_by_resource_type=events_by_resource_type,
            date_range={
                "min": span.min_date,
                "max": span.max_date
            }
        )
    except Exception as e:
        logger.error(
            "Failed to retrieve audit log statistics",
            error=str(e),
            tenant_id=tenant_id
        )
        raise HTTPException(
            status_code=500,
            detail=f"Failed to retrieve audit log statistics: {str(e)}"
        )

View File

@@ -0,0 +1,488 @@
# services/external/app/api/calendar_operations.py
"""
Calendar Operations API - School calendars and tenant location context endpoints
"""
from fastapi import APIRouter, Depends, HTTPException, Query, Path, Body
from typing import List, Optional
from uuid import UUID
import structlog
from app.schemas.calendar import (
SchoolCalendarResponse,
SchoolCalendarListResponse,
TenantLocationContextResponse,
TenantLocationContextCreateRequest,
CalendarCheckResponse
)
from app.registry.calendar_registry import CalendarRegistry, SchoolType
from app.repositories.calendar_repository import CalendarRepository
from app.cache.redis_wrapper import ExternalDataCache
from shared.routing.route_builder import RouteBuilder
from shared.auth.decorators import get_current_user_dep
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.database import get_db
from datetime import datetime, date
route_builder = RouteBuilder('external')
router = APIRouter(tags=["calendar-operations"])
logger = structlog.get_logger()
# Initialize cache
cache = ExternalDataCache()
# ===== School Calendar Endpoints =====
@router.get(
    route_builder.build_operations_route("cities/{city_id}/school-calendars"),
    response_model=SchoolCalendarListResponse
)
async def list_school_calendars_for_city(
    city_id: str = Path(..., description="City ID (e.g., 'madrid')"),
    school_type: Optional[str] = Query(None, description="Filter by school type"),
    academic_year: Optional[str] = Query(None, description="Filter by academic year"),
    db: AsyncSession = Depends(get_db)
):
    """Return every enabled school calendar for a city, optionally narrowed by type and year."""
    try:
        records = await CalendarRepository(db).get_calendars_by_city(city_id, enabled_only=True)
        # Optional query-string filters.
        if school_type:
            records = [rec for rec in records if rec.school_type == school_type]
        if academic_year:
            records = [rec for rec in records if rec.academic_year == academic_year]
        payload = []
        for rec in records:
            payload.append(SchoolCalendarResponse(
                calendar_id=str(rec.id),
                calendar_name=rec.calendar_name,
                city_id=rec.city_id,
                school_type=rec.school_type,
                academic_year=rec.academic_year,
                holiday_periods=rec.holiday_periods,
                school_hours=rec.school_hours,
                source=rec.source,
                enabled=rec.enabled
            ))
        return SchoolCalendarListResponse(
            city_id=city_id,
            calendars=payload,
            total=len(payload)
        )
    except Exception as e:
        logger.error(
            "Error listing school calendars",
            city_id=city_id,
            error=str(e)
        )
        raise HTTPException(
            status_code=500,
            detail=f"Error retrieving school calendars: {str(e)}"
        )
@router.get(
    route_builder.build_operations_route("school-calendars/{calendar_id}"),
    response_model=SchoolCalendarResponse
)
async def get_school_calendar(
    calendar_id: UUID = Path(..., description="School calendar ID"),
    db: AsyncSession = Depends(get_db)
):
    """Fetch one school calendar by id, serving from the cache when possible."""
    try:
        key = str(calendar_id)
        # Fast path: a previously cached copy.
        hit = await cache.get_cached_calendar(key)
        if hit:
            logger.debug("Returning cached calendar", calendar_id=key)
            return SchoolCalendarResponse(**hit)
        # Slow path: load from the database, then repopulate the cache.
        record = await CalendarRepository(db).get_calendar_by_id(calendar_id)
        if not record:
            raise HTTPException(status_code=404, detail="School calendar not found")
        body = {
            "calendar_id": str(record.id),
            "calendar_name": record.calendar_name,
            "city_id": record.city_id,
            "school_type": record.school_type,
            "academic_year": record.academic_year,
            "holiday_periods": record.holiday_periods,
            "school_hours": record.school_hours,
            "source": record.source,
            "enabled": record.enabled
        }
        await cache.set_cached_calendar(key, body)
        return SchoolCalendarResponse(**body)
    except HTTPException:
        raise
    except Exception as e:
        logger.error(
            "Error retrieving school calendar",
            calendar_id=str(calendar_id),
            error=str(e)
        )
        raise HTTPException(
            status_code=500,
            detail=f"Error retrieving school calendar: {str(e)}"
        )
@router.get(
    route_builder.build_operations_route("school-calendars/{calendar_id}/is-holiday"),
    response_model=CalendarCheckResponse
)
async def check_is_school_holiday(
    calendar_id: UUID = Path(..., description="School calendar ID"),
    check_date: str = Query(..., description="Date to check (ISO format: YYYY-MM-DD)"),
    db: AsyncSession = Depends(get_db)
):
    """Report whether the given date falls inside any holiday period of the calendar."""
    try:
        record = await CalendarRepository(db).get_calendar_by_id(calendar_id)
        if not record:
            raise HTTPException(status_code=404, detail="School calendar not found")
        # Validate and parse the query-string date.
        try:
            target = datetime.strptime(check_date, "%Y-%m-%d").date()
        except ValueError:
            raise HTTPException(
                status_code=400,
                detail="Invalid date format. Use YYYY-MM-DD"
            )
        # First holiday period whose [start, end] interval contains the target.
        matched = None
        for period in record.holiday_periods:
            opens = datetime.strptime(period["start_date"], "%Y-%m-%d").date()
            closes = datetime.strptime(period["end_date"], "%Y-%m-%d").date()
            if opens <= target <= closes:
                matched = period
                break
        return CalendarCheckResponse(
            date=check_date,
            is_holiday=matched is not None,
            holiday_name=matched["name"] if matched else None,
            calendar_id=str(calendar_id),
            calendar_name=record.calendar_name
        )
    except HTTPException:
        raise
    except Exception as e:
        logger.error(
            "Error checking holiday status",
            calendar_id=str(calendar_id),
            date=check_date,
            error=str(e)
        )
        raise HTTPException(
            status_code=500,
            detail=f"Error checking holiday status: {str(e)}"
        )
# ===== Tenant Location Context Endpoints =====
@router.get(
    route_builder.build_base_route("location-context"),
    response_model=TenantLocationContextResponse
)
async def get_tenant_location_context(
    tenant_id: str = Path(..., description="Tenant ID"),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """
    Get location context for a tenant including school calendar assignment (cached).

    Reads through the Redis-backed cache first; on a miss the context is
    loaded from the database and written back to the cache before returning.

    Raises:
        HTTPException 404: no location context exists for this tenant.
        HTTPException 500: any unexpected cache/database failure.
    """
    try:
        # Check cache first
        cached = await cache.get_cached_tenant_context(tenant_id)
        if cached:
            logger.debug("Returning cached tenant context", tenant_id=tenant_id)
            return TenantLocationContextResponse(**cached)
        # Cache miss - fetch from database.
        # NOTE(review): tenant_id is passed as a str here, while the POST/DELETE
        # handlers convert to UUID first — confirm the repository accepts both.
        repo = CalendarRepository(db)
        context = await repo.get_tenant_with_calendar(tenant_id)
        if not context:
            raise HTTPException(
                status_code=404,
                detail="Location context not found for this tenant"
            )
        # Cache the result. Bug fix: previously referenced the undefined name
        # `tenant_id_str`, raising NameError and turning every cache miss into
        # a 500 response.
        await cache.set_cached_tenant_context(tenant_id, context)
        return TenantLocationContextResponse(**context)
    except HTTPException:
        raise
    except Exception as e:
        logger.error(
            "Error retrieving tenant location context",
            tenant_id=str(tenant_id),
            error=str(e)
        )
        raise HTTPException(
            status_code=500,
            detail=f"Error retrieving location context: {str(e)}"
        )
@router.post(
    route_builder.build_base_route("location-context"),
    response_model=TenantLocationContextResponse
)
async def create_or_update_tenant_location_context(
    request: TenantLocationContextCreateRequest,
    tenant_id: str = Path(..., description="Tenant ID"),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """
    Create or update tenant location context.

    Validates the optional school_calendar_id, upserts the context, then
    refreshes the cache with the newly persisted state.

    Raises:
        HTTPException 400: school_calendar_id does not reference a known calendar.
        HTTPException 500: unexpected repository/cache failure (including a
            malformed tenant_id, which fails UUID conversion).
    """
    try:
        # Convert to UUID for use with repository
        tenant_uuid = UUID(tenant_id)
        repo = CalendarRepository(db)
        # Validate calendar_id if provided
        if request.school_calendar_id:
            calendar = await repo.get_calendar_by_id(request.school_calendar_id)
            if not calendar:
                raise HTTPException(
                    status_code=400,
                    detail="Invalid school_calendar_id"
                )
        # Upsert the context. (Cleanup: the return value was previously bound
        # to an unused local; the full context is re-read with calendar
        # details below.)
        await repo.create_or_update_tenant_location_context(
            tenant_id=tenant_uuid,
            city_id=request.city_id,
            school_calendar_id=request.school_calendar_id,
            neighborhood=request.neighborhood,
            local_events=request.local_events,
            notes=request.notes
        )
        # Invalidate cache since context was updated
        await cache.invalidate_tenant_context(tenant_id)
        # Get full context with calendar details
        context = await repo.get_tenant_with_calendar(tenant_uuid)
        # Cache the new context
        await cache.set_cached_tenant_context(tenant_id, context)
        return TenantLocationContextResponse(**context)
    except HTTPException:
        raise
    except Exception as e:
        logger.error(
            "Error creating/updating tenant location context",
            tenant_id=str(tenant_id),
            error=str(e)
        )
        raise HTTPException(
            status_code=500,
            detail=f"Error creating/updating location context: {str(e)}"
        )
@router.delete(
    route_builder.build_base_route("location-context"),
    status_code=204
)
async def delete_tenant_location_context(
    tenant_id: str = Path(..., description="Tenant ID"),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """Remove a tenant's location context; responds 404 when none exists."""
    try:
        # The repository works with UUIDs, so convert the path value first.
        tenant_uuid = UUID(tenant_id)
        removed = await CalendarRepository(db).delete_tenant_location_context(tenant_uuid)
        if not removed:
            raise HTTPException(
                status_code=404,
                detail="Location context not found"
            )
        return None
    except HTTPException:
        raise
    except Exception as e:
        logger.error(
            "Error deleting tenant location context",
            tenant_id=str(tenant_id),
            error=str(e)
        )
        raise HTTPException(
            status_code=500,
            detail=f"Error deleting location context: {str(e)}"
        )
# ===== Calendar Suggestion Endpoint =====
@router.post(
    route_builder.build_base_route("location-context/suggest-calendar")
)
async def suggest_calendar_for_tenant(
    tenant_id: str = Path(..., description="Tenant ID"),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """
    Suggest an appropriate school calendar for a tenant based on location and POI data.
    This endpoint analyzes:
    - Tenant's city location
    - Detected schools nearby (from POI detection)
    - Available calendars for the city
    - Bakery-specific heuristics (primary schools = stronger morning rush)
    Returns a suggestion with confidence score and reasoning.
    Does NOT automatically assign - requires admin approval.
    """
    try:
        from app.utils.calendar_suggester import CalendarSuggester
        from app.repositories.poi_context_repository import POIContextRepository
        tenant_uuid = UUID(tenant_id)
        # Get tenant's location context
        calendar_repo = CalendarRepository(db)
        location_context = await calendar_repo.get_tenant_location_context(tenant_uuid)
        if not location_context:
            raise HTTPException(
                status_code=404,
                detail="Location context not found. Create location context first."
            )
        city_id = location_context.city_id
        # Get available calendars for city.
        # Bug fix: get_calendars_by_city returns a list of calendar records
        # (the city listing endpoint iterates it directly); the old code
        # called .get("calendars") on it as if it were a dict, which raised
        # AttributeError and surfaced as a 500. Handle both shapes defensively.
        calendars_result = await calendar_repo.get_calendars_by_city(city_id, enabled_only=True)
        if isinstance(calendars_result, dict):
            calendars = calendars_result.get("calendars", [])
        else:
            calendars = list(calendars_result or [])
        # Get POI context if available
        poi_repo = POIContextRepository(db)
        poi_context = await poi_repo.get_by_tenant_id(tenant_uuid)
        poi_data = poi_context.to_dict() if poi_context else None
        # Generate suggestion
        suggester = CalendarSuggester()
        suggestion = suggester.suggest_calendar_for_tenant(
            city_id=city_id,
            available_calendars=calendars,
            poi_context=poi_data,
            tenant_data=None  # Could include tenant info if needed
        )
        # Format for admin display
        admin_message = suggester.format_suggestion_for_admin(suggestion)
        logger.info(
            "Calendar suggestion generated",
            tenant_id=tenant_id,
            city_id=city_id,
            suggested_calendar=suggestion.get("suggested_calendar_id"),
            confidence=suggestion.get("confidence")
        )
        return {
            **suggestion,
            "admin_message": admin_message,
            "tenant_id": tenant_id,
            "current_calendar_id": str(location_context.school_calendar_id) if location_context.school_calendar_id else None
        }
    except HTTPException:
        raise
    except Exception as e:
        logger.error(
            "Error generating calendar suggestion",
            tenant_id=tenant_id,
            error=str(e),
            exc_info=True
        )
        raise HTTPException(
            status_code=500,
            detail=f"Error generating calendar suggestion: {str(e)}"
        )
# ===== Helper Endpoints =====
@router.get(
    route_builder.build_operations_route("calendars/registry"),
    response_model=List[SchoolCalendarResponse]
)
async def list_registry_calendars():
    """Expose the static CalendarRegistry configuration as API response objects."""
    responses = []
    for entry in CalendarRegistry.get_enabled_calendars():
        # Flatten the dataclass-style registry entries into plain dicts.
        periods = [
            {
                "name": hp.name,
                "start_date": hp.start_date,
                "end_date": hp.end_date,
                "description": hp.description
            }
            for hp in entry.holiday_periods
        ]
        hours = {
            "morning_start": entry.school_hours.morning_start,
            "morning_end": entry.school_hours.morning_end,
            "has_afternoon_session": entry.school_hours.has_afternoon_session,
            "afternoon_start": entry.school_hours.afternoon_start,
            "afternoon_end": entry.school_hours.afternoon_end
        }
        responses.append(SchoolCalendarResponse(
            calendar_id=entry.calendar_id,
            calendar_name=entry.calendar_name,
            city_id=entry.city_id,
            school_type=entry.school_type.value,
            academic_year=entry.academic_year,
            holiday_periods=periods,
            school_hours=hours,
            source=entry.source,
            enabled=entry.enabled
        ))
    return responses

View File

@@ -0,0 +1,510 @@
# services/external/app/api/city_operations.py
"""
City Operations API - New endpoints for city-based data access
"""
from fastapi import APIRouter, Depends, HTTPException, Query, Path
from typing import List
from datetime import datetime
from uuid import UUID
import structlog
from app.schemas.city_data import CityInfoResponse, DataAvailabilityResponse
from app.schemas.weather import WeatherDataResponse, WeatherForecastResponse, WeatherForecastAPIResponse
from app.schemas.traffic import TrafficDataResponse
from app.registry.city_registry import CityRegistry
from app.registry.geolocation_mapper import GeolocationMapper
from app.repositories.city_data_repository import CityDataRepository
from app.cache.redis_wrapper import ExternalDataCache
from app.services.weather_service import WeatherService
from app.services.traffic_service import TrafficService
from app.services.tenant_deletion_service import ExternalTenantDeletionService
from shared.routing.route_builder import RouteBuilder
from shared.auth.decorators import get_current_user_dep
from shared.auth.access_control import service_only_access
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.database import get_db
route_builder = RouteBuilder('external')
router = APIRouter(tags=["city-operations"])
logger = structlog.get_logger()
@router.get(
    route_builder.build_base_route("cities"),
    response_model=List[CityInfoResponse]
)
async def list_supported_cities():
    """List every enabled city together with its configured data providers."""
    responses = []
    for entry in CityRegistry().get_enabled_cities():
        responses.append(CityInfoResponse(
            city_id=entry.city_id,
            name=entry.name,
            country=entry.country.value,
            latitude=entry.latitude,
            longitude=entry.longitude,
            radius_km=entry.radius_km,
            weather_provider=entry.weather_provider.value,
            traffic_provider=entry.traffic_provider.value,
            enabled=entry.enabled
        ))
    return responses
@router.get(
    route_builder.build_operations_route("cities/{city_id}/availability"),
    response_model=DataAvailabilityResponse
)
async def get_city_data_availability(
    city_id: str = Path(..., description="City ID"),
    db: AsyncSession = Depends(get_db)
):
    """Report how much weather and traffic history is stored for one supported city."""
    city = CityRegistry().get_city(city_id)
    if not city:
        raise HTTPException(status_code=404, detail="City not found")
    from sqlalchemy import text
    # MIN/MAX/COUNT in a single scan per table.
    w_res = await db.execute(
        text("SELECT MIN(date), MAX(date), COUNT(*) FROM city_weather_data WHERE city_id = :city_id"),
        {"city_id": city_id}
    )
    w_row = w_res.fetchone()
    w_min, w_max, w_count = w_row if w_row else (None, None, 0)
    t_res = await db.execute(
        text("SELECT MIN(date), MAX(date), COUNT(*) FROM city_traffic_data WHERE city_id = :city_id"),
        {"city_id": city_id}
    )
    t_row = t_res.fetchone()
    t_min, t_max, t_count = t_row if t_row else (None, None, 0)
    return DataAvailabilityResponse(
        city_id=city_id,
        city_name=city.name,
        weather_available=w_count > 0,
        weather_start_date=w_min.isoformat() if w_min else None,
        weather_end_date=w_max.isoformat() if w_max else None,
        weather_record_count=w_count or 0,
        traffic_available=t_count > 0,
        traffic_start_date=t_min.isoformat() if t_min else None,
        traffic_end_date=t_max.isoformat() if t_max else None,
        traffic_record_count=t_count or 0
    )
@router.get(
    route_builder.build_operations_route("historical-weather-optimized"),
    response_model=List[WeatherDataResponse]
)
async def get_historical_weather_optimized(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    latitude: float = Query(..., description="Latitude"),
    longitude: float = Query(..., description="Longitude"),
    start_date: datetime = Query(..., description="Start date"),
    end_date: datetime = Query(..., description="End date"),
    db: AsyncSession = Depends(get_db)
):
    """
    Get historical weather data using city-based cached data
    This is the FAST endpoint for training service
    """
    try:
        # Resolve the tenant's coordinates to the nearest supported city.
        match = GeolocationMapper().map_tenant_to_city(latitude, longitude)
        if not match:
            raise HTTPException(
                status_code=404,
                detail="No supported city found for this location"
            )
        city, distance = match
        logger.info(
            "Fetching historical weather from cache",
            tenant_id=tenant_id,
            city=city.name,
            distance_km=round(distance, 2)
        )
        cache = ExternalDataCache()
        hit = await cache.get_cached_weather(city.city_id, start_date, end_date)
        if hit:
            logger.info("Weather cache hit", records=len(hit))
            return hit
        # Cache miss: read from the per-city table, then repopulate the cache.
        rows = await CityDataRepository(db).get_weather_by_city_and_range(
            city.city_id, start_date, end_date
        )
        payload = []
        for row in rows:
            payload.append(WeatherDataResponse(
                id=str(row.id),
                location_id=f"{city.city_id}_{row.date.date()}",
                date=row.date,
                temperature=row.temperature,
                precipitation=row.precipitation,
                humidity=row.humidity,
                wind_speed=row.wind_speed,
                pressure=row.pressure,
                description=row.description,
                source=row.source,
                raw_data=None,
                created_at=row.created_at,
                updated_at=row.updated_at
            ))
        await cache.set_cached_weather(city.city_id, start_date, end_date, payload)
        logger.info(
            "Historical weather data retrieved",
            records=len(payload),
            source="database"
        )
        return payload
    except HTTPException:
        raise
    except Exception as e:
        logger.error("Error fetching historical weather", error=str(e))
        raise HTTPException(status_code=500, detail="Internal server error")
@router.get(
    route_builder.build_operations_route("historical-traffic-optimized"),
    response_model=List[TrafficDataResponse]
)
async def get_historical_traffic_optimized(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    latitude: float = Query(..., description="Latitude"),
    longitude: float = Query(..., description="Longitude"),
    start_date: datetime = Query(..., description="Start date"),
    end_date: datetime = Query(..., description="End date"),
    db: AsyncSession = Depends(get_db)
):
    """
    Get historical traffic data using city-based cached data
    This is the FAST endpoint for training service
    """
    try:
        # Resolve the tenant's coordinates to the nearest supported city.
        match = GeolocationMapper().map_tenant_to_city(latitude, longitude)
        if not match:
            raise HTTPException(
                status_code=404,
                detail="No supported city found for this location"
            )
        city, distance = match
        logger.info(
            "Fetching historical traffic from cache",
            tenant_id=tenant_id,
            city=city.name,
            distance_km=round(distance, 2)
        )
        cache = ExternalDataCache()
        hit = await cache.get_cached_traffic(city.city_id, start_date, end_date)
        if hit:
            logger.info("Traffic cache hit", records=len(hit))
            return hit
        logger.debug("Starting DB query for traffic", city_id=city.city_id)
        rows = await CityDataRepository(db).get_traffic_by_city_and_range(
            city.city_id, start_date, end_date
        )
        logger.debug("DB query completed", records=len(rows))
        logger.debug("Creating response objects")
        payload = [
            TrafficDataResponse(
                date=row.date,
                traffic_volume=row.traffic_volume,
                pedestrian_count=row.pedestrian_count,
                congestion_level=row.congestion_level,
                average_speed=row.average_speed,
                source=row.source
            )
            for row in rows
        ]
        logger.debug("Response objects created", count=len(payload))
        logger.debug("Caching traffic data")
        await cache.set_cached_traffic(city.city_id, start_date, end_date, payload)
        logger.debug("Caching completed")
        logger.info(
            "Historical traffic data retrieved",
            records=len(payload),
            source="database"
        )
        return payload
    except HTTPException:
        raise
    except Exception as e:
        logger.error("Error fetching historical traffic", error=str(e))
        raise HTTPException(status_code=500, detail="Internal server error")
# ================================================================
# REAL-TIME & FORECAST ENDPOINTS
# ================================================================
@router.get(
    route_builder.build_operations_route("weather/current"),
    response_model=WeatherDataResponse
)
async def get_current_weather(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    latitude: float = Query(..., description="Latitude"),
    longitude: float = Query(..., description="Longitude")
):
    """
    Get current weather for a location (real-time data from AEMET)
    """
    try:
        observation = await WeatherService().get_current_weather(latitude, longitude)
        if not observation:
            raise HTTPException(
                status_code=404,
                detail="No weather data available for this location"
            )
        logger.info(
            "Current weather retrieved",
            tenant_id=tenant_id,
            latitude=latitude,
            longitude=longitude
        )
        return observation
    except HTTPException:
        raise
    except Exception as e:
        logger.error("Error fetching current weather", error=str(e))
        raise HTTPException(status_code=500, detail="Internal server error")
@router.get(
    route_builder.build_operations_route("weather/forecast")
)
async def get_weather_forecast(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    latitude: float = Query(..., description="Latitude"),
    longitude: float = Query(..., description="Longitude"),
    days: int = Query(7, ge=1, le=14, description="Number of days to forecast")
):
    """
    Get weather forecast for a location (from AEMET)
    Returns list of forecast objects with: forecast_date, generated_at, temperature, precipitation, humidity, wind_speed, description, source
    """
    try:
        forecasts = await WeatherService().get_weather_forecast(latitude, longitude, days)
        if not forecasts:
            raise HTTPException(
                status_code=404,
                detail="No forecast data available for this location"
            )
        logger.info(
            "Weather forecast retrieved",
            tenant_id=tenant_id,
            latitude=latitude,
            longitude=longitude,
            days=days,
            count=len(forecasts)
        )
        return forecasts
    except HTTPException:
        raise
    except Exception as e:
        logger.error("Error fetching weather forecast", error=str(e))
        raise HTTPException(status_code=500, detail="Internal server error")
@router.get(
    route_builder.build_operations_route("traffic/current"),
    response_model=TrafficDataResponse
)
async def get_current_traffic(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    latitude: float = Query(..., description="Latitude"),
    longitude: float = Query(..., description="Longitude")
):
    """
    Get current traffic conditions for a location (real-time data from Madrid OpenData)
    """
    try:
        conditions = await TrafficService().get_current_traffic(latitude, longitude)
        if not conditions:
            raise HTTPException(
                status_code=404,
                detail="No traffic data available for this location"
            )
        logger.info(
            "Current traffic retrieved",
            tenant_id=tenant_id,
            latitude=latitude,
            longitude=longitude
        )
        return conditions
    except HTTPException:
        raise
    except Exception as e:
        logger.error("Error fetching current traffic", error=str(e))
        raise HTTPException(status_code=500, detail="Internal server error")
# ============================================================================
# Tenant Data Deletion Operations (Internal Service Only)
# ============================================================================
@router.delete(
    route_builder.build_base_route("tenant/{tenant_id}", include_tenant_prefix=False),
    response_model=dict
)
@service_only_access
async def delete_tenant_data(
    tenant_id: str = Path(..., description="Tenant ID to delete data for"),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """
    Permanently remove tenant-scoped data from the External service
    (internal service callers only).

    Only tenant-specific records are removed — tenant audit logs and any
    tenant-specific weather rows. City-wide datasets (CityWeatherData,
    CityTrafficData, TrafficData, etc.) are shared by every tenant and are
    deliberately preserved.

    **WARNING**: This operation is irreversible!

    Returns:
        Deletion summary with record counts and a note about preserved data.
    """
    try:
        logger.info("external.tenant_deletion.api_called", tenant_id=tenant_id)
        outcome = await ExternalTenantDeletionService(db).safe_delete_tenant_data(tenant_id)
        if not outcome.success:
            raise HTTPException(
                status_code=500,
                detail=f"Tenant data deletion failed: {', '.join(outcome.errors)}"
            )
        return {
            "message": "Tenant-specific data deletion completed successfully",
            "note": "City-wide shared data (weather, traffic) has been preserved",
            "summary": outcome.to_dict()
        }
    except HTTPException:
        raise
    except Exception as e:
        logger.error("external.tenant_deletion.api_error",
                    tenant_id=tenant_id,
                    error=str(e),
                    exc_info=True)
        raise HTTPException(
            status_code=500,
            detail=f"Failed to delete tenant data: {str(e)}"
        )
@router.get(
    route_builder.build_base_route("tenant/{tenant_id}/deletion-preview", include_tenant_prefix=False),
    response_model=dict
)
@service_only_access
async def preview_tenant_data_deletion(
    tenant_id: str = Path(..., description="Tenant ID to preview deletion for"),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """
    Dry-run: report what tenant-specific data a deletion would remove.

    Counts only tenant-owned records. City-wide shared data (CityWeatherData,
    CityTrafficData, TrafficData, etc.) is never deleted and is listed in the
    response as preserved.

    Returns:
        dict with the per-entity preview counts, their total, and notes about
        preserved shared data.
    """
    try:
        logger.info("external.tenant_deletion.preview_called", tenant_id=tenant_id)
        preview = await ExternalTenantDeletionService(db).get_tenant_data_preview(tenant_id)
        # Keys prefixed with "_" are metadata, not record counts — exclude them.
        record_total = sum(
            count for name, count in preview.items() if not name.startswith("_")
        )
        return {
            "tenant_id": tenant_id,
            "service": "external",
            "preview": preview,
            "total_records": record_total,
            "note": "City-wide data (weather, traffic) is shared and will NOT be deleted",
            "preserved_data": [
                "CityWeatherData (city-wide)",
                "CityTrafficData (city-wide)",
                "TrafficData (city-wide)",
                "TrafficMeasurementPoint (reference data)",
                "WeatherForecast (city-wide)"
            ],
            "warning": "Only tenant-specific records will be permanently deleted"
        }
    except Exception as e:
        logger.error("external.tenant_deletion.preview_error",
                     tenant_id=tenant_id,
                     error=str(e),
                     exc_info=True)
        raise HTTPException(
            status_code=500,
            detail=f"Failed to preview tenant data deletion: {str(e)}"
        )

302
services/external/app/api/geocoding.py vendored Normal file
View File

@@ -0,0 +1,302 @@
"""
Geocoding API Endpoints
Provides address search, autocomplete, and geocoding via Nominatim.
"""
from fastapi import APIRouter, Query, HTTPException
from typing import List, Optional
from pydantic import BaseModel, Field
import structlog
from app.services.nominatim_service import NominatimService
logger = structlog.get_logger()
router = APIRouter(prefix="/api/v1/geocoding", tags=["Geocoding"])
# Module-level Nominatim client shared by every request handled by this router.
# In production, override with an environment variable to point at a
# self-hosted Nominatim instance instead of the public API.
nominatim_service = NominatimService()
# Response Models
# Response Models
class AddressResult(BaseModel):
    """Single address match returned by the /search (autocomplete) endpoint.

    Field names mirror Nominatim's search response. The OSM "class" attribute
    is exposed via the ``class_`` field with alias ``"class"`` because
    ``class`` is a Python keyword; FastAPI serializes it back as "class".
    """
    display_name: str = Field(..., description="Full formatted address")
    lat: float = Field(..., description="Latitude")
    lon: float = Field(..., description="Longitude")
    osm_type: str = Field(..., description="OSM object type")
    osm_id: int = Field(..., description="OSM object ID")
    place_id: int = Field(..., description="Nominatim place ID")
    type: str = Field(..., description="Place type")
    class_: str = Field(..., alias="class", description="OSM class")
    address: dict = Field(..., description="Parsed address components")
    boundingbox: List[str] = Field(..., description="Bounding box coordinates")
class GeocodeResult(BaseModel):
    """Result of forward (/geocode) or reverse (/reverse) geocoding.

    A reduced view of a Nominatim match: formatted address, coordinates, and
    the parsed address components dict.
    """
    display_name: str = Field(..., description="Full formatted address")
    lat: float = Field(..., description="Latitude")
    lon: float = Field(..., description="Longitude")
    address: dict = Field(..., description="Parsed address components")
class CoordinateValidation(BaseModel):
    """Outcome of the /validate endpoint.

    ``address`` is populated only when the coordinates are valid AND a
    reverse-geocode lookup succeeds; otherwise it stays None.
    """
    valid: bool = Field(..., description="Whether coordinates are valid")
    address: Optional[str] = Field(None, description="Address at coordinates if valid")
# Endpoints
# Endpoints
@router.get(
    "/search",
    response_model=List[AddressResult],
    summary="Search for addresses",
    description="Search for addresses matching query (autocomplete). Minimum 3 characters required."
)
async def search_addresses(
    q: str = Query(..., min_length=3, description="Search query (minimum 3 characters)"),
    country_code: str = Query("es", description="ISO country code to restrict search"),
    limit: int = Query(10, ge=1, le=50, description="Maximum number of results")
):
    """Autocomplete-style address lookup.

    Forwards the query to the shared Nominatim client, restricted to the
    given country, and returns up to ``limit`` matches ordered by relevance.

    Example:
        GET /api/v1/geocoding/search?q=Gran%20Via%20Madrid&limit=5
    """
    try:
        matches = await nominatim_service.search_address(
            query=q,
            country_code=country_code,
            limit=limit
        )
        logger.info(
            "Address search request",
            query=q,
            country=country_code,
            result_count=len(matches)
        )
        return matches
    except Exception as exc:
        logger.error(
            "Address search failed",
            query=q,
            error=str(exc),
            exc_info=True
        )
        raise HTTPException(
            status_code=500,
            detail=f"Address search failed: {str(exc)}"
        )
@router.get(
    "/geocode",
    response_model=GeocodeResult,
    summary="Geocode an address",
    description="Convert an address string to coordinates (lat/lon)"
)
async def geocode_address(
    address: str = Query(..., min_length=5, description="Full address to geocode"),
    country_code: str = Query("es", description="ISO country code")
):
    """Forward-geocode an address string to coordinates.

    Returns the single best Nominatim match; 404 when nothing matches.

    Example:
        GET /api/v1/geocoding/geocode?address=Gran%20Via%2028,%20Madrid
    """
    try:
        match = await nominatim_service.geocode_address(
            address=address,
            country_code=country_code
        )
        if not match:
            raise HTTPException(
                status_code=404,
                detail=f"Address not found: {address}"
            )
        logger.info(
            "Geocoding request",
            address=address,
            lat=match["lat"],
            lon=match["lon"]
        )
        return match
    except HTTPException:
        raise
    except Exception as exc:
        logger.error(
            "Geocoding failed",
            address=address,
            error=str(exc),
            exc_info=True
        )
        raise HTTPException(
            status_code=500,
            detail=f"Geocoding failed: {str(exc)}"
        )
@router.get(
    "/reverse",
    response_model=GeocodeResult,
    summary="Reverse geocode coordinates",
    description="Convert coordinates (lat/lon) to an address"
)
async def reverse_geocode(
    lat: float = Query(..., ge=-90, le=90, description="Latitude"),
    lon: float = Query(..., ge=-180, le=180, description="Longitude")
):
    """Reverse-geocode a coordinate pair to the nearest address.

    404 when Nominatim has no address at that location.

    Example:
        GET /api/v1/geocoding/reverse?lat=40.4168&lon=-3.7038
    """
    try:
        located = await nominatim_service.reverse_geocode(
            latitude=lat,
            longitude=lon
        )
        if not located:
            raise HTTPException(
                status_code=404,
                detail=f"No address found at coordinates: {lat}, {lon}"
            )
        logger.info(
            "Reverse geocoding request",
            lat=lat,
            lon=lon,
            address=located["display_name"]
        )
        return located
    except HTTPException:
        raise
    except Exception as exc:
        logger.error(
            "Reverse geocoding failed",
            lat=lat,
            lon=lon,
            error=str(exc),
            exc_info=True
        )
        raise HTTPException(
            status_code=500,
            detail=f"Reverse geocoding failed: {str(exc)}"
        )
@router.get(
    "/validate",
    response_model=CoordinateValidation,
    summary="Validate coordinates",
    description="Check if coordinates point to a valid location"
)
async def validate_coordinates(
    lat: float = Query(..., ge=-90, le=90, description="Latitude"),
    lon: float = Query(..., ge=-180, le=180, description="Longitude")
):
    """Check that a coordinate pair resolves to a real location.

    When valid, a best-effort reverse geocode fills in the address field.

    Example:
        GET /api/v1/geocoding/validate?lat=40.4168&lon=-3.7038
    """
    try:
        coords_ok = await nominatim_service.validate_coordinates(
            latitude=lat,
            longitude=lon
        )
        payload = {"valid": coords_ok, "address": None}
        if coords_ok:
            # Enrich the response with the address at the point, if any.
            located = await nominatim_service.reverse_geocode(lat, lon)
            if located:
                payload["address"] = located["display_name"]
        logger.info(
            "Coordinate validation request",
            lat=lat,
            lon=lon,
            valid=coords_ok
        )
        return payload
    except Exception as exc:
        logger.error(
            "Coordinate validation failed",
            lat=lat,
            lon=lon,
            error=str(exc),
            exc_info=True
        )
        raise HTTPException(
            status_code=500,
            detail=f"Coordinate validation failed: {str(exc)}"
        )
@router.get(
    "/health",
    summary="Check geocoding service health",
    description="Check if Nominatim service is accessible"
)
async def health_check():
    """Report whether the configured Nominatim backend is reachable.

    503 when the backend reports unhealthy or the probe itself fails.
    """
    try:
        if not await nominatim_service.health_check():
            raise HTTPException(
                status_code=503,
                detail="Nominatim service is unavailable"
            )
        return {
            "status": "healthy",
            "service": "nominatim",
            "base_url": nominatim_service.base_url,
            "is_public_api": nominatim_service.is_public_api
        }
    except HTTPException:
        raise
    except Exception as exc:
        logger.error(
            "Health check failed",
            error=str(exc),
            exc_info=True
        )
        raise HTTPException(
            status_code=503,
            detail=f"Health check failed: {str(exc)}"
        )

532
services/external/app/api/poi_context.py vendored Normal file
View File

@@ -0,0 +1,532 @@
"""
POI Context API Endpoints
REST API for POI detection, retrieval, and management.
"""
from fastapi import APIRouter, Depends, HTTPException, Query
from sqlalchemy.ext.asyncio import AsyncSession
from typing import Optional
import structlog
import uuid
from app.core.database import get_db
from app.services.poi_detection_service import POIDetectionService
from app.services.poi_feature_selector import POIFeatureSelector
from app.services.competitor_analyzer import CompetitorAnalyzer
from app.services.poi_refresh_service import POIRefreshService
from app.repositories.poi_context_repository import POIContextRepository
from app.cache.poi_cache_service import POICacheService
from app.core.redis_client import get_redis_client
from shared.routing.route_builder import RouteBuilder
# Module-level singletons: structured logger, service-scoped route builder,
# and the router that all POI-context endpoints attach to.
logger = structlog.get_logger()
route_builder = RouteBuilder('external')
router = APIRouter(tags=["POI Context"])
@router.post(
    route_builder.build_base_route("poi-context/detect")
)
async def detect_pois_for_tenant(
    tenant_id: str,
    latitude: float = Query(..., description="Bakery latitude"),
    longitude: float = Query(..., description="Bakery longitude"),
    force_refresh: bool = Query(False, description="Force refresh, skip cache"),
    db: AsyncSession = Depends(get_db)
):
    """
    Detect POIs for a tenant's bakery location.
    Performs automated POI detection using Overpass API, calculates ML features,
    and stores results for demand forecasting.

    Pipeline (failures in individual steps fall back to defaults so the
    overall detection still succeeds, except for the final database save):
      1. Serve cached results for these coordinates unless force_refresh.
      2. Detect POIs via POIDetectionService (Overpass API).
      3. Select ML-relevant features (defaults to empty on failure).
      4. Analyze the competitive landscape (defaults to "no competition").
      5. Cache and persist the combined results (persist failure -> 500).
      6. Schedule an automatic refresh job (non-blocking on failure).
      7. Auto-suggest a school calendar from POI data (non-blocking).

    Raises:
        HTTPException: 400 for a malformed tenant_id, 500 when the database
            save or the detection itself fails.
    """
    try:
        tenant_uuid = uuid.UUID(tenant_id)
    except ValueError:
        raise HTTPException(status_code=400, detail="Invalid tenant_id format")
    logger.info(
        "POI detection requested",
        tenant_id=tenant_id,
        location=(latitude, longitude),
        force_refresh=force_refresh
    )
    try:
        # Initialize services
        poi_service = POIDetectionService()
        feature_selector = POIFeatureSelector()
        competitor_analyzer = CompetitorAnalyzer()
        poi_repo = POIContextRepository(db)
        redis_client = await get_redis_client()
        cache_service = POICacheService(redis_client)
        # Check cache first (unless force refresh). The cache is keyed by
        # coordinates, so a hit is still persisted for THIS tenant below.
        if not force_refresh:
            cached_result = await cache_service.get_cached_pois(latitude, longitude)
            if cached_result:
                logger.info("Using cached POI results", tenant_id=tenant_id)
                # Still save to database for this tenant
                poi_context = await poi_repo.create_or_update(tenant_uuid, cached_result)
                return {
                    "status": "success",
                    "source": "cache",
                    "poi_context": poi_context.to_dict()
                }
        # Detect POIs
        poi_results = await poi_service.detect_pois_for_bakery(
            latitude, longitude, tenant_id
        )
        # Select relevant features
        try:
            feature_selection = feature_selector.select_relevant_features(
                poi_results["poi_categories"],
                tenant_id
            )
        except Exception as e:
            logger.error(
                "Feature selection failed",
                tenant_id=tenant_id,
                error=str(e),
                exc_info=True
            )
            # Provide default feature selection to continue
            feature_selection = {
                "features": {},
                "relevant_categories": [],
                "relevance_report": [],
                "total_features": 0,
                "total_relevant_categories": 0
            }
        # Analyze competitors specifically
        try:
            competitors_data = poi_results["poi_categories"].get("competitors", {})
            competitor_pois = competitors_data.get("pois", [])
            competitor_analysis = competitor_analyzer.analyze_competitive_landscape(
                competitor_pois,
                (latitude, longitude),
                tenant_id
            )
        except Exception as e:
            logger.error(
                "Competitor analysis failed",
                tenant_id=tenant_id,
                error=str(e),
                exc_info=True
            )
            # Provide default competitor analysis to continue
            # (the "no competitors detected" shape expected downstream).
            competitor_analysis = {
                "competitive_pressure_score": 0.0,
                "direct_competitors_count": 0,
                "nearby_competitors_count": 0,
                "market_competitors_count": 0,
                "total_competitors_count": 0,
                "competitive_zone": "low_competition",
                "market_type": "underserved",
                "competitive_advantage": "first_mover",
                "ml_feature_competitive_pressure": 0.0,
                "ml_feature_has_direct_competitor": 0,
                "ml_feature_competitor_density_500m": 0,
                "competitor_details": [],
                "nearest_competitor": None
            }
        # Generate competitive insights
        try:
            competitive_insights = competitor_analyzer.get_competitive_insights(
                competitor_analysis
            )
        except Exception as e:
            logger.warning(
                "Failed to generate competitive insights",
                tenant_id=tenant_id,
                error=str(e)
            )
            competitive_insights = []
        # Combine results
        enhanced_results = {
            **poi_results,
            "ml_features": feature_selection.get("features", {}),
            "relevant_categories": feature_selection.get("relevant_categories", []),
            "relevance_report": feature_selection.get("relevance_report", []),
            "competitor_analysis": competitor_analysis,
            "competitive_insights": competitive_insights
        }
        # Cache results (best-effort; a cache failure must not fail detection)
        try:
            await cache_service.cache_poi_results(latitude, longitude, enhanced_results)
        except Exception as e:
            logger.warning(
                "Failed to cache POI results",
                tenant_id=tenant_id,
                error=str(e)
            )
        # Save to database (this step is mandatory -> 500 on failure)
        try:
            poi_context = await poi_repo.create_or_update(tenant_uuid, enhanced_results)
        except Exception as e:
            logger.error(
                "Failed to save POI context to database",
                tenant_id=tenant_id,
                error=str(e),
                exc_info=True
            )
            raise HTTPException(
                status_code=500,
                detail=f"Failed to save POI context: {str(e)}"
            )
        # Schedule automatic refresh job (180 days from now)
        try:
            poi_refresh_service = POIRefreshService()
            refresh_job = await poi_refresh_service.schedule_refresh_job(
                tenant_id=tenant_id,
                latitude=latitude,
                longitude=longitude,
                session=db
            )
            logger.info(
                "POI refresh job scheduled",
                tenant_id=tenant_id,
                job_id=str(refresh_job.id),
                scheduled_at=refresh_job.scheduled_at
            )
        except Exception as e:
            logger.warning(
                "Failed to schedule POI refresh job",
                tenant_id=tenant_id,
                error=str(e)
            )
        logger.info(
            "POI detection completed",
            tenant_id=tenant_id,
            total_pois=poi_context.total_pois_detected,
            relevant_categories=len(feature_selection.get("relevant_categories", []))
        )
        # Phase 3: Auto-trigger calendar suggestion after POI detection
        # This helps admins by providing intelligent calendar recommendations
        calendar_suggestion = None
        try:
            # Imported lazily to avoid a hard dependency at module import time.
            from app.utils.calendar_suggester import CalendarSuggester
            from app.repositories.calendar_repository import CalendarRepository
            # Get tenant's location context
            calendar_repo = CalendarRepository(db)
            location_context = await calendar_repo.get_tenant_location_context(tenant_uuid)
            if location_context and location_context.school_calendar_id is None:
                # Only suggest if no calendar assigned yet
                city_id = location_context.city_id
                # Get available calendars for city
                calendars_result = await calendar_repo.get_calendars_by_city(city_id, enabled_only=True)
                calendars = calendars_result.get("calendars", []) if calendars_result else []
                if calendars:
                    # Generate suggestion using POI data
                    suggester = CalendarSuggester()
                    calendar_suggestion = suggester.suggest_calendar_for_tenant(
                        city_id=city_id,
                        available_calendars=calendars,
                        poi_context=poi_context.to_dict(),
                        tenant_data=None
                    )
                    logger.info(
                        "Calendar suggestion auto-generated after POI detection",
                        tenant_id=tenant_id,
                        suggested_calendar=calendar_suggestion.get("calendar_name"),
                        confidence=calendar_suggestion.get("confidence_percentage"),
                        should_auto_assign=calendar_suggestion.get("should_auto_assign")
                    )
                    # TODO: Send notification to admin about available suggestion
                    # This will be implemented when notification service is integrated
                else:
                    logger.info(
                        "No calendars available for city, skipping suggestion",
                        tenant_id=tenant_id,
                        city_id=city_id
                    )
            elif location_context and location_context.school_calendar_id:
                logger.info(
                    "Calendar already assigned, skipping suggestion",
                    tenant_id=tenant_id,
                    calendar_id=str(location_context.school_calendar_id)
                )
            else:
                logger.warning(
                    "No location context found, skipping calendar suggestion",
                    tenant_id=tenant_id
                )
        except Exception as e:
            # Non-blocking: POI detection should succeed even if suggestion fails
            logger.warning(
                "Failed to auto-generate calendar suggestion (non-blocking)",
                tenant_id=tenant_id,
                error=str(e)
            )
        return {
            "status": "success",
            "source": "detection",
            "poi_context": poi_context.to_dict(),
            "feature_selection": feature_selection,
            "competitor_analysis": competitor_analysis,
            "competitive_insights": competitive_insights,
            "calendar_suggestion": calendar_suggestion  # Include suggestion in response
        }
    except Exception as e:
        logger.error(
            "POI detection failed",
            tenant_id=tenant_id,
            error=str(e),
            exc_info=True
        )
        raise HTTPException(
            status_code=500,
            detail=f"POI detection failed: {str(e)}"
        )
@router.get(
    route_builder.build_base_route("poi-context")
)
async def get_poi_context(
    tenant_id: str,
    db: AsyncSession = Depends(get_db)
):
    """Return the stored POI context (detection results and ML features).

    404 when no context exists for the tenant; also flags whether the
    stored data is stale and due for a refresh.
    """
    try:
        tenant_uuid = uuid.UUID(tenant_id)
    except ValueError:
        raise HTTPException(status_code=400, detail="Invalid tenant_id format")
    context = await POIContextRepository(db).get_by_tenant_id(tenant_uuid)
    if context is None:
        raise HTTPException(
            status_code=404,
            detail=f"POI context not found for tenant {tenant_id}"
        )
    stale = context.is_stale()
    return {
        "poi_context": context.to_dict(),
        "is_stale": stale,
        "needs_refresh": stale
    }
@router.post(
    route_builder.build_base_route("poi-context/refresh")
)
async def refresh_poi_context(
    tenant_id: str,
    db: AsyncSession = Depends(get_db)
):
    """Re-run POI detection at the tenant's stored coordinates.

    Requires an existing context (404 otherwise); delegates to the detect
    endpoint with force_refresh=True so the cache is bypassed.
    """
    try:
        tenant_uuid = uuid.UUID(tenant_id)
    except ValueError:
        raise HTTPException(status_code=400, detail="Invalid tenant_id format")
    existing = await POIContextRepository(db).get_by_tenant_id(tenant_uuid)
    if existing is None:
        raise HTTPException(
            status_code=404,
            detail=f"POI context not found for tenant {tenant_id}. Use detect endpoint first."
        )
    # Perform detection with force_refresh=True
    return await detect_pois_for_tenant(
        tenant_id=tenant_id,
        latitude=existing.latitude,
        longitude=existing.longitude,
        force_refresh=True,
        db=db
    )
@router.delete(
    route_builder.build_base_route("poi-context")
)
async def delete_poi_context(
    tenant_id: str,
    db: AsyncSession = Depends(get_db)
):
    """Permanently remove the tenant's stored POI context (404 if absent)."""
    try:
        tenant_uuid = uuid.UUID(tenant_id)
    except ValueError:
        raise HTTPException(status_code=400, detail="Invalid tenant_id format")
    removed = await POIContextRepository(db).delete_by_tenant_id(tenant_uuid)
    if not removed:
        raise HTTPException(
            status_code=404,
            detail=f"POI context not found for tenant {tenant_id}"
        )
    return {
        "status": "success",
        "message": f"POI context deleted for tenant {tenant_id}"
    }
@router.get(
    route_builder.build_base_route("poi-context/feature-importance")
)
async def get_feature_importance(
    tenant_id: str,
    db: AsyncSession = Depends(get_db)
):
    """Summarize which POI categories are ML-relevant for this tenant.

    Re-derives the importance summary from the stored detection results;
    404 when no POI context exists.
    """
    try:
        tenant_uuid = uuid.UUID(tenant_id)
    except ValueError:
        raise HTTPException(status_code=400, detail="Invalid tenant_id format")
    context = await POIContextRepository(db).get_by_tenant_id(tenant_uuid)
    if context is None:
        raise HTTPException(
            status_code=404,
            detail=f"POI context not found for tenant {tenant_id}"
        )
    summary = POIFeatureSelector().get_feature_importance_summary(
        context.poi_detection_results
    )
    relevant_total = sum(1 for entry in summary if entry["is_relevant"])
    return {
        "tenant_id": tenant_id,
        "feature_importance": summary,
        "total_categories": len(summary),
        "relevant_categories": relevant_total
    }
@router.get(
    route_builder.build_base_route("poi-context/competitor-analysis")
)
async def get_competitor_analysis(
    tenant_id: str,
    db: AsyncSession = Depends(get_db)
):
    """Re-run the competitive-landscape analysis from stored POI data.

    Uses the competitors recorded in the tenant's POI context; 404 when no
    context exists.
    """
    try:
        tenant_uuid = uuid.UUID(tenant_id)
    except ValueError:
        raise HTTPException(status_code=400, detail="Invalid tenant_id format")
    context = await POIContextRepository(db).get_by_tenant_id(tenant_uuid)
    if context is None:
        raise HTTPException(
            status_code=404,
            detail=f"POI context not found for tenant {tenant_id}"
        )
    analyzer = CompetitorAnalyzer()
    competitor_pois = context.poi_detection_results.get("competitors", {}).get("pois", [])
    landscape = analyzer.analyze_competitive_landscape(
        competitor_pois,
        (context.latitude, context.longitude),
        tenant_id
    )
    return {
        "tenant_id": tenant_id,
        "location": {
            "latitude": context.latitude,
            "longitude": context.longitude
        },
        "competitor_analysis": landscape,
        "insights": analyzer.get_competitive_insights(landscape)
    }
@router.get("/health")
async def poi_health_check():
    """Probe Overpass API reachability via the POI detection service.

    503 when the probe reports unhealthy.
    """
    probe = await POIDetectionService().health_check()
    if not probe["healthy"]:
        raise HTTPException(
            status_code=503,
            detail=f"POI detection service unhealthy: {probe.get('error', 'Unknown error')}"
        )
    return {
        "status": "healthy",
        "overpass_api": probe
    }
@router.get("/cache/stats")
async def get_cache_stats():
    """Expose statistics from the Redis-backed POI cache."""
    try:
        cache = POICacheService(await get_redis_client())
        return {
            "status": "success",
            "cache_stats": await cache.get_cache_stats()
        }
    except Exception as exc:
        logger.error("Failed to get cache stats", error=str(exc))
        raise HTTPException(
            status_code=500,
            detail=f"Failed to get cache stats: {str(exc)}"
        )

View File

@@ -0,0 +1,441 @@
"""
POI Refresh Jobs API Endpoints
REST API for managing POI refresh background jobs.
"""
from fastapi import APIRouter, Depends, HTTPException, Query
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, desc
from typing import List, Optional
from datetime import datetime, timezone
from pydantic import BaseModel, Field
import structlog
import uuid
from app.core.database import get_db
from app.services.poi_refresh_service import POIRefreshService
from app.services.poi_scheduler import get_scheduler
from app.models.poi_refresh_job import POIRefreshJob
logger = structlog.get_logger()
router = APIRouter(prefix="/poi-refresh-jobs", tags=["POI Refresh Jobs"])
# Response Models
# Response Models
class POIRefreshJobResponse(BaseModel):
    """API representation of a POI refresh background job.

    Mirrors the ``POIRefreshJob`` ORM model field-for-field;
    ``from_attributes`` allows construction directly from ORM instances.
    """
    id: str
    tenant_id: str
    status: str
    scheduled_at: datetime
    started_at: Optional[datetime] = None
    completed_at: Optional[datetime] = None
    attempt_count: int
    max_attempts: int
    pois_detected: Optional[int] = None
    changes_detected: bool = False  # True when the refresh found POI differences
    change_summary: Optional[dict] = None
    error_message: Optional[str] = None
    next_scheduled_at: Optional[datetime] = None
    duration_seconds: Optional[float] = None
    is_overdue: bool  # computed on the ORM model — scheduled_at already passed
    can_retry: bool   # computed on the ORM model — attempts remaining
    class Config:
        from_attributes = True
class ScheduleJobRequest(BaseModel):
    """Request body for POST /schedule.

    When ``scheduled_at`` is omitted, the refresh service picks its default
    (180 days from now).
    """
    tenant_id: str = Field(..., description="Tenant UUID")
    latitude: float = Field(..., ge=-90, le=90, description="Bakery latitude")
    longitude: float = Field(..., ge=-180, le=180, description="Bakery longitude")
    scheduled_at: Optional[datetime] = Field(None, description="When to run (default: 180 days from now)")
class JobExecutionResult(BaseModel):
    """Outcome of executing a refresh job.

    Constructed directly from the dict returned by
    ``POIRefreshService.execute_refresh_job``; most fields are optional
    because success and failure results carry different keys.
    """
    status: str
    job_id: str
    message: Optional[str] = None
    pois_detected: Optional[int] = None
    changes_detected: Optional[bool] = None
    change_summary: Optional[dict] = None
    duration_seconds: Optional[float] = None
    next_scheduled_at: Optional[str] = None
    error: Optional[str] = None          # populated on failure
    attempt: Optional[int] = None        # attempt number that produced this result
    can_retry: Optional[bool] = None     # whether another attempt remains
# Endpoints
# Endpoints
@router.post(
    "/schedule",
    response_model=POIRefreshJobResponse,
    summary="Schedule POI refresh job",
    description="Schedule a background job to refresh POI context for a tenant"
)
async def schedule_refresh_job(
    request: ScheduleJobRequest,
    db: AsyncSession = Depends(get_db)
):
    """Create a background POI refresh job for a tenant location.

    The job runs at ``request.scheduled_at`` (service default: 180 days from
    now). 400 for a malformed tenant UUID, 500 when scheduling fails.
    """
    try:
        uuid.UUID(request.tenant_id)  # validation only; service takes the string
    except ValueError:
        raise HTTPException(status_code=400, detail="Invalid tenant_id format")
    try:
        job = await POIRefreshService().schedule_refresh_job(
            tenant_id=request.tenant_id,
            latitude=request.latitude,
            longitude=request.longitude,
            scheduled_at=request.scheduled_at,
            session=db
        )
        logger.info(
            "POI refresh job scheduled via API",
            tenant_id=request.tenant_id,
            job_id=str(job.id),
            scheduled_at=job.scheduled_at
        )
        # Copy the plain attributes wholesale; only the UUIDs need str().
        plain_fields = (
            "status", "scheduled_at", "started_at", "completed_at",
            "attempt_count", "max_attempts", "pois_detected", "changes_detected",
            "change_summary", "error_message", "next_scheduled_at",
            "duration_seconds", "is_overdue", "can_retry"
        )
        return POIRefreshJobResponse(
            id=str(job.id),
            tenant_id=str(job.tenant_id),
            **{name: getattr(job, name) for name in plain_fields}
        )
    except Exception as exc:
        logger.error(
            "Failed to schedule POI refresh job",
            tenant_id=request.tenant_id,
            error=str(exc),
            exc_info=True
        )
        raise HTTPException(
            status_code=500,
            detail=f"Failed to schedule refresh job: {str(exc)}"
        )
@router.get(
    "/{job_id}",
    response_model=POIRefreshJobResponse,
    summary="Get refresh job by ID",
    description="Retrieve details of a specific POI refresh job"
)
async def get_refresh_job(
    job_id: str,
    db: AsyncSession = Depends(get_db)
):
    """Fetch a single POI refresh job (400 on bad UUID, 404 if missing)."""
    try:
        job_uuid = uuid.UUID(job_id)
    except ValueError:
        raise HTTPException(status_code=400, detail="Invalid job_id format")
    lookup = await db.execute(
        select(POIRefreshJob).where(POIRefreshJob.id == job_uuid)
    )
    job = lookup.scalar_one_or_none()
    if job is None:
        raise HTTPException(status_code=404, detail=f"Job not found: {job_id}")
    # Copy the plain attributes wholesale; only the UUIDs need str().
    plain_fields = (
        "status", "scheduled_at", "started_at", "completed_at",
        "attempt_count", "max_attempts", "pois_detected", "changes_detected",
        "change_summary", "error_message", "next_scheduled_at",
        "duration_seconds", "is_overdue", "can_retry"
    )
    return POIRefreshJobResponse(
        id=str(job.id),
        tenant_id=str(job.tenant_id),
        **{name: getattr(job, name) for name in plain_fields}
    )
@router.get(
    "/tenant/{tenant_id}",
    response_model=List[POIRefreshJobResponse],
    summary="Get refresh jobs for tenant",
    description="Retrieve all POI refresh jobs for a specific tenant"
)
async def get_tenant_refresh_jobs(
    tenant_id: str,
    status: Optional[str] = Query(None, description="Filter by status"),
    limit: int = Query(50, ge=1, le=200, description="Maximum number of results"),
    db: AsyncSession = Depends(get_db)
):
    """List a tenant's refresh jobs, newest scheduled first, optionally
    filtered by status."""
    try:
        tenant_uuid = uuid.UUID(tenant_id)
    except ValueError:
        raise HTTPException(status_code=400, detail="Invalid tenant_id format")
    stmt = select(POIRefreshJob).where(POIRefreshJob.tenant_id == tenant_uuid)
    if status:
        stmt = stmt.where(POIRefreshJob.status == status)
    stmt = stmt.order_by(desc(POIRefreshJob.scheduled_at)).limit(limit)
    jobs = (await db.execute(stmt)).scalars().all()
    # Copy the plain attributes wholesale; only the UUIDs need str().
    plain_fields = (
        "status", "scheduled_at", "started_at", "completed_at",
        "attempt_count", "max_attempts", "pois_detected", "changes_detected",
        "change_summary", "error_message", "next_scheduled_at",
        "duration_seconds", "is_overdue", "can_retry"
    )
    return [
        POIRefreshJobResponse(
            id=str(job.id),
            tenant_id=str(job.tenant_id),
            **{name: getattr(job, name) for name in plain_fields}
        )
        for job in jobs
    ]
@router.post(
    "/{job_id}/execute",
    response_model=JobExecutionResult,
    summary="Execute refresh job",
    description="Manually trigger execution of a pending POI refresh job"
)
async def execute_refresh_job(
    job_id: str,
    db: AsyncSession = Depends(get_db)
):
    """Run a refresh job immediately (400 on bad UUID, 404 when the service
    reports the job is unknown)."""
    try:
        uuid.UUID(job_id)  # validation only; service takes the string
    except ValueError:
        raise HTTPException(status_code=400, detail="Invalid job_id format")
    try:
        outcome = await POIRefreshService().execute_refresh_job(
            job_id=job_id,
            session=db
        )
        logger.info(
            "POI refresh job executed via API",
            job_id=job_id,
            status=outcome["status"]
        )
        return JobExecutionResult(**outcome)
    except ValueError as exc:
        # Service raises ValueError for an unknown job id.
        raise HTTPException(status_code=404, detail=str(exc))
    except Exception as exc:
        logger.error(
            "Failed to execute POI refresh job",
            job_id=job_id,
            error=str(exc),
            exc_info=True
        )
        raise HTTPException(
            status_code=500,
            detail=f"Failed to execute refresh job: {str(exc)}"
        )
@router.post(
    "/process-pending",
    summary="Process all pending jobs",
    description="Manually trigger processing of all pending POI refresh jobs"
)
async def process_pending_jobs(
    max_concurrent: int = Query(5, ge=1, le=20, description="Max concurrent executions"),
    db: AsyncSession = Depends(get_db)
):
    """Run every due refresh job now, at most ``max_concurrent`` at a time."""
    try:
        summary = await POIRefreshService().process_pending_jobs(
            max_concurrent=max_concurrent,
            session=db
        )
        logger.info(
            "Pending POI refresh jobs processed via API",
            total_jobs=summary["total_jobs"],
            successful=summary["successful"],
            failed=summary["failed"]
        )
        return summary
    except Exception as exc:
        logger.error(
            "Failed to process pending POI refresh jobs",
            error=str(exc),
            exc_info=True
        )
        raise HTTPException(
            status_code=500,
            detail=f"Failed to process pending jobs: {str(exc)}"
        )
@router.get(
    "/pending",
    response_model=List[POIRefreshJobResponse],
    summary="Get pending jobs",
    description="Retrieve all pending POI refresh jobs that are due for execution"
)
async def get_pending_jobs(
    limit: int = Query(100, ge=1, le=500, description="Maximum number of results"),
    db: AsyncSession = Depends(get_db)
):
    """Get all pending POI refresh jobs

    Delegates to POIRefreshService.get_pending_jobs and maps each ORM job
    onto the POIRefreshJobResponse schema.

    NOTE(review): this route is registered AFTER the dynamic ``GET /{job_id}``
    route above. FastAPI matches routes in registration order, so
    ``GET /pending`` is captured by ``/{job_id}`` with job_id="pending" and
    returns 400 ("Invalid job_id format") instead of reaching this handler.
    Fixing this requires registering "/pending" before "/{job_id}" (or using
    a distinct prefix) — confirm and reorder at the router level.
    """
    try:
        poi_refresh_service = POIRefreshService()
        jobs = await poi_refresh_service.get_pending_jobs(
            limit=limit,
            session=db
        )
        return [
            POIRefreshJobResponse(
                id=str(job.id),
                tenant_id=str(job.tenant_id),
                status=job.status,
                scheduled_at=job.scheduled_at,
                started_at=job.started_at,
                completed_at=job.completed_at,
                attempt_count=job.attempt_count,
                max_attempts=job.max_attempts,
                pois_detected=job.pois_detected,
                changes_detected=job.changes_detected,
                change_summary=job.change_summary,
                error_message=job.error_message,
                next_scheduled_at=job.next_scheduled_at,
                duration_seconds=job.duration_seconds,
                is_overdue=job.is_overdue,
                can_retry=job.can_retry
            )
            for job in jobs
        ]
    except Exception as e:
        logger.error(
            "Failed to get pending POI refresh jobs",
            error=str(e),
            exc_info=True
        )
        raise HTTPException(
            status_code=500,
            detail=f"Failed to get pending jobs: {str(e)}"
        )
@router.post(
    "/trigger-scheduler",
    summary="Trigger scheduler immediately",
    description="Trigger an immediate check for pending jobs (bypasses schedule)"
)
async def trigger_scheduler():
    """Force the background scheduler to scan for due jobs right now.

    503 when the scheduler is not running.
    """
    try:
        scheduler = get_scheduler()
        if not scheduler.is_running:
            raise HTTPException(
                status_code=503,
                detail="POI refresh scheduler is not running"
            )
        outcome = await scheduler.trigger_immediate_check()
        logger.info(
            "POI refresh scheduler triggered via API",
            total_jobs=outcome["total_jobs"],
            successful=outcome["successful"],
            failed=outcome["failed"]
        )
        return outcome
    except HTTPException:
        raise
    except Exception as exc:
        logger.error(
            "Failed to trigger POI refresh scheduler",
            error=str(exc),
            exc_info=True
        )
        raise HTTPException(
            status_code=500,
            detail=f"Failed to trigger scheduler: {str(exc)}"
        )
@router.get(
    "/scheduler/status",
    summary="Get scheduler status",
    description="Check if POI refresh scheduler is running"
)
async def get_scheduler_status():
    """Report whether the POI refresh scheduler is running, plus its settings.

    Raises:
        HTTPException: 500 when the scheduler cannot be inspected.
    """
    try:
        scheduler = get_scheduler()
        status_payload = {
            "is_running": scheduler.is_running,
            "check_interval_seconds": scheduler.check_interval_seconds,
            "max_concurrent_jobs": scheduler.max_concurrent_jobs
        }
        return status_payload
    except Exception as e:
        logger.error(
            "Failed to get scheduler status",
            error=str(e),
            exc_info=True
        )
        raise HTTPException(
            status_code=500,
            detail=f"Failed to get scheduler status: {str(e)}"
        )

View File

@@ -0,0 +1,129 @@
# services/external/app/api/traffic_data.py
"""
Traffic Data API - Atomic CRUD operations on TrafficData model
"""
from fastapi import APIRouter, Depends, HTTPException, Query, Path
from typing import List, Optional
from datetime import date
from uuid import UUID
import structlog
from app.schemas.traffic import TrafficDataResponse
from app.services.traffic_service import TrafficService
from shared.routing.route_builder import RouteBuilder
from shared.auth.decorators import get_current_user_dep
from shared.auth.access_control import analytics_tier_required
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.database import get_db
# Route builder scoped to the "external" service; generates the tenant-aware
# URL paths used by the route decorators below.
route_builder = RouteBuilder('external')
# Router collecting all traffic-data endpoints under the "traffic-data" tag.
router = APIRouter(tags=["traffic-data"])
# Module-level structured logger.
logger = structlog.get_logger()
def get_traffic_service():
    """FastAPI dependency that returns a new TrafficService instance."""
    return TrafficService()
@router.get(
    route_builder.build_base_route("traffic-data"),
    response_model=List[TrafficDataResponse]
)
@analytics_tier_required
async def list_traffic_data(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    start_date: Optional[date] = Query(None),
    end_date: Optional[date] = Query(None),
    latitude: Optional[float] = Query(None),
    longitude: Optional[float] = Query(None),
    limit: int = Query(100, ge=1, le=1000),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db),
    traffic_service: TrafficService = Depends(get_traffic_service)
):
    """Return stored traffic-data records for a tenant, with optional
    date/coordinate filters (Professional+ tier required)."""
    try:
        logger.info("Listing traffic data", tenant_id=tenant_id)
        # Delegate filtering and retrieval entirely to the service layer.
        return await traffic_service.get_stored_traffic_data(
            tenant_id=tenant_id,
            start_date=start_date,
            end_date=end_date,
            latitude=latitude,
            longitude=longitude,
            limit=limit,
            db=db
        )
    except Exception as e:
        logger.error("Failed to list traffic data", error=str(e), tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail="Failed to retrieve traffic data")
@router.get(
    route_builder.build_resource_detail_route("traffic-data", "traffic_id"),
    response_model=TrafficDataResponse
)
@analytics_tier_required
async def get_traffic_data(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    traffic_id: UUID = Path(..., description="Traffic data ID"),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db),
    traffic_service: TrafficService = Depends(get_traffic_service)
):
    """Fetch one traffic-data record by id; 404 when it does not exist."""
    try:
        logger.info("Getting traffic data", tenant_id=tenant_id, traffic_id=traffic_id)
        record = await traffic_service.get_traffic_data_by_id(
            tenant_id=tenant_id,
            traffic_id=traffic_id,
            db=db
        )
        if not record:
            raise HTTPException(status_code=404, detail="Traffic data not found")
        return record
    except HTTPException:
        # Let the 404 (and any other deliberate HTTP error) pass through.
        raise
    except Exception as e:
        logger.error("Failed to get traffic data", error=str(e), tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail="Failed to retrieve traffic data")
@router.delete(
    route_builder.build_resource_detail_route("traffic-data", "traffic_id")
)
@analytics_tier_required
async def delete_traffic_data(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    traffic_id: UUID = Path(..., description="Traffic data ID"),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db),
    traffic_service: TrafficService = Depends(get_traffic_service)
):
    """Delete a traffic-data record (Professional+ tier required).

    Fix: the original endpoint had no authentication/authorization at all —
    unlike both GET endpoints in this module, which carry
    @analytics_tier_required and the current_user dependency. A destructive
    DELETE must be at least as protected as the reads, so both are added here
    to match the sibling endpoints.

    Raises:
        HTTPException: 404 if no record matches, 500 on service failure.
    """
    try:
        logger.info("Deleting traffic data", tenant_id=tenant_id, traffic_id=traffic_id)
        success = await traffic_service.delete_traffic_data(
            tenant_id=tenant_id,
            traffic_id=traffic_id,
            db=db
        )
        if not success:
            raise HTTPException(status_code=404, detail="Traffic data not found")
        return {"message": "Traffic data deleted successfully"}
    except HTTPException:
        raise
    except Exception as e:
        logger.error("Failed to delete traffic data", error=str(e), tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail="Failed to delete traffic data")

View File

@@ -0,0 +1,129 @@
# services/external/app/api/weather_data.py
"""
Weather Data API - Atomic CRUD operations on WeatherData model
"""
from fastapi import APIRouter, Depends, HTTPException, Query, Path
from typing import List, Optional
from datetime import date
from uuid import UUID
import structlog
from app.schemas.weather import WeatherDataResponse
from app.services.weather_service import WeatherService
from shared.routing.route_builder import RouteBuilder
from shared.auth.decorators import get_current_user_dep
from shared.auth.access_control import analytics_tier_required
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.database import get_db
# Route builder scoped to the "external" service; generates the tenant-aware
# URL paths used by the route decorators below.
route_builder = RouteBuilder('external')
# Router collecting all weather-data endpoints under the "weather-data" tag.
router = APIRouter(tags=["weather-data"])
# Module-level structured logger.
logger = structlog.get_logger()
def get_weather_service():
    """FastAPI dependency that returns a new WeatherService instance."""
    return WeatherService()
@router.get(
    route_builder.build_base_route("weather-data"),
    response_model=List[WeatherDataResponse]
)
@analytics_tier_required
async def list_weather_data(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    start_date: Optional[date] = Query(None),
    end_date: Optional[date] = Query(None),
    latitude: Optional[float] = Query(None),
    longitude: Optional[float] = Query(None),
    limit: int = Query(100, ge=1, le=1000),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db),
    weather_service: WeatherService = Depends(get_weather_service)
):
    """Return stored weather-data records for a tenant, with optional
    date/coordinate filters (Professional+ tier required)."""
    try:
        logger.info("Listing weather data", tenant_id=tenant_id)
        # Delegate filtering and retrieval entirely to the service layer.
        return await weather_service.get_stored_weather_data(
            tenant_id=tenant_id,
            start_date=start_date,
            end_date=end_date,
            latitude=latitude,
            longitude=longitude,
            limit=limit,
            db=db
        )
    except Exception as e:
        logger.error("Failed to list weather data", error=str(e), tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail="Failed to retrieve weather data")
@router.get(
    route_builder.build_resource_detail_route("weather-data", "weather_id"),
    response_model=WeatherDataResponse
)
@analytics_tier_required
async def get_weather_data(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    weather_id: UUID = Path(..., description="Weather data ID"),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db),
    weather_service: WeatherService = Depends(get_weather_service)
):
    """Fetch one weather-data record by id; 404 when it does not exist."""
    try:
        logger.info("Getting weather data", tenant_id=tenant_id, weather_id=weather_id)
        record = await weather_service.get_weather_data_by_id(
            tenant_id=tenant_id,
            weather_id=weather_id,
            db=db
        )
        if not record:
            raise HTTPException(status_code=404, detail="Weather data not found")
        return record
    except HTTPException:
        # Let the 404 (and any other deliberate HTTP error) pass through.
        raise
    except Exception as e:
        logger.error("Failed to get weather data", error=str(e), tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail="Failed to retrieve weather data")
@router.delete(
    route_builder.build_resource_detail_route("weather-data", "weather_id")
)
@analytics_tier_required
async def delete_weather_data(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    weather_id: UUID = Path(..., description="Weather data ID"),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db),
    weather_service: WeatherService = Depends(get_weather_service)
):
    """Delete a weather-data record (Professional+ tier required).

    Fix: the original endpoint had no authentication/authorization at all —
    unlike both GET endpoints in this module, which carry
    @analytics_tier_required and the current_user dependency. A destructive
    DELETE must be at least as protected as the reads, so both are added here
    to match the sibling endpoints.

    Raises:
        HTTPException: 404 if no record matches, 500 on service failure.
    """
    try:
        logger.info("Deleting weather data", tenant_id=tenant_id, weather_id=weather_id)
        success = await weather_service.delete_weather_data(
            tenant_id=tenant_id,
            weather_id=weather_id,
            db=db
        )
        if not success:
            raise HTTPException(status_code=404, detail="Weather data not found")
        return {"message": "Weather data deleted successfully"}
    except HTTPException:
        raise
    except Exception as e:
        logger.error("Failed to delete weather data", error=str(e), tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail="Failed to delete weather data")