Add POI feature and improve the overall backend implementation
This commit is contained in:
302
services/external/app/api/geocoding.py
vendored
Normal file
302
services/external/app/api/geocoding.py
vendored
Normal file
@@ -0,0 +1,302 @@
|
||||
"""
|
||||
Geocoding API Endpoints
|
||||
|
||||
Provides address search, autocomplete, and geocoding via Nominatim.
|
||||
"""
|
||||
|
||||
from fastapi import APIRouter, Query, HTTPException
|
||||
from typing import List, Optional
|
||||
from pydantic import BaseModel, Field
|
||||
import structlog
|
||||
|
||||
from app.services.nominatim_service import NominatimService
|
||||
|
||||
# Module-level structlog logger shared by all geocoding endpoints.
logger = structlog.get_logger()
# All routes in this module are mounted under /api/v1/geocoding.
router = APIRouter(prefix="/api/v1/geocoding", tags=["Geocoding"])

# Initialize Nominatim service
# In production, override with environment variable for self-hosted instance
# NOTE(review): constructed once at import time and shared across requests —
# assumes NominatimService is safe for concurrent use; confirm.
nominatim_service = NominatimService()
|
||||
|
||||
|
||||
# Response Models
|
||||
class AddressResult(BaseModel):
    """Address search result returned by GET /search.

    Mirrors a single entry of a Nominatim search response. The OSM
    ``class`` attribute is exposed as ``class_`` because ``class`` is a
    Python keyword; the field alias maps it back to ``class`` on the wire.
    """
    display_name: str = Field(..., description="Full formatted address")
    lat: float = Field(..., description="Latitude")
    lon: float = Field(..., description="Longitude")
    osm_type: str = Field(..., description="OSM object type")
    osm_id: int = Field(..., description="OSM object ID")
    place_id: int = Field(..., description="Nominatim place ID")
    type: str = Field(..., description="Place type")
    class_: str = Field(..., alias="class", description="OSM class")
    address: dict = Field(..., description="Parsed address components")
    boundingbox: List[str] = Field(..., description="Bounding box coordinates")
|
||||
|
||||
|
||||
class GeocodeResult(BaseModel):
    """Best-match geocoding result used by GET /geocode and GET /reverse."""
    display_name: str = Field(..., description="Full formatted address")
    lat: float = Field(..., description="Latitude")
    lon: float = Field(..., description="Longitude")
    address: dict = Field(..., description="Parsed address components")
|
||||
|
||||
|
||||
class CoordinateValidation(BaseModel):
    """Result payload for GET /validate; address is only set when valid."""
    valid: bool = Field(..., description="Whether coordinates are valid")
    address: Optional[str] = Field(None, description="Address at coordinates if valid")
|
||||
|
||||
|
||||
# Endpoints
|
||||
@router.get(
    "/search",
    response_model=List[AddressResult],
    summary="Search for addresses",
    description="Search for addresses matching query (autocomplete). Minimum 3 characters required."
)
async def search_addresses(
    q: str = Query(..., min_length=3, description="Search query (minimum 3 characters)"),
    country_code: str = Query("es", description="ISO country code to restrict search"),
    limit: int = Query(10, ge=1, le=50, description="Maximum number of results")
):
    """Autocomplete-style address search.

    Delegates to the shared Nominatim service, restricted to the given
    country, and returns up to ``limit`` matches sorted by relevance.

    Example:
        GET /api/v1/geocoding/search?q=Gran%20Via%20Madrid&limit=5
    """
    try:
        matches = await nominatim_service.search_address(
            query=q, country_code=country_code, limit=limit
        )
        logger.info(
            "Address search request",
            query=q, country=country_code, result_count=len(matches)
        )
        return matches
    except Exception as exc:
        # Any upstream failure surfaces as a 500 with the error message.
        logger.error(
            "Address search failed", query=q, error=str(exc), exc_info=True
        )
        raise HTTPException(
            status_code=500, detail=f"Address search failed: {str(exc)}"
        )
|
||||
|
||||
|
||||
@router.get(
    "/geocode",
    response_model=GeocodeResult,
    summary="Geocode an address",
    description="Convert an address string to coordinates (lat/lon)"
)
async def geocode_address(
    address: str = Query(..., min_length=5, description="Full address to geocode"),
    country_code: str = Query("es", description="ISO country code")
):
    """Forward-geocode an address to its best-matching coordinates.

    Raises 404 when Nominatim finds no match, 500 on service failure.

    Example:
        GET /api/v1/geocoding/geocode?address=Gran%20Via%2028,%20Madrid
    """
    try:
        match = await nominatim_service.geocode_address(
            address=address, country_code=country_code
        )
        if not match:
            raise HTTPException(
                status_code=404, detail=f"Address not found: {address}"
            )
        logger.info(
            "Geocoding request",
            address=address, lat=match["lat"], lon=match["lon"]
        )
        return match
    except HTTPException:
        # Deliberate 404 above — pass through untouched.
        raise
    except Exception as exc:
        logger.error(
            "Geocoding failed", address=address, error=str(exc), exc_info=True
        )
        raise HTTPException(
            status_code=500, detail=f"Geocoding failed: {str(exc)}"
        )
|
||||
|
||||
|
||||
@router.get(
    "/reverse",
    response_model=GeocodeResult,
    summary="Reverse geocode coordinates",
    description="Convert coordinates (lat/lon) to an address"
)
async def reverse_geocode(
    lat: float = Query(..., ge=-90, le=90, description="Latitude"),
    lon: float = Query(..., ge=-180, le=180, description="Longitude")
):
    """Resolve coordinates to the address at that location.

    Raises 404 when no address exists there, 500 on service failure.

    Example:
        GET /api/v1/geocoding/reverse?lat=40.4168&lon=-3.7038
    """
    try:
        located = await nominatim_service.reverse_geocode(
            latitude=lat, longitude=lon
        )
        if not located:
            raise HTTPException(
                status_code=404,
                detail=f"No address found at coordinates: {lat}, {lon}"
            )
        logger.info(
            "Reverse geocoding request",
            lat=lat, lon=lon, address=located["display_name"]
        )
        return located
    except HTTPException:
        # Deliberate 404 above — pass through untouched.
        raise
    except Exception as exc:
        logger.error(
            "Reverse geocoding failed",
            lat=lat, lon=lon, error=str(exc), exc_info=True
        )
        raise HTTPException(
            status_code=500, detail=f"Reverse geocoding failed: {str(exc)}"
        )
|
||||
|
||||
|
||||
@router.get(
    "/validate",
    response_model=CoordinateValidation,
    summary="Validate coordinates",
    description="Check if coordinates point to a valid location"
)
async def validate_coordinates(
    lat: float = Query(..., ge=-90, le=90, description="Latitude"),
    lon: float = Query(..., ge=-180, le=180, description="Longitude")
):
    """Check that coordinates resolve to a real location.

    When valid, the response also carries the address found by a
    follow-up reverse-geocode lookup.

    Example:
        GET /api/v1/geocoding/validate?lat=40.4168&lon=-3.7038
    """
    try:
        valid = await nominatim_service.validate_coordinates(
            latitude=lat, longitude=lon
        )
        address = None
        if valid:
            located = await nominatim_service.reverse_geocode(lat, lon)
            if located:
                address = located["display_name"]
        logger.info(
            "Coordinate validation request", lat=lat, lon=lon, valid=valid
        )
        return {"valid": valid, "address": address}
    except Exception as exc:
        logger.error(
            "Coordinate validation failed",
            lat=lat, lon=lon, error=str(exc), exc_info=True
        )
        raise HTTPException(
            status_code=500,
            detail=f"Coordinate validation failed: {str(exc)}"
        )
|
||||
|
||||
|
||||
@router.get(
    "/health",
    summary="Check geocoding service health",
    description="Check if Nominatim service is accessible"
)
async def health_check():
    """Probe the configured Nominatim instance.

    Returns a healthy status payload describing the backing service, or
    raises 503 when the probe fails or reports unhealthy.
    """
    try:
        reachable = await nominatim_service.health_check()
        if not reachable:
            raise HTTPException(
                status_code=503, detail="Nominatim service is unavailable"
            )
        return {
            "status": "healthy",
            "service": "nominatim",
            "base_url": nominatim_service.base_url,
            "is_public_api": nominatim_service.is_public_api
        }
    except HTTPException:
        # Deliberate 503 above — pass through untouched.
        raise
    except Exception as exc:
        logger.error("Health check failed", error=str(exc), exc_info=True)
        raise HTTPException(
            status_code=503, detail=f"Health check failed: {str(exc)}"
        )
|
||||
452
services/external/app/api/poi_context.py
vendored
Normal file
452
services/external/app/api/poi_context.py
vendored
Normal file
@@ -0,0 +1,452 @@
|
||||
"""
|
||||
POI Context API Endpoints
|
||||
|
||||
REST API for POI detection, retrieval, and management.
|
||||
"""
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from typing import Optional
|
||||
import structlog
|
||||
import uuid
|
||||
|
||||
from app.core.database import get_db
|
||||
from app.services.poi_detection_service import POIDetectionService
|
||||
from app.services.poi_feature_selector import POIFeatureSelector
|
||||
from app.services.competitor_analyzer import CompetitorAnalyzer
|
||||
from app.services.poi_refresh_service import POIRefreshService
|
||||
from app.repositories.poi_context_repository import POIContextRepository
|
||||
from app.cache.poi_cache_service import POICacheService
|
||||
from app.core.redis_client import get_redis_client
|
||||
|
||||
# Module-level structlog logger shared by all POI-context endpoints.
logger = structlog.get_logger()

# NOTE(review): /{tenant_id} is registered before the literal /health route
# below; FastAPI matches routes in declaration order, so GET /poi-context/health
# is likely captured by /{tenant_id} — confirm and reorder if so.
router = APIRouter(prefix="/poi-context", tags=["POI Context"])
|
||||
|
||||
|
||||
@router.post("/{tenant_id}/detect")
async def detect_pois_for_tenant(
    tenant_id: str,
    latitude: float = Query(..., description="Bakery latitude"),
    longitude: float = Query(..., description="Bakery longitude"),
    force_refresh: bool = Query(False, description="Force refresh, skip cache"),
    db: AsyncSession = Depends(get_db)
):
    """
    Detect POIs for a tenant's bakery location.

    Performs automated POI detection using Overpass API, calculates ML features,
    and stores results for demand forecasting.

    Args:
        tenant_id: Tenant UUID (path parameter; validated below).
        latitude: Bakery latitude.
        longitude: Bakery longitude.
        force_refresh: When True, bypass the location-keyed POI cache.
        db: Injected async database session.

    Returns:
        dict with status, source ("cache" or "detection"), the stored
        poi_context and, for fresh detections, the feature selection and
        competitor analysis payloads.

    Raises:
        HTTPException: 400 for a malformed tenant_id, 500 when detection
        or the database save fails.
    """
    try:
        tenant_uuid = uuid.UUID(tenant_id)
    except ValueError:
        raise HTTPException(status_code=400, detail="Invalid tenant_id format")

    logger.info(
        "POI detection requested",
        tenant_id=tenant_id,
        location=(latitude, longitude),
        force_refresh=force_refresh
    )

    try:
        # Initialize services
        poi_service = POIDetectionService()
        feature_selector = POIFeatureSelector()
        competitor_analyzer = CompetitorAnalyzer()
        poi_repo = POIContextRepository(db)
        redis_client = await get_redis_client()
        cache_service = POICacheService(redis_client)

        # Check cache first (unless force refresh). The cache is keyed by
        # location, so a hit is still persisted for this particular tenant.
        if not force_refresh:
            cached_result = await cache_service.get_cached_pois(latitude, longitude)
            if cached_result:
                logger.info("Using cached POI results", tenant_id=tenant_id)
                # Still save to database for this tenant
                poi_context = await poi_repo.create_or_update(tenant_uuid, cached_result)
                return {
                    "status": "success",
                    "source": "cache",
                    "poi_context": poi_context.to_dict()
                }

        # Detect POIs
        poi_results = await poi_service.detect_pois_for_bakery(
            latitude, longitude, tenant_id
        )

        # Select relevant features. Best-effort: on failure, fall back to an
        # empty selection so one analysis step cannot abort the detection.
        try:
            feature_selection = feature_selector.select_relevant_features(
                poi_results["poi_categories"],
                tenant_id
            )
        except Exception as e:
            logger.error(
                "Feature selection failed",
                tenant_id=tenant_id,
                error=str(e),
                exc_info=True
            )
            # Provide default feature selection to continue
            feature_selection = {
                "features": {},
                "relevant_categories": [],
                "relevance_report": [],
                "total_features": 0,
                "total_relevant_categories": 0
            }

        # Analyze competitors specifically. Best-effort: fall back to a
        # neutral "no competition" default on failure.
        try:
            competitors_data = poi_results["poi_categories"].get("competitors", {})
            competitor_pois = competitors_data.get("pois", [])
            competitor_analysis = competitor_analyzer.analyze_competitive_landscape(
                competitor_pois,
                (latitude, longitude),
                tenant_id
            )
        except Exception as e:
            logger.error(
                "Competitor analysis failed",
                tenant_id=tenant_id,
                error=str(e),
                exc_info=True
            )
            # Provide default competitor analysis to continue
            competitor_analysis = {
                "competitive_pressure_score": 0.0,
                "direct_competitors_count": 0,
                "nearby_competitors_count": 0,
                "market_competitors_count": 0,
                "total_competitors_count": 0,
                "competitive_zone": "low_competition",
                "market_type": "underserved",
                "competitive_advantage": "first_mover",
                "ml_feature_competitive_pressure": 0.0,
                "ml_feature_has_direct_competitor": 0,
                "ml_feature_competitor_density_500m": 0,
                "competitor_details": [],
                "nearest_competitor": None
            }

        # Generate competitive insights (best-effort, defaults to empty)
        try:
            competitive_insights = competitor_analyzer.get_competitive_insights(
                competitor_analysis
            )
        except Exception as e:
            logger.warning(
                "Failed to generate competitive insights",
                tenant_id=tenant_id,
                error=str(e)
            )
            competitive_insights = []

        # Combine results
        enhanced_results = {
            **poi_results,
            "ml_features": feature_selection.get("features", {}),
            "relevant_categories": feature_selection.get("relevant_categories", []),
            "relevance_report": feature_selection.get("relevance_report", []),
            "competitor_analysis": competitor_analysis,
            "competitive_insights": competitive_insights
        }

        # Cache results (best-effort — a cache failure is only a warning)
        try:
            await cache_service.cache_poi_results(latitude, longitude, enhanced_results)
        except Exception as e:
            logger.warning(
                "Failed to cache POI results",
                tenant_id=tenant_id,
                error=str(e)
            )

        # Save to database — unlike the steps above, this failure is fatal.
        try:
            poi_context = await poi_repo.create_or_update(tenant_uuid, enhanced_results)
        except Exception as e:
            logger.error(
                "Failed to save POI context to database",
                tenant_id=tenant_id,
                error=str(e),
                exc_info=True
            )
            raise HTTPException(
                status_code=500,
                detail=f"Failed to save POI context: {str(e)}"
            )

        # Schedule automatic refresh job (180 days from now); best-effort.
        try:
            poi_refresh_service = POIRefreshService()
            refresh_job = await poi_refresh_service.schedule_refresh_job(
                tenant_id=tenant_id,
                latitude=latitude,
                longitude=longitude,
                session=db
            )
            logger.info(
                "POI refresh job scheduled",
                tenant_id=tenant_id,
                job_id=str(refresh_job.id),
                scheduled_at=refresh_job.scheduled_at
            )
        except Exception as e:
            logger.warning(
                "Failed to schedule POI refresh job",
                tenant_id=tenant_id,
                error=str(e)
            )

        logger.info(
            "POI detection completed",
            tenant_id=tenant_id,
            total_pois=poi_context.total_pois_detected,
            relevant_categories=len(feature_selection.get("relevant_categories", []))
        )

        return {
            "status": "success",
            "source": "detection",
            "poi_context": poi_context.to_dict(),
            "feature_selection": feature_selection,
            "competitor_analysis": competitor_analysis,
            "competitive_insights": competitive_insights
        }

    except HTTPException:
        # FIX: intentionally raised HTTPExceptions (e.g. the database-save 500
        # above) were previously swallowed by the generic handler below and
        # re-wrapped with a misleading "POI detection failed" detail while
        # being logged twice. Let them propagate unchanged, matching the
        # pattern used by the geocoding endpoints.
        raise
    except Exception as e:
        logger.error(
            "POI detection failed",
            tenant_id=tenant_id,
            error=str(e),
            exc_info=True
        )
        raise HTTPException(
            status_code=500,
            detail=f"POI detection failed: {str(e)}"
        )
|
||||
|
||||
|
||||
@router.get("/{tenant_id}")
async def get_poi_context(
    tenant_id: str,
    db: AsyncSession = Depends(get_db)
):
    """Return the stored POI detection results and ML features for a tenant.

    Raises 400 for a malformed tenant_id and 404 when no context exists.
    """
    try:
        tenant_uuid = uuid.UUID(tenant_id)
    except ValueError:
        raise HTTPException(status_code=400, detail="Invalid tenant_id format")

    repository = POIContextRepository(db)
    context = await repository.get_by_tenant_id(tenant_uuid)
    if context is None:
        raise HTTPException(
            status_code=404,
            detail=f"POI context not found for tenant {tenant_id}"
        )

    # Staleness drives the needs_refresh hint for callers.
    stale = context.is_stale()
    return {
        "poi_context": context.to_dict(),
        "is_stale": stale,
        "needs_refresh": stale
    }
|
||||
|
||||
|
||||
@router.post("/{tenant_id}/refresh")
async def refresh_poi_context(
    tenant_id: str,
    db: AsyncSession = Depends(get_db)
):
    """Re-detect POIs for a tenant's stored location and update the data.

    Requires an existing context (404 otherwise); delegates to the detect
    endpoint with force_refresh=True using the stored coordinates.
    """
    try:
        tenant_uuid = uuid.UUID(tenant_id)
    except ValueError:
        raise HTTPException(status_code=400, detail="Invalid tenant_id format")

    stored = await POIContextRepository(db).get_by_tenant_id(tenant_uuid)
    if stored is None:
        raise HTTPException(
            status_code=404,
            detail=f"POI context not found for tenant {tenant_id}. Use detect endpoint first."
        )

    # Perform detection with force_refresh=True
    return await detect_pois_for_tenant(
        tenant_id=tenant_id,
        latitude=stored.latitude,
        longitude=stored.longitude,
        force_refresh=True,
        db=db
    )
|
||||
|
||||
|
||||
@router.delete("/{tenant_id}")
async def delete_poi_context(
    tenant_id: str,
    db: AsyncSession = Depends(get_db)
):
    """Delete the stored POI context for a tenant (404 when none exists)."""
    try:
        tenant_uuid = uuid.UUID(tenant_id)
    except ValueError:
        raise HTTPException(status_code=400, detail="Invalid tenant_id format")

    removed = await POIContextRepository(db).delete_by_tenant_id(tenant_uuid)
    if not removed:
        raise HTTPException(
            status_code=404,
            detail=f"POI context not found for tenant {tenant_id}"
        )

    return {
        "status": "success",
        "message": f"POI context deleted for tenant {tenant_id}"
    }
|
||||
|
||||
|
||||
@router.get("/{tenant_id}/feature-importance")
async def get_feature_importance(
    tenant_id: str,
    db: AsyncSession = Depends(get_db)
):
    """Summarize which POI categories are relevant for the tenant.

    Runs the feature selector over the stored detection results and reports
    per-category impact scores. Raises 400/404 for bad id / missing context.
    """
    try:
        tenant_uuid = uuid.UUID(tenant_id)
    except ValueError:
        raise HTTPException(status_code=400, detail="Invalid tenant_id format")

    context = await POIContextRepository(db).get_by_tenant_id(tenant_uuid)
    if context is None:
        raise HTTPException(
            status_code=404,
            detail=f"POI context not found for tenant {tenant_id}"
        )

    summary = POIFeatureSelector().get_feature_importance_summary(
        context.poi_detection_results
    )

    relevant_count = sum(1 for entry in summary if entry["is_relevant"])
    return {
        "tenant_id": tenant_id,
        "feature_importance": summary,
        "total_categories": len(summary),
        "relevant_categories": relevant_count
    }
|
||||
|
||||
|
||||
@router.get("/{tenant_id}/competitor-analysis")
async def get_competitor_analysis(
    tenant_id: str,
    db: AsyncSession = Depends(get_db)
):
    """Run a detailed competitor analysis over the tenant's stored POIs.

    Raises 400 for a malformed tenant_id and 404 when no context exists.
    """
    try:
        tenant_uuid = uuid.UUID(tenant_id)
    except ValueError:
        raise HTTPException(status_code=400, detail="Invalid tenant_id format")

    context = await POIContextRepository(db).get_by_tenant_id(tenant_uuid)
    if context is None:
        raise HTTPException(
            status_code=404,
            detail=f"POI context not found for tenant {tenant_id}"
        )

    analyzer = CompetitorAnalyzer()
    # Competitor POIs live under competitors.pois in the stored results.
    competitor_pois = (
        context.poi_detection_results.get("competitors", {}).get("pois", [])
    )
    landscape = analyzer.analyze_competitive_landscape(
        competitor_pois,
        (context.latitude, context.longitude),
        tenant_id
    )

    return {
        "tenant_id": tenant_id,
        "location": {
            "latitude": context.latitude,
            "longitude": context.longitude
        },
        "competitor_analysis": landscape,
        "insights": analyzer.get_competitive_insights(landscape)
    }
|
||||
|
||||
|
||||
# NOTE(review): this literal route is declared after GET /{tenant_id}; since
# FastAPI matches routes in declaration order, /poi-context/health is likely
# captured by /{tenant_id} first — confirm and register this route earlier.
@router.get("/health")
async def poi_health_check():
    """Verify Overpass API accessibility for the POI detection service.

    Raises 503 when the underlying health probe reports unhealthy.
    """
    report = await POIDetectionService().health_check()

    if not report["healthy"]:
        raise HTTPException(
            status_code=503,
            detail=f"POI detection service unhealthy: {report.get('error', 'Unknown error')}"
        )

    return {
        "status": "healthy",
        "overpass_api": report
    }
|
||||
|
||||
|
||||
@router.get("/cache/stats")
async def get_cache_stats():
    """Report POI cache statistics from the Redis-backed cache service."""
    try:
        cache = POICacheService(await get_redis_client())
        return {
            "status": "success",
            "cache_stats": await cache.get_cache_stats()
        }
    except Exception as exc:
        logger.error("Failed to get cache stats", error=str(exc))
        raise HTTPException(
            status_code=500,
            detail=f"Failed to get cache stats: {str(exc)}"
        )
|
||||
441
services/external/app/api/poi_refresh_jobs.py
vendored
Normal file
441
services/external/app/api/poi_refresh_jobs.py
vendored
Normal file
@@ -0,0 +1,441 @@
|
||||
"""
|
||||
POI Refresh Jobs API Endpoints
|
||||
|
||||
REST API for managing POI refresh background jobs.
|
||||
"""
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy import select, desc
|
||||
from typing import List, Optional
|
||||
from datetime import datetime, timezone
|
||||
from pydantic import BaseModel, Field
|
||||
import structlog
|
||||
import uuid
|
||||
|
||||
from app.core.database import get_db
|
||||
from app.services.poi_refresh_service import POIRefreshService
|
||||
from app.services.poi_scheduler import get_scheduler
|
||||
from app.models.poi_refresh_job import POIRefreshJob
|
||||
|
||||
# Module-level structlog logger shared by all refresh-job endpoints.
logger = structlog.get_logger()

# All routes in this module are mounted under /poi-refresh-jobs.
router = APIRouter(prefix="/poi-refresh-jobs", tags=["POI Refresh Jobs"])
|
||||
|
||||
|
||||
# Response Models
|
||||
class POIRefreshJobResponse(BaseModel):
    """API representation of a POIRefreshJob row.

    UUID columns are serialized as strings; the remaining fields mirror
    the ORM model's attributes and computed properties.
    """
    id: str
    tenant_id: str
    status: str
    scheduled_at: datetime
    started_at: Optional[datetime] = None
    completed_at: Optional[datetime] = None
    attempt_count: int
    max_attempts: int
    pois_detected: Optional[int] = None
    changes_detected: bool = False
    change_summary: Optional[dict] = None
    error_message: Optional[str] = None
    next_scheduled_at: Optional[datetime] = None
    duration_seconds: Optional[float] = None
    is_overdue: bool
    can_retry: bool

    class Config:
        from_attributes = True

    @classmethod
    def from_job(cls, job: "POIRefreshJob") -> "POIRefreshJobResponse":
        """Build a response from a POIRefreshJob ORM instance.

        Centralizes the field mapping that is otherwise repeated verbatim
        in every endpoint of this module; UUIDs are stringified explicitly
        because pydantic does not coerce UUID -> str automatically.
        """
        return cls(
            id=str(job.id),
            tenant_id=str(job.tenant_id),
            status=job.status,
            scheduled_at=job.scheduled_at,
            started_at=job.started_at,
            completed_at=job.completed_at,
            attempt_count=job.attempt_count,
            max_attempts=job.max_attempts,
            pois_detected=job.pois_detected,
            changes_detected=job.changes_detected,
            change_summary=job.change_summary,
            error_message=job.error_message,
            next_scheduled_at=job.next_scheduled_at,
            duration_seconds=job.duration_seconds,
            is_overdue=job.is_overdue,
            can_retry=job.can_retry,
        )
|
||||
|
||||
|
||||
class ScheduleJobRequest(BaseModel):
    """Request body for POST /schedule; coordinates are range-validated."""
    tenant_id: str = Field(..., description="Tenant UUID")
    latitude: float = Field(..., ge=-90, le=90, description="Bakery latitude")
    longitude: float = Field(..., ge=-180, le=180, description="Bakery longitude")
    scheduled_at: Optional[datetime] = Field(None, description="When to run (default: 180 days from now)")
|
||||
|
||||
|
||||
class JobExecutionResult(BaseModel):
    """Outcome of a job execution; most fields are optional because the
    success and failure payloads from the refresh service differ."""
    status: str
    job_id: str
    message: Optional[str] = None
    pois_detected: Optional[int] = None
    changes_detected: Optional[bool] = None
    change_summary: Optional[dict] = None
    duration_seconds: Optional[float] = None
    next_scheduled_at: Optional[str] = None
    error: Optional[str] = None
    attempt: Optional[int] = None
    can_retry: Optional[bool] = None
|
||||
|
||||
|
||||
# Endpoints
|
||||
@router.post(
    "/schedule",
    response_model=POIRefreshJobResponse,
    summary="Schedule POI refresh job",
    description="Schedule a background job to refresh POI context for a tenant"
)
async def schedule_refresh_job(
    request: ScheduleJobRequest,
    db: AsyncSession = Depends(get_db)
):
    """Create a background refresh job for the tenant's location.

    The job runs at request.scheduled_at, defaulting (in the service) to
    180 days from now. Raises 400 for a malformed tenant_id, 500 on
    scheduling failure.
    """
    # Parsed only to validate the format; the service takes the raw string.
    try:
        uuid.UUID(request.tenant_id)
    except ValueError:
        raise HTTPException(status_code=400, detail="Invalid tenant_id format")

    try:
        service = POIRefreshService()
        job = await service.schedule_refresh_job(
            tenant_id=request.tenant_id,
            latitude=request.latitude,
            longitude=request.longitude,
            scheduled_at=request.scheduled_at,
            session=db
        )

        logger.info(
            "POI refresh job scheduled via API",
            tenant_id=request.tenant_id,
            job_id=str(job.id),
            scheduled_at=job.scheduled_at
        )

        return POIRefreshJobResponse(
            id=str(job.id),
            tenant_id=str(job.tenant_id),
            status=job.status,
            scheduled_at=job.scheduled_at,
            started_at=job.started_at,
            completed_at=job.completed_at,
            attempt_count=job.attempt_count,
            max_attempts=job.max_attempts,
            pois_detected=job.pois_detected,
            changes_detected=job.changes_detected,
            change_summary=job.change_summary,
            error_message=job.error_message,
            next_scheduled_at=job.next_scheduled_at,
            duration_seconds=job.duration_seconds,
            is_overdue=job.is_overdue,
            can_retry=job.can_retry
        )
    except Exception as exc:
        logger.error(
            "Failed to schedule POI refresh job",
            tenant_id=request.tenant_id,
            error=str(exc),
            exc_info=True
        )
        raise HTTPException(
            status_code=500,
            detail=f"Failed to schedule refresh job: {str(exc)}"
        )
|
||||
|
||||
|
||||
@router.get(
    "/{job_id}",
    response_model=POIRefreshJobResponse,
    summary="Get refresh job by ID",
    description="Retrieve details of a specific POI refresh job"
)
async def get_refresh_job(
    job_id: str,
    db: AsyncSession = Depends(get_db)
):
    """Fetch one POI refresh job by UUID (400 bad id, 404 missing)."""
    try:
        job_uuid = uuid.UUID(job_id)
    except ValueError:
        raise HTTPException(status_code=400, detail="Invalid job_id format")

    row = await db.execute(
        select(POIRefreshJob).where(POIRefreshJob.id == job_uuid)
    )
    job = row.scalar_one_or_none()
    if job is None:
        raise HTTPException(status_code=404, detail=f"Job not found: {job_id}")

    # Map the ORM row onto the response model, stringifying UUID columns.
    fields = dict(
        id=str(job.id),
        tenant_id=str(job.tenant_id),
        status=job.status,
        scheduled_at=job.scheduled_at,
        started_at=job.started_at,
        completed_at=job.completed_at,
        attempt_count=job.attempt_count,
        max_attempts=job.max_attempts,
        pois_detected=job.pois_detected,
        changes_detected=job.changes_detected,
        change_summary=job.change_summary,
        error_message=job.error_message,
        next_scheduled_at=job.next_scheduled_at,
        duration_seconds=job.duration_seconds,
        is_overdue=job.is_overdue,
        can_retry=job.can_retry,
    )
    return POIRefreshJobResponse(**fields)
|
||||
|
||||
|
||||
@router.get(
    "/tenant/{tenant_id}",
    response_model=List[POIRefreshJobResponse],
    summary="Get refresh jobs for tenant",
    description="Retrieve all POI refresh jobs for a specific tenant"
)
async def get_tenant_refresh_jobs(
    tenant_id: str,
    status: Optional[str] = Query(None, description="Filter by status"),
    limit: int = Query(50, ge=1, le=200, description="Maximum number of results"),
    db: AsyncSession = Depends(get_db)
):
    """List a tenant's refresh jobs, newest scheduled first.

    Optionally filtered by status and capped at ``limit`` rows.
    Raises 400 for a malformed tenant_id.
    """
    try:
        tenant_uuid = uuid.UUID(tenant_id)
    except ValueError:
        raise HTTPException(status_code=400, detail="Invalid tenant_id format")

    def _to_response(job) -> POIRefreshJobResponse:
        # One place for the ORM-row -> response mapping.
        return POIRefreshJobResponse(
            id=str(job.id),
            tenant_id=str(job.tenant_id),
            status=job.status,
            scheduled_at=job.scheduled_at,
            started_at=job.started_at,
            completed_at=job.completed_at,
            attempt_count=job.attempt_count,
            max_attempts=job.max_attempts,
            pois_detected=job.pois_detected,
            changes_detected=job.changes_detected,
            change_summary=job.change_summary,
            error_message=job.error_message,
            next_scheduled_at=job.next_scheduled_at,
            duration_seconds=job.duration_seconds,
            is_overdue=job.is_overdue,
            can_retry=job.can_retry
        )

    stmt = select(POIRefreshJob).where(POIRefreshJob.tenant_id == tenant_uuid)
    if status:
        stmt = stmt.where(POIRefreshJob.status == status)
    stmt = stmt.order_by(desc(POIRefreshJob.scheduled_at)).limit(limit)

    rows = await db.execute(stmt)
    return [_to_response(job) for job in rows.scalars().all()]
|
||||
|
||||
|
||||
@router.post(
    "/{job_id}/execute",
    response_model=JobExecutionResult,
    summary="Execute refresh job",
    description="Manually trigger execution of a pending POI refresh job"
)
async def execute_refresh_job(
    job_id: str,
    db: AsyncSession = Depends(get_db)
):
    """Run one refresh job immediately.

    Raises 400 for a malformed job_id, 404 when the service reports the
    job missing (ValueError), and 500 on any other failure.
    """
    try:
        uuid.UUID(job_id)
    except ValueError:
        raise HTTPException(status_code=400, detail="Invalid job_id format")

    try:
        outcome = await POIRefreshService().execute_refresh_job(
            job_id=job_id, session=db
        )
        logger.info(
            "POI refresh job executed via API",
            job_id=job_id, status=outcome["status"]
        )
        return JobExecutionResult(**outcome)
    except ValueError as exc:
        # The service signals "unknown job" with ValueError.
        raise HTTPException(status_code=404, detail=str(exc))
    except Exception as exc:
        logger.error(
            "Failed to execute POI refresh job",
            job_id=job_id, error=str(exc), exc_info=True
        )
        raise HTTPException(
            status_code=500,
            detail=f"Failed to execute refresh job: {str(exc)}"
        )
|
||||
|
||||
|
||||
@router.post(
    "/process-pending",
    summary="Process all pending jobs",
    description="Manually trigger processing of all pending POI refresh jobs"
)
async def process_pending_jobs(
    max_concurrent: int = Query(5, ge=1, le=20, description="Max concurrent executions"),
    db: AsyncSession = Depends(get_db)
):
    """
    Process every pending POI refresh job.

    Delegates batch processing to POIRefreshService with a bounded
    concurrency level, logs the aggregate outcome, and returns the
    service's summary dict unchanged.
    """
    try:
        service = POIRefreshService()
        summary = await service.process_pending_jobs(
            max_concurrent=max_concurrent,
            session=db
        )

        logger.info(
            "Pending POI refresh jobs processed via API",
            total_jobs=summary["total_jobs"],
            successful=summary["successful"],
            failed=summary["failed"]
        )

        return summary

    except Exception as e:
        logger.error(
            "Failed to process pending POI refresh jobs",
            error=str(e),
            exc_info=True
        )
        raise HTTPException(
            status_code=500,
            detail=f"Failed to process pending jobs: {str(e)}"
        )
||||
|
||||
@router.get(
    "/pending",
    response_model=List[POIRefreshJobResponse],
    summary="Get pending jobs",
    description="Retrieve all pending POI refresh jobs that are due for execution"
)
async def get_pending_jobs(
    limit: int = Query(100, ge=1, le=500, description="Maximum number of results"),
    db: AsyncSession = Depends(get_db)
):
    """
    Get all pending POI refresh jobs that are due for execution.

    Fetches jobs from POIRefreshService and serializes each into a
    POIRefreshJobResponse; any failure is logged and mapped to a 500.
    """
    # Attributes copied 1:1 from the ORM job onto the response model.
    passthrough_fields = (
        "status", "scheduled_at", "started_at", "completed_at",
        "attempt_count", "max_attempts", "pois_detected",
        "changes_detected", "change_summary", "error_message",
        "next_scheduled_at", "duration_seconds", "is_overdue",
        "can_retry",
    )

    try:
        service = POIRefreshService()
        pending = await service.get_pending_jobs(
            limit=limit,
            session=db
        )

        return [
            POIRefreshJobResponse(
                id=str(job.id),
                tenant_id=str(job.tenant_id),
                **{name: getattr(job, name) for name in passthrough_fields}
            )
            for job in pending
        ]

    except Exception as e:
        logger.error(
            "Failed to get pending POI refresh jobs",
            error=str(e),
            exc_info=True
        )
        raise HTTPException(
            status_code=500,
            detail=f"Failed to get pending jobs: {str(e)}"
        )
||||
|
||||
@router.post(
    "/trigger-scheduler",
    summary="Trigger scheduler immediately",
    description="Trigger an immediate check for pending jobs (bypasses schedule)"
)
async def trigger_scheduler():
    """
    Trigger the POI refresh scheduler to run an immediate check.

    Returns the scheduler's run summary. Responds 503 when the scheduler
    is not running and 500 on any unexpected failure.
    """
    try:
        scheduler = get_scheduler()

        # Cannot trigger a check on a stopped scheduler.
        if not scheduler.is_running:
            raise HTTPException(
                status_code=503,
                detail="POI refresh scheduler is not running"
            )

        outcome = await scheduler.trigger_immediate_check()

        logger.info(
            "POI refresh scheduler triggered via API",
            total_jobs=outcome["total_jobs"],
            successful=outcome["successful"],
            failed=outcome["failed"]
        )

        return outcome

    except HTTPException:
        # Re-raise our own 503 untouched; only wrap unexpected errors.
        raise
    except Exception as e:
        logger.error(
            "Failed to trigger POI refresh scheduler",
            error=str(e),
            exc_info=True
        )
        raise HTTPException(
            status_code=500,
            detail=f"Failed to trigger scheduler: {str(e)}"
        )
||||
|
||||
@router.get(
    "/scheduler/status",
    summary="Get scheduler status",
    description="Check if POI refresh scheduler is running"
)
async def get_scheduler_status():
    """
    Report the POI refresh scheduler's current status.

    Returns a dict with the running flag, the check interval in seconds,
    and the configured concurrency limit; failures map to a logged 500.
    """
    try:
        scheduler = get_scheduler()

        status_report = {
            "is_running": scheduler.is_running,
            "check_interval_seconds": scheduler.check_interval_seconds,
            "max_concurrent_jobs": scheduler.max_concurrent_jobs,
        }
        return status_report

    except Exception as e:
        logger.error(
            "Failed to get scheduler status",
            error=str(e),
            exc_info=True
        )
        raise HTTPException(
            status_code=500,
            detail=f"Failed to get scheduler status: {str(e)}"
        )
|
||||
Reference in New Issue
Block a user