Improve the demo feature of the project

This commit is contained in:
Urtzi Alfaro
2025-10-12 18:47:33 +02:00
parent dbc7f2fa0d
commit 7556a00db7
168 changed files with 10102 additions and 18869 deletions

View File

@@ -27,8 +27,7 @@ COPY --from=shared /shared /app/shared
# Copy the forecasting service's application code into the image workdir
COPY services/forecasting/ .
# Copy the repo-level scripts directory so utility/maintenance scripts are available in the container
COPY scripts/ /app/scripts/
# Put the app and the shared libraries on the Python import path;
# ${PYTHONPATH:-} keeps the build valid when PYTHONPATH is unset in the base image
ENV PYTHONPATH="/app:/app/shared:${PYTHONPATH:-}"

View File

@@ -0,0 +1,221 @@
"""
Internal Demo Cloning API for Forecasting Service
Service-to-service endpoint for cloning forecast data
"""
import os
import secrets
import uuid
from datetime import datetime, timezone, timedelta
from typing import Optional

import structlog
from fastapi import APIRouter, Depends, HTTPException, Header
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession

from app.core.database import get_db
from app.models.forecasts import Forecast, PredictionBatch
logger = structlog.get_logger()
router = APIRouter(prefix="/internal/demo", tags=["internal"])
# Internal API key for service-to-service auth.
# SECURITY NOTE(review): the fallback value is for local development only —
# production deployments must set INTERNAL_API_KEY, otherwise any caller that
# knows this public default can invoke the cloning endpoints.
INTERNAL_API_KEY = os.getenv("INTERNAL_API_KEY", "dev-internal-key-change-in-production")
# Base demo tenant IDs — template tenants whose data virtual demo tenants are cloned from.
DEMO_TENANT_SAN_PABLO = "a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6"
DEMO_TENANT_LA_ESPIGA = "b2c3d4e5-f6a7-48b9-c0d1-e2f3a4b5c6d7"
def verify_internal_api_key(x_internal_api_key: Optional[str] = Header(None)) -> bool:
    """Verify the internal API key for service-to-service communication.

    Args:
        x_internal_api_key: Value of the ``X-Internal-Api-Key`` request header.

    Returns:
        True when the supplied key matches the configured INTERNAL_API_KEY.

    Raises:
        HTTPException: 403 when the header is missing or does not match.
    """
    # Use a constant-time comparison: a plain `!=` short-circuits on the first
    # differing character, which leaks key prefixes through response timing.
    if x_internal_api_key is None or not secrets.compare_digest(
        x_internal_api_key, INTERNAL_API_KEY
    ):
        logger.warning("Unauthorized internal API access attempted")
        raise HTTPException(status_code=403, detail="Invalid internal API key")
    return True
async def _clone_forecasts(db: AsyncSession, base_uuid, virtual_uuid):
    """Clone every Forecast row of base_uuid onto virtual_uuid.

    Returns (count, date_offset) where date_offset shifts the newest base
    forecast date to "now" (zero timedelta when there is nothing to clone).
    """
    result = await db.execute(
        select(Forecast).where(Forecast.tenant_id == base_uuid)
    )
    base_forecasts = result.scalars().all()
    logger.info(
        "Found forecasts to clone",
        count=len(base_forecasts),
        base_tenant=str(base_uuid)
    )
    # Shift all forecast dates so the most recent template forecast lands on
    # today, keeping demo data looking fresh regardless of template age.
    # NOTE(review): assumes Forecast.forecast_date is comparable with an
    # aware datetime.now(timezone.utc) — confirm against the model definition.
    if base_forecasts:
        max_date = max(forecast.forecast_date for forecast in base_forecasts)
        date_offset = datetime.now(timezone.utc) - max_date
    else:
        date_offset = timedelta(days=0)
    for forecast in base_forecasts:
        db.add(Forecast(
            id=uuid.uuid4(),
            tenant_id=virtual_uuid,
            inventory_product_id=forecast.inventory_product_id,  # Keep product reference
            product_name=forecast.product_name,
            location=forecast.location,
            forecast_date=forecast.forecast_date + date_offset,
            created_at=datetime.now(timezone.utc),
            predicted_demand=forecast.predicted_demand,
            confidence_lower=forecast.confidence_lower,
            confidence_upper=forecast.confidence_upper,
            confidence_level=forecast.confidence_level,
            model_id=forecast.model_id,
            model_version=forecast.model_version,
            algorithm=forecast.algorithm,
            business_type=forecast.business_type,
            day_of_week=forecast.day_of_week,
            is_holiday=forecast.is_holiday,
            is_weekend=forecast.is_weekend,
            weather_temperature=forecast.weather_temperature,
            weather_precipitation=forecast.weather_precipitation,
            weather_description=forecast.weather_description,
            traffic_volume=forecast.traffic_volume,
            processing_time_ms=forecast.processing_time_ms,
            features_used=forecast.features_used
        ))
    return len(base_forecasts), date_offset


async def _clone_prediction_batches(db: AsyncSession, base_uuid, virtual_uuid, date_offset):
    """Clone every PredictionBatch row of base_uuid onto virtual_uuid.

    Timestamps are shifted by date_offset so batch records line up with the
    shifted forecasts. Returns the number of cloned rows.
    """
    result = await db.execute(
        select(PredictionBatch).where(PredictionBatch.tenant_id == base_uuid)
    )
    base_batches = result.scalars().all()
    logger.info(
        "Found prediction batches to clone",
        count=len(base_batches),
        base_tenant=str(base_uuid)
    )
    for batch in base_batches:
        db.add(PredictionBatch(
            id=uuid.uuid4(),
            tenant_id=virtual_uuid,
            batch_name=batch.batch_name,
            requested_at=batch.requested_at + date_offset,
            # completed_at is nullable (still-running or failed batches).
            completed_at=batch.completed_at + date_offset if batch.completed_at else None,
            status=batch.status,
            total_products=batch.total_products,
            completed_products=batch.completed_products,
            failed_products=batch.failed_products,
            forecast_days=batch.forecast_days,
            business_type=batch.business_type,
            error_message=batch.error_message,
            processing_time_ms=batch.processing_time_ms,
            cancelled_by=batch.cancelled_by
        ))
    return len(base_batches)


@router.post("/clone")
async def clone_demo_data(
    base_tenant_id: str,
    virtual_tenant_id: str,
    demo_account_type: str,
    session_id: Optional[str] = None,
    db: AsyncSession = Depends(get_db),
    _: bool = Depends(verify_internal_api_key)
):
    """
    Clone forecasting service data for a virtual demo tenant.

    Clones:
    - Forecasts (historical predictions)
    - Prediction batches (batch prediction records)

    Args:
        base_tenant_id: Template tenant UUID to clone from
        virtual_tenant_id: Target virtual tenant UUID
        demo_account_type: Type of demo account (logged for tracing only)
        session_id: Originating session ID for tracing
        db: Async database session (injected)

    Returns:
        Cloning status and record counts. On non-validation failures the
        response still carries HTTP 200 with status "failed" so the demo
        orchestrator can aggregate per-service results.

    Raises:
        HTTPException: 400 when either tenant ID is not a valid UUID.
    """
    start_time = datetime.now(timezone.utc)
    logger.info(
        "Starting forecasting data cloning",
        base_tenant_id=base_tenant_id,
        virtual_tenant_id=virtual_tenant_id,
        demo_account_type=demo_account_type,
        session_id=session_id
    )
    try:
        # Validate UUIDs before touching the database (raises ValueError).
        base_uuid = uuid.UUID(base_tenant_id)
        virtual_uuid = uuid.UUID(virtual_tenant_id)

        # Clone each data set; the forecast step also yields the date offset
        # that is reused to shift batch timestamps consistently.
        forecast_count, date_offset = await _clone_forecasts(db, base_uuid, virtual_uuid)
        batch_count = await _clone_prediction_batches(db, base_uuid, virtual_uuid, date_offset)
        stats = {
            "forecasts": forecast_count,
            "prediction_batches": batch_count
        }

        # Commit all cloned rows in one transaction.
        await db.commit()

        total_records = sum(stats.values())
        duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
        logger.info(
            "Forecasting data cloning completed",
            virtual_tenant_id=virtual_tenant_id,
            total_records=total_records,
            stats=stats,
            duration_ms=duration_ms
        )
        return {
            "service": "forecasting",
            "status": "completed",
            "records_cloned": total_records,
            "duration_ms": duration_ms,
            "details": stats
        }
    except ValueError as e:
        logger.error("Invalid UUID format", error=str(e))
        raise HTTPException(status_code=400, detail=f"Invalid UUID: {str(e)}")
    except Exception as e:
        logger.error(
            "Failed to clone forecasting data",
            error=str(e),
            virtual_tenant_id=virtual_tenant_id,
            exc_info=True
        )
        # Roll back the partial transaction so the session stays usable.
        await db.rollback()
        return {
            "service": "forecasting",
            "status": "failed",
            "records_cloned": 0,
            "duration_ms": int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000),
            "error": str(e)
        }
@router.get("/clone/health")
async def clone_health_check(_: bool = Depends(verify_internal_api_key)):
    """Report availability of the internal cloning endpoint.

    Called by the demo orchestrator to verify this service is reachable
    and authorized before dispatching clone work to it.
    """
    payload = {
        "service": "forecasting",
        "clone_endpoint": "available",
        "version": "2.0.0",
    }
    return payload

View File

@@ -15,7 +15,7 @@ from app.services.forecasting_alert_service import ForecastingAlertService
from shared.service_base import StandardFastAPIService
# Import API routers
from app.api import forecasts, forecasting_operations, analytics, scenario_operations
from app.api import forecasts, forecasting_operations, analytics, scenario_operations, internal_demo
class ForecastingService(StandardFastAPIService):
@@ -167,6 +167,7 @@ service.add_router(forecasts.router)
service.add_router(forecasting_operations.router)
service.add_router(analytics.router)
service.add_router(scenario_operations.router)
service.add_router(internal_demo.router)
if __name__ == "__main__":
import uvicorn