"""
Internal Demo Cloning API for Forecasting Service

Service-to-service endpoint for cloning forecast data
"""
from fastapi import APIRouter, Depends, HTTPException, Header
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select
import structlog
import uuid
from datetime import datetime, timezone, timedelta
from typing import Optional
import os
import sys
from pathlib import Path

# Make the project root importable so the shared demo-date helpers can be resolved
sys.path.insert(0, str(Path(__file__).parent.parent.parent.parent))
from shared.utils.demo_dates import adjust_date_for_demo, BASE_REFERENCE_DATE
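
# The shared helper above is assumed to work roughly as follows (sketch only,
# not the actual shared implementation): adjust_date_for_demo shifts a template
# timestamp by the offset between the demo session start and BASE_REFERENCE_DATE,
# so cloned records stay current relative to the session. Approximately:
#
#     def adjust_date_for_demo(original_date, session_time, reference_date):
#         return original_date + (session_time - reference_date)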

from app.core.database import get_db
from app.models.forecasts import Forecast, PredictionBatch

logger = structlog.get_logger()

router = APIRouter(prefix="/internal/demo", tags=["internal"])

# Base demo tenant IDs
DEMO_TENANT_PROFESSIONAL = "a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6"


def verify_internal_api_key(x_internal_api_key: Optional[str] = Header(None)):
    """Verify internal API key for service-to-service communication"""
    from app.core.config import settings

    if x_internal_api_key != settings.INTERNAL_API_KEY:
        logger.warning("Unauthorized internal API access attempted")
        raise HTTPException(status_code=403, detail="Invalid internal API key")
    return True
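
# Note: FastAPI exposes the x_internal_api_key parameter as the
# X-Internal-Api-Key request header (underscores become hyphens, matching is
# case-insensitive), so internal callers must send that header on every request.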


@router.post("/clone")
async def clone_demo_data(
    base_tenant_id: str,
    virtual_tenant_id: str,
    demo_account_type: str,
    session_id: Optional[str] = None,
    session_created_at: Optional[str] = None,
    db: AsyncSession = Depends(get_db),
    _: bool = Depends(verify_internal_api_key)
):
    """
    Clone forecasting service data for a virtual demo tenant

    Clones:
    - Forecasts (historical predictions)
    - Prediction batches (batch prediction records)

    Args:
        base_tenant_id: Template tenant UUID to clone from
        virtual_tenant_id: Target virtual tenant UUID
        demo_account_type: Type of demo account
        session_id: Originating session ID for tracing
        session_created_at: ISO timestamp when demo session was created (for date adjustment)

    Returns:
        Cloning status and record counts
    """
    start_time = datetime.now(timezone.utc)

    # Parse session_created_at, falling back to the current time if missing or invalid
    if session_created_at:
        try:
            # fromisoformat() on older Python versions does not accept a trailing 'Z'
            session_time = datetime.fromisoformat(session_created_at.replace('Z', '+00:00'))
        except (ValueError, AttributeError) as e:
            logger.warning(
                "Invalid session_created_at format, using current time",
                session_created_at=session_created_at,
                error=str(e)
            )
            session_time = datetime.now(timezone.utc)
    else:
        logger.warning("session_created_at not provided, using current time")
        session_time = datetime.now(timezone.utc)

    logger.info(
        "Starting forecasting data cloning",
        base_tenant_id=base_tenant_id,
        virtual_tenant_id=virtual_tenant_id,
        demo_account_type=demo_account_type,
        session_id=session_id,
        session_time=session_time.isoformat()
    )

    try:
        # Validate UUIDs
        base_uuid = uuid.UUID(base_tenant_id)
        virtual_uuid = uuid.UUID(virtual_tenant_id)

        # Track cloning statistics
        stats = {
            "forecasts": 0,
            "prediction_batches": 0
        }

        # Clone Forecasts
        result = await db.execute(
            select(Forecast).where(Forecast.tenant_id == base_uuid)
        )
        base_forecasts = result.scalars().all()

        logger.info(
            "Found forecasts to clone",
            count=len(base_forecasts),
            base_tenant=str(base_uuid)
        )

        for forecast in base_forecasts:
            adjusted_forecast_date = adjust_date_for_demo(
                forecast.forecast_date,
                session_time,
                BASE_REFERENCE_DATE
            ) if forecast.forecast_date else None

            new_forecast = Forecast(
                id=uuid.uuid4(),
                tenant_id=virtual_uuid,
                inventory_product_id=forecast.inventory_product_id,  # Keep product reference
                product_name=forecast.product_name,
                location=forecast.location,
                forecast_date=adjusted_forecast_date,
                created_at=session_time,
                predicted_demand=forecast.predicted_demand,
                confidence_lower=forecast.confidence_lower,
                confidence_upper=forecast.confidence_upper,
                confidence_level=forecast.confidence_level,
                model_id=forecast.model_id,
                model_version=forecast.model_version,
                algorithm=forecast.algorithm,
                business_type=forecast.business_type,
                day_of_week=forecast.day_of_week,
                is_holiday=forecast.is_holiday,
                is_weekend=forecast.is_weekend,
                weather_temperature=forecast.weather_temperature,
                weather_precipitation=forecast.weather_precipitation,
                weather_description=forecast.weather_description,
                traffic_volume=forecast.traffic_volume,
                processing_time_ms=forecast.processing_time_ms,
                features_used=forecast.features_used
            )
            db.add(new_forecast)
            stats["forecasts"] += 1

        # Clone Prediction Batches
        result = await db.execute(
            select(PredictionBatch).where(PredictionBatch.tenant_id == base_uuid)
        )
        base_batches = result.scalars().all()

        logger.info(
            "Found prediction batches to clone",
            count=len(base_batches),
            base_tenant=str(base_uuid)
        )

        for batch in base_batches:
            adjusted_requested_at = adjust_date_for_demo(
                batch.requested_at,
                session_time,
                BASE_REFERENCE_DATE
            ) if batch.requested_at else None
            adjusted_completed_at = adjust_date_for_demo(
                batch.completed_at,
                session_time,
                BASE_REFERENCE_DATE
            ) if batch.completed_at else None

            new_batch = PredictionBatch(
                id=uuid.uuid4(),
                tenant_id=virtual_uuid,
                batch_name=batch.batch_name,
                requested_at=adjusted_requested_at,
                completed_at=adjusted_completed_at,
                status=batch.status,
                total_products=batch.total_products,
                completed_products=batch.completed_products,
                failed_products=batch.failed_products,
                forecast_days=batch.forecast_days,
                business_type=batch.business_type,
                error_message=batch.error_message,
                processing_time_ms=batch.processing_time_ms,
                cancelled_by=batch.cancelled_by
            )
            db.add(new_batch)
            stats["prediction_batches"] += 1

        # Commit all changes
        await db.commit()

        total_records = sum(stats.values())
        duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)

        logger.info(
            "Forecasting data cloning completed",
            virtual_tenant_id=virtual_tenant_id,
            total_records=total_records,
            stats=stats,
            duration_ms=duration_ms
        )

        return {
            "service": "forecasting",
            "status": "completed",
            "records_cloned": total_records,
            "duration_ms": duration_ms,
            "details": stats
        }

    except ValueError as e:
        logger.error("Invalid UUID format", error=str(e))
        raise HTTPException(status_code=400, detail=f"Invalid UUID: {str(e)}")

    except Exception as e:
        logger.error(
            "Failed to clone forecasting data",
            error=str(e),
            virtual_tenant_id=virtual_tenant_id,
            exc_info=True
        )

        # Rollback on error
        await db.rollback()

        return {
            "service": "forecasting",
            "status": "failed",
            "records_cloned": 0,
            "duration_ms": int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000),
            "error": str(e)
        }
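
# Example of how an orchestrator might invoke this endpoint (sketch only;
# FORECASTING_SERVICE_URL and session_id are assumptions, not defined in this
# module). The scalar parameters of clone_demo_data are plain query parameters,
# and the internal key travels in the X-Internal-Api-Key header:
#
#     async with httpx.AsyncClient() as client:
#         resp = await client.post(
#             f"{FORECASTING_SERVICE_URL}/internal/demo/clone",
#             params={
#                 "base_tenant_id": DEMO_TENANT_PROFESSIONAL,
#                 "virtual_tenant_id": str(uuid.uuid4()),
#                 "demo_account_type": "professional",
#                 "session_id": session_id,
#                 "session_created_at": datetime.now(timezone.utc).isoformat(),
#             },
#             headers={"X-Internal-Api-Key": settings.INTERNAL_API_KEY},
#         )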


@router.get("/clone/health")
async def clone_health_check(_: bool = Depends(verify_internal_api_key)):
    """
    Health check for internal cloning endpoint

    Used by orchestrator to verify service availability
    """
    return {
        "service": "forecasting",
        "clone_endpoint": "available",
        "version": "2.0.0"
    }