"""
|
|
Internal Demo Cloning API for Sales Service
|
|
Service-to-service endpoint for cloning sales data
|
|
"""
|
|
|
|
from fastapi import APIRouter, Depends, HTTPException, Header
|
|
from sqlalchemy.ext.asyncio import AsyncSession
|
|
from sqlalchemy import select, delete, func
|
|
import structlog
|
|
import uuid
|
|
from datetime import datetime, timezone, timedelta
|
|
from typing import Optional
|
|
import os
|
|
from decimal import Decimal
|
|
import sys
|
|
import json
|
|
from pathlib import Path
|
|
|
|
sys.path.insert(0, str(Path(__file__).parent.parent.parent.parent))
|
|
from shared.utils.demo_dates import adjust_date_for_demo, resolve_time_marker
|
|
|
|
from app.core.database import get_db
|
|
from app.models.sales import SalesData
|
|
|
|
from app.core.config import settings
|
|
|
|
# Module-level structured logger and API router for this service's endpoints.
logger = structlog.get_logger()

router = APIRouter()

# Base demo tenant IDs
# NOTE(review): DEMO_TENANT_PROFESSIONAL is not referenced anywhere in this
# module — presumably used by callers or kept for parity with sibling
# services; confirm before removing.
DEMO_TENANT_PROFESSIONAL = "a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6"
|
|
|
|
|
|
def parse_date_field(
    field_value: object,
    session_time: datetime,
    field_name: str = "date"
) -> Optional[datetime]:
    """
    Parse a date field from JSON, supporting BASE_TS markers and ISO timestamps.

    Args:
        field_value: The date field value — a "BASE_TS..." marker string,
            an ISO-8601 timestamp string, or None. Any other type or
            format is logged and treated as missing.
            (Annotated as ``object`` — the previous ``any`` annotation
            referenced the builtin function, not a type.)
        session_time: Session creation time (timezone-aware UTC), used as
            the anchor when resolving markers and shifting legacy dates.
        field_name: Name of the field (used only for logging).

    Returns:
        Timezone-aware UTC datetime, or None when the value is absent or
        unparseable (callers supply their own fallback).
    """
    if field_value is None:
        return None

    # Handle BASE_TS markers (relative offsets resolved against session_time).
    if isinstance(field_value, str) and field_value.startswith("BASE_TS"):
        try:
            return resolve_time_marker(field_value, session_time)
        except (ValueError, AttributeError) as e:
            logger.warning(
                "Failed to resolve BASE_TS marker",
                field_name=field_name,
                marker=field_value,
                error=str(e)
            )
            return None

    # Handle ISO timestamps (legacy format - convert to absolute datetime).
    # 'Z' is normalized to '+00:00' because datetime.fromisoformat rejects
    # the 'Z' suffix before Python 3.11.
    if isinstance(field_value, str) and ('T' in field_value or 'Z' in field_value):
        try:
            parsed_date = datetime.fromisoformat(field_value.replace('Z', '+00:00'))
            # Adjust relative to session time
            return adjust_date_for_demo(parsed_date, session_time)
        except (ValueError, AttributeError) as e:
            logger.warning(
                "Failed to parse ISO timestamp",
                field_name=field_name,
                value=field_value,
                error=str(e)
            )
            return None

    logger.warning(
        "Unknown date format",
        field_name=field_name,
        value=field_value,
        value_type=type(field_value).__name__
    )
    return None
|
|
|
|
|
|
def verify_internal_api_key(x_internal_api_key: Optional[str] = Header(None)):
    """Guard dependency: verify the internal service-to-service API key.

    Compares the X-Internal-API-Key header against the configured key.

    Returns:
        True when the key matches.

    Raises:
        HTTPException: 403 when the header is missing or does not match.
    """
    if x_internal_api_key == settings.INTERNAL_API_KEY:
        return True
    logger.warning("Unauthorized internal API access attempted")
    raise HTTPException(status_code=403, detail="Invalid internal API key")
|
|
|
|
|
|
@router.post("/internal/demo/clone")
|
|
async def clone_demo_data(
|
|
base_tenant_id: str,
|
|
virtual_tenant_id: str,
|
|
demo_account_type: str,
|
|
session_id: Optional[str] = None,
|
|
session_created_at: Optional[str] = None,
|
|
db: AsyncSession = Depends(get_db),
|
|
_: bool = Depends(verify_internal_api_key)
|
|
):
|
|
"""
|
|
Clone sales service data for a virtual demo tenant
|
|
|
|
Clones:
|
|
- Sales history records from template tenant
|
|
- Adjusts dates to recent timeframe
|
|
- Updates product references to new virtual tenant
|
|
|
|
Args:
|
|
base_tenant_id: Template tenant UUID to clone from
|
|
virtual_tenant_id: Target virtual tenant UUID
|
|
demo_account_type: Type of demo account
|
|
session_id: Originating session ID for tracing
|
|
session_created_at: ISO timestamp when demo session was created (for date adjustment)
|
|
|
|
Returns:
|
|
Cloning status and record counts
|
|
"""
|
|
start_time = datetime.now(timezone.utc)
|
|
|
|
# Parse session_created_at or fallback to now
|
|
if session_created_at:
|
|
try:
|
|
session_time = datetime.fromisoformat(session_created_at.replace('Z', '+00:00'))
|
|
except (ValueError, AttributeError) as e:
|
|
logger.warning(
|
|
"Invalid session_created_at format, using current time",
|
|
session_created_at=session_created_at,
|
|
error=str(e)
|
|
)
|
|
session_time = datetime.now(timezone.utc)
|
|
else:
|
|
logger.warning("session_created_at not provided, using current time")
|
|
session_time = datetime.now(timezone.utc)
|
|
|
|
logger.info(
|
|
"Starting sales data cloning",
|
|
base_tenant_id=base_tenant_id,
|
|
virtual_tenant_id=virtual_tenant_id,
|
|
demo_account_type=demo_account_type,
|
|
session_id=session_id,
|
|
session_time=session_time.isoformat()
|
|
)
|
|
|
|
try:
|
|
# Validate UUIDs
|
|
base_uuid = uuid.UUID(base_tenant_id)
|
|
virtual_uuid = uuid.UUID(virtual_tenant_id)
|
|
|
|
# Track cloning statistics
|
|
stats = {
|
|
"sales_records": 0,
|
|
}
|
|
|
|
# Load seed data from JSON files
|
|
from shared.utils.seed_data_paths import get_seed_data_path
|
|
|
|
if demo_account_type == "professional":
|
|
json_file = get_seed_data_path("professional", "09-sales.json")
|
|
elif demo_account_type == "enterprise":
|
|
json_file = get_seed_data_path("enterprise", "09-sales.json")
|
|
elif demo_account_type == "enterprise_child":
|
|
json_file = get_seed_data_path("enterprise", "09-sales.json", child_id=base_tenant_id)
|
|
else:
|
|
raise ValueError(f"Invalid demo account type: {demo_account_type}")
|
|
|
|
# Load JSON data
|
|
with open(json_file, 'r', encoding='utf-8') as f:
|
|
seed_data = json.load(f)
|
|
|
|
logger.info(
|
|
"Loaded sales seed data",
|
|
sales_records=len(seed_data.get('sales_data', []))
|
|
)
|
|
|
|
# Load Sales Data from seed data
|
|
for sale_data in seed_data.get('sales_data', []):
|
|
# Parse date field (supports BASE_TS markers and ISO timestamps)
|
|
# Different demo types may use different field names for the date
|
|
# Prioritize in order: date, sale_date, sales_date
|
|
date_value = (sale_data.get('date') or
|
|
sale_data.get('sale_date') or
|
|
sale_data.get('sales_date'))
|
|
|
|
adjusted_date = parse_date_field(
|
|
date_value,
|
|
session_time,
|
|
"date"
|
|
)
|
|
|
|
# Ensure date is not None for NOT NULL constraint by using session_time as fallback
|
|
if adjusted_date is None:
|
|
adjusted_date = session_time
|
|
|
|
# Create new sales record with adjusted date
|
|
# Map different possible JSON field names to the correct model field names
|
|
new_sale = SalesData(
|
|
id=uuid.uuid4(),
|
|
tenant_id=virtual_uuid,
|
|
date=adjusted_date,
|
|
inventory_product_id=sale_data.get('inventory_product_id') or sale_data.get('product_id'), # inventory_product_id is the model field
|
|
quantity_sold=sale_data.get('quantity_sold') or sale_data.get('quantity', 0.0), # quantity_sold is the model field
|
|
unit_price=sale_data.get('unit_price', 0.0), # unit_price is the model field
|
|
revenue=sale_data.get('revenue') or sale_data.get('total_revenue') or sale_data.get('total_amount', 0.0), # revenue is the model field
|
|
cost_of_goods=sale_data.get('cost_of_goods', 0.0), # cost_of_goods is the model field
|
|
discount_applied=sale_data.get('discount_applied', 0.0), # discount_applied is the model field
|
|
location_id=sale_data.get('location_id'),
|
|
sales_channel=sale_data.get('sales_channel', 'IN_STORE'), # sales_channel is the model field
|
|
source="demo_clone", # Mark as seeded
|
|
is_validated=sale_data.get('is_validated', True),
|
|
validation_notes=sale_data.get('validation_notes'),
|
|
notes=sale_data.get('notes'),
|
|
weather_condition=sale_data.get('weather_condition'),
|
|
is_holiday=sale_data.get('is_holiday', False),
|
|
is_weekend=sale_data.get('is_weekend', False),
|
|
created_at=session_time,
|
|
updated_at=session_time
|
|
)
|
|
db.add(new_sale)
|
|
stats["sales_records"] += 1
|
|
|
|
# Commit all changes
|
|
await db.commit()
|
|
|
|
total_records = sum(stats.values())
|
|
duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
|
|
|
|
logger.info(
|
|
"Sales data cloning completed",
|
|
virtual_tenant_id=virtual_tenant_id,
|
|
total_records=total_records,
|
|
stats=stats,
|
|
duration_ms=duration_ms
|
|
)
|
|
|
|
return {
|
|
"service": "sales",
|
|
"status": "completed",
|
|
"records_cloned": total_records,
|
|
"duration_ms": duration_ms,
|
|
"details": stats
|
|
}
|
|
|
|
except ValueError as e:
|
|
logger.error("Invalid UUID format", error=str(e))
|
|
raise HTTPException(status_code=400, detail=f"Invalid UUID: {str(e)}")
|
|
|
|
except Exception as e:
|
|
logger.error(
|
|
"Failed to clone sales data",
|
|
error=str(e),
|
|
virtual_tenant_id=virtual_tenant_id,
|
|
exc_info=True
|
|
)
|
|
|
|
# Rollback on error
|
|
await db.rollback()
|
|
|
|
return {
|
|
"service": "sales",
|
|
"status": "failed",
|
|
"records_cloned": 0,
|
|
"duration_ms": int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000),
|
|
"error": str(e)
|
|
}
|
|
|
|
|
|
@router.get("/clone/health")
|
|
async def clone_health_check(_: bool = Depends(verify_internal_api_key)):
|
|
"""
|
|
Health check for internal cloning endpoint
|
|
Used by orchestrator to verify service availability
|
|
"""
|
|
return {
|
|
"service": "sales",
|
|
"clone_endpoint": "available",
|
|
"version": "2.0.0"
|
|
}
|
|
|
|
|
|
@router.delete("/tenant/{virtual_tenant_id}")
|
|
async def delete_demo_data(
|
|
virtual_tenant_id: str,
|
|
db: AsyncSession = Depends(get_db),
|
|
_: bool = Depends(verify_internal_api_key)
|
|
):
|
|
"""Delete all sales data for a virtual demo tenant"""
|
|
logger.info("Deleting sales data for virtual tenant", virtual_tenant_id=virtual_tenant_id)
|
|
start_time = datetime.now(timezone.utc)
|
|
|
|
try:
|
|
virtual_uuid = uuid.UUID(virtual_tenant_id)
|
|
|
|
# Count records
|
|
sales_count = await db.scalar(select(func.count(SalesData.id)).where(SalesData.tenant_id == virtual_uuid))
|
|
|
|
# Delete sales data
|
|
await db.execute(delete(SalesData).where(SalesData.tenant_id == virtual_uuid))
|
|
await db.commit()
|
|
|
|
duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
|
|
logger.info("Sales data deleted successfully", virtual_tenant_id=virtual_tenant_id, duration_ms=duration_ms)
|
|
|
|
return {
|
|
"service": "sales",
|
|
"status": "deleted",
|
|
"virtual_tenant_id": virtual_tenant_id,
|
|
"records_deleted": {
|
|
"sales": sales_count,
|
|
"total": sales_count
|
|
},
|
|
"duration_ms": duration_ms
|
|
}
|
|
except Exception as e:
|
|
logger.error("Failed to delete sales data", error=str(e), exc_info=True)
|
|
await db.rollback()
|
|
raise HTTPException(status_code=500, detail=str(e))
|