# bakery-ia/services/suppliers/app/api/internal_demo.py
"""
Internal Demo Cloning API for Suppliers Service
Service-to-service endpoint for cloning supplier data
"""
from fastapi import APIRouter, Depends, HTTPException
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import delete
import structlog
import uuid
from uuid import UUID
from datetime import datetime, timezone
from typing import Any, Optional
import json
from pathlib import Path
from app.core.database import get_db
from app.models.suppliers import Supplier, SupplierType, SupplierStatus, PaymentTerms
# Import demo_dates utilities at the top level
import sys
sys.path.insert(0, str(Path(__file__).parent.parent.parent.parent))
from shared.utils.demo_dates import adjust_date_for_demo, resolve_time_marker
logger = structlog.get_logger()
router = APIRouter(prefix="/internal/demo", tags=["internal"])
# Base demo tenant IDs
DEMO_TENANT_PROFESSIONAL = "a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6"
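# Seed files express dates either as absolute ISO-8601 timestamps or as "BASE_TS"
# time markers resolved relative to the demo session's creation time;
# parse_date_field() below handles both forms.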
def parse_date_field(
    field_value: Any,
    session_time: datetime,
    field_name: str = "date"
) -> Optional[datetime]:
"""
Parse a date field from JSON, supporting BASE_TS markers and ISO timestamps.
Args:
field_value: The date field value (can be BASE_TS marker, ISO string, or None)
session_time: Session creation time (timezone-aware UTC)
field_name: Name of the field (for logging)
Returns:
Timezone-aware UTC datetime or None
"""
if field_value is None:
return None
# Handle BASE_TS markers
if isinstance(field_value, str) and field_value.startswith("BASE_TS"):
try:
return resolve_time_marker(field_value, session_time)
except (ValueError, AttributeError) as e:
logger.warning(
"Failed to resolve BASE_TS marker",
field_name=field_name,
marker=field_value,
error=str(e)
)
return None
# Handle ISO timestamps (legacy format - convert to absolute datetime)
if isinstance(field_value, str) and ('T' in field_value or 'Z' in field_value):
try:
parsed_date = datetime.fromisoformat(field_value.replace('Z', '+00:00'))
# Adjust relative to session time
return adjust_date_for_demo(parsed_date, session_time)
except (ValueError, AttributeError) as e:
logger.warning(
"Failed to parse ISO timestamp",
field_name=field_name,
value=field_value,
error=str(e)
)
return None
logger.warning(
"Unknown date format",
field_name=field_name,
value=field_value,
value_type=type(field_value).__name__
)
return None
@router.post("/clone")
async def clone_demo_data(
base_tenant_id: str,
virtual_tenant_id: str,
demo_account_type: str,
session_id: Optional[str] = None,
session_created_at: Optional[str] = None,
db: AsyncSession = Depends(get_db)
):
"""
Clone suppliers service data for a virtual demo tenant
This endpoint creates fresh demo data by:
1. Loading seed data from JSON files
2. Applying XOR-based ID transformation
3. Adjusting dates relative to session creation time
4. Creating records in the virtual tenant
Args:
base_tenant_id: Template tenant UUID (for reference)
virtual_tenant_id: Target virtual tenant UUID
demo_account_type: Type of demo account
session_id: Originating session ID for tracing
session_created_at: Session creation timestamp for date adjustment
Returns:
Cloning status and record counts
"""
start_time = datetime.now(timezone.utc)
try:
# Validate UUIDs
virtual_uuid = uuid.UUID(virtual_tenant_id)
# Parse session creation time for date adjustment
if session_created_at:
try:
session_time = datetime.fromisoformat(session_created_at.replace('Z', '+00:00'))
except (ValueError, AttributeError):
session_time = start_time
else:
session_time = start_time
logger.info(
"Starting suppliers data cloning",
base_tenant_id=base_tenant_id,
virtual_tenant_id=virtual_tenant_id,
demo_account_type=demo_account_type,
session_id=session_id,
session_created_at=session_created_at
)
# Load seed data from JSON files
from shared.utils.seed_data_paths import get_seed_data_path
if demo_account_type == "professional":
json_file = get_seed_data_path("professional", "05-suppliers.json")
elif demo_account_type == "enterprise":
json_file = get_seed_data_path("enterprise", "05-suppliers.json")
elif demo_account_type == "enterprise_child":
json_file = get_seed_data_path("enterprise", "05-suppliers.json", child_id=base_tenant_id)
else:
raise ValueError(f"Invalid demo account type: {demo_account_type}")
# Load JSON data
with open(json_file, 'r', encoding='utf-8') as f:
seed_data = json.load(f)
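        # Minimal expected shape of the seed file (illustrative only; real seed files
        # carry many more supplier fields):
        #   {"suppliers": [{"id": "<uuid>", "name": "...", "created_at": "BASE_TS...", ...}]}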
# Track cloning statistics
stats = {
"suppliers": 0
}
        # Create suppliers
        from shared.utils.demo_id_transformer import transform_id
        for supplier_data in seed_data.get('suppliers', []):
            # Transform the supplier ID using the XOR-based transformer
            try:
                uuid.UUID(supplier_data['id'])  # validate the seed UUID before transforming
                transformed_id = transform_id(supplier_data['id'], virtual_uuid)
except ValueError as e:
logger.error("Failed to parse supplier UUID",
supplier_id=supplier_data['id'],
error=str(e))
raise HTTPException(
status_code=400,
detail=f"Invalid UUID format in supplier data: {str(e)}"
)
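            # transform_id is expected to be deterministic per (seed id, virtual tenant),
            # so the same seed file can be cloned into many virtual tenants without
            # colliding with this tenant's records.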
# Parse date fields (supports BASE_TS markers and ISO timestamps)
adjusted_created_at = parse_date_field(
supplier_data.get('created_at'),
session_time,
"created_at"
)
adjusted_updated_at = parse_date_field(
supplier_data.get('updated_at'),
session_time,
"updated_at"
) or adjusted_created_at # Fallback to created_at if not provided
            # Map supplier_type to its enum if it was provided as a string
supplier_type_value = supplier_data.get('supplier_type')
if supplier_type_value is None:
# Default to multi if supplier_type not provided
supplier_type_value = SupplierType.multi
elif isinstance(supplier_type_value, str):
try:
supplier_type_value = SupplierType[supplier_type_value]
except KeyError:
supplier_type_value = SupplierType.multi
# Map payment_terms to enum if it's a string
payment_terms_value = supplier_data.get('payment_terms', 'net_30')
if isinstance(payment_terms_value, str):
try:
payment_terms_value = PaymentTerms[payment_terms_value]
except KeyError:
payment_terms_value = PaymentTerms.net_30
# Map status to enum if provided
status_value = supplier_data.get('status', 'active')
if isinstance(status_value, str):
try:
status_value = SupplierStatus[status_value]
except KeyError:
status_value = SupplierStatus.active
# Map created_by and updated_by - use a system user UUID if not provided
system_user_id = uuid.UUID('00000000-0000-0000-0000-000000000000')
created_by = supplier_data.get('created_by', str(system_user_id))
updated_by = supplier_data.get('updated_by', str(system_user_id))
new_supplier = Supplier(
id=str(transformed_id),
tenant_id=virtual_uuid,
name=supplier_data['name'],
supplier_code=supplier_data.get('supplier_code'),
tax_id=supplier_data.get('tax_id'),
registration_number=supplier_data.get('registration_number'),
supplier_type=supplier_type_value,
status=status_value,
contact_person=supplier_data.get('contact_person'),
email=supplier_data.get('email'),
phone=supplier_data.get('phone'),
mobile=supplier_data.get('mobile'),
website=supplier_data.get('website'),
address_line1=supplier_data.get('address_line1'),
address_line2=supplier_data.get('address_line2'),
city=supplier_data.get('city'),
state_province=supplier_data.get('state_province'),
postal_code=supplier_data.get('postal_code'),
country=supplier_data.get('country'),
payment_terms=payment_terms_value,
credit_limit=supplier_data.get('credit_limit', 0.0),
currency=supplier_data.get('currency', 'EUR'),
standard_lead_time=supplier_data.get('standard_lead_time', 3),
minimum_order_amount=supplier_data.get('minimum_order_amount'),
delivery_area=supplier_data.get('delivery_area'),
quality_rating=supplier_data.get('quality_rating', 0.0),
delivery_rating=supplier_data.get('delivery_rating', 0.0),
total_orders=supplier_data.get('total_orders', 0),
total_amount=supplier_data.get('total_amount', 0.0),
trust_score=supplier_data.get('trust_score', 0.0),
is_preferred_supplier=supplier_data.get('is_preferred_supplier', False),
auto_approve_enabled=supplier_data.get('auto_approve_enabled', False),
total_pos_count=supplier_data.get('total_pos_count', 0),
approved_pos_count=supplier_data.get('approved_pos_count', 0),
on_time_delivery_rate=supplier_data.get('on_time_delivery_rate', 0.0),
fulfillment_rate=supplier_data.get('fulfillment_rate', 0.0),
last_performance_update=parse_date_field(
supplier_data.get('last_performance_update'),
session_time,
"last_performance_update"
),
approved_by=supplier_data.get('approved_by'),
approved_at=parse_date_field(
supplier_data.get('approved_at'),
session_time,
"approved_at"
),
rejection_reason=supplier_data.get('rejection_reason'),
notes=supplier_data.get('notes'),
certifications=supplier_data.get('certifications'),
business_hours=supplier_data.get('business_hours'),
specializations=supplier_data.get('specializations'),
created_at=adjusted_created_at,
updated_at=adjusted_updated_at,
created_by=created_by,
updated_by=updated_by
)
db.add(new_supplier)
stats["suppliers"] += 1
await db.commit()
duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
logger.info(
"Suppliers data cloned successfully",
virtual_tenant_id=virtual_tenant_id,
suppliers_cloned=stats["suppliers"],
duration_ms=duration_ms
)
return {
"service": "suppliers",
"status": "completed",
"records_cloned": stats["suppliers"],
"duration_ms": duration_ms,
"details": {
"suppliers": stats["suppliers"],
"virtual_tenant_id": str(virtual_tenant_id)
}
}
    except ValueError as e:
        logger.error("Invalid UUID or demo account type", error=str(e), virtual_tenant_id=virtual_tenant_id)
        raise HTTPException(status_code=400, detail=f"Invalid input: {str(e)}")
    except HTTPException:
        # Re-raise HTTP errors (e.g. invalid seed UUIDs) so their status code is preserved
        await db.rollback()
        raise
    except Exception as e:
logger.error(
"Failed to clone suppliers data",
error=str(e),
virtual_tenant_id=virtual_tenant_id,
exc_info=True
)
# Rollback on error
await db.rollback()
return {
"service": "suppliers",
"status": "failed",
"records_cloned": 0,
"duration_ms": int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000),
"error": str(e)
}
@router.get("/clone/health")
async def clone_health_check():
"""
Health check for internal cloning endpoint
Used by orchestrator to verify service availability
"""
return {
"service": "suppliers",
"clone_endpoint": "available",
"version": "2.0.0"
}
@router.delete("/tenant/{virtual_tenant_id}")
async def delete_demo_tenant_data(
virtual_tenant_id: UUID,
db: AsyncSession = Depends(get_db)
):
"""
Delete all demo data for a virtual tenant.
This endpoint is idempotent - safe to call multiple times.
"""
start_time = datetime.now(timezone.utc)
records_deleted = {
"suppliers": 0,
"total": 0
}
try:
# Delete suppliers
result = await db.execute(
delete(Supplier)
.where(Supplier.tenant_id == virtual_tenant_id)
)
records_deleted["suppliers"] = result.rowcount
records_deleted["total"] = records_deleted["suppliers"]
await db.commit()
logger.info(
"demo_data_deleted",
service="suppliers",
virtual_tenant_id=str(virtual_tenant_id),
records_deleted=records_deleted
)
return {
"service": "suppliers",
"status": "deleted",
"virtual_tenant_id": str(virtual_tenant_id),
"records_deleted": records_deleted,
"duration_ms": int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
}
except Exception as e:
await db.rollback()
logger.error(
"demo_data_deletion_failed",
service="suppliers",
virtual_tenant_id=str(virtual_tenant_id),
error=str(e)
)
raise HTTPException(
status_code=500,
detail=f"Failed to delete demo data: {str(e)}"
)