# bakery-ia/services/suppliers/app/api/internal_demo.py
"""
Internal Demo Cloning API for Suppliers Service
2025-12-13 23:57:54 +01:00
Service-to-service endpoint for cloning supplier data
"""
from fastapi import APIRouter, Depends, HTTPException, Header
from sqlalchemy.ext.asyncio import AsyncSession
2025-12-13 23:57:54 +01:00
from sqlalchemy import select, delete
import structlog
import uuid
2025-12-13 23:57:54 +01:00
from uuid import UUID
from datetime import datetime, timezone
from typing import Optional
2025-12-13 23:57:54 +01:00
import json
2025-10-30 21:08:07 +01:00
from pathlib import Path
from app.core.database import get_db
2025-12-13 23:57:54 +01:00
from app.models.suppliers import Supplier
2025-11-30 09:12:40 +01:00
from app.core.config import settings
2025-12-14 11:58:14 +01:00
# Import demo_dates utilities at the top level
import sys
sys.path.insert(0, str(Path(__file__).parent.parent.parent.parent))
from shared.utils.demo_dates import adjust_date_for_demo, resolve_time_marker
logger = structlog.get_logger()
2025-12-13 23:57:54 +01:00
router = APIRouter()
# Base demo tenant IDs
2025-11-30 09:12:40 +01:00
DEMO_TENANT_PROFESSIONAL = "a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6"
2025-12-14 11:58:14 +01:00
def parse_date_field(
    field_value: object,
    session_time: datetime,
    field_name: str = "date"
) -> Optional[datetime]:
    """
    Parse a date field from JSON, supporting BASE_TS markers and ISO timestamps.

    Args:
        field_value: The date field value — a "BASE_TS..." marker string, an
            ISO 8601 timestamp string, or None. (Annotated ``object`` rather
            than the builtin ``any``, which is a function, not a type.)
        session_time: Session creation time (timezone-aware UTC).
        field_name: Name of the field, used only for logging.

    Returns:
        Timezone-aware UTC datetime, or None when the value is missing,
        unrecognized, or fails to parse. Parse failures are logged and
        swallowed — this function never raises for bad input.
    """
    if field_value is None:
        return None

    # Relative markers ("BASE_TS...") are resolved against the session time.
    if isinstance(field_value, str) and field_value.startswith("BASE_TS"):
        try:
            return resolve_time_marker(field_value, session_time)
        except (ValueError, AttributeError) as e:
            logger.warning(
                "Failed to resolve BASE_TS marker",
                field_name=field_name,
                marker=field_value,
                error=str(e)
            )
            return None

    # Legacy absolute ISO timestamps (detected via 'T' separator or trailing
    # 'Z') are parsed then shifted relative to session_time so cloned demo
    # data always appears recent.
    if isinstance(field_value, str) and ('T' in field_value or 'Z' in field_value):
        try:
            parsed_date = datetime.fromisoformat(field_value.replace('Z', '+00:00'))
            return adjust_date_for_demo(parsed_date, session_time)
        except (ValueError, AttributeError) as e:
            logger.warning(
                "Failed to parse ISO timestamp",
                field_name=field_name,
                value=field_value,
                error=str(e)
            )
            return None

    # Anything else (non-string, date-only string, etc.) is rejected.
    logger.warning(
        "Unknown date format",
        field_name=field_name,
        value=field_value,
        value_type=type(field_value).__name__
    )
    return None
def verify_internal_api_key(x_internal_api_key: Optional[str] = Header(None)):
    """Verify the internal API key for service-to-service communication.

    Raises:
        HTTPException: 403 when the header is missing, the configured key is
            unset, or the values do not match.
    """
    import secrets  # stdlib; local import keeps module header untouched

    expected = settings.INTERNAL_API_KEY
    # compare_digest is constant-time, avoiding timing side channels. Also
    # reject explicitly when either side is missing: a plain `!=` would have
    # granted access if both the header and the configured key were None.
    if (
        not x_internal_api_key
        or not expected
        or not secrets.compare_digest(x_internal_api_key, expected)
    ):
        logger.warning("Unauthorized internal API access attempted")
        raise HTTPException(status_code=403, detail="Invalid internal API key")
    return True
@router.post("/internal/demo/clone")
async def clone_demo_data(
    base_tenant_id: str,
    virtual_tenant_id: str,
    demo_account_type: str,
    session_id: Optional[str] = None,
    session_created_at: Optional[str] = None,
    db: AsyncSession = Depends(get_db),
    _: bool = Depends(verify_internal_api_key)
):
    """
    Clone suppliers service data for a virtual demo tenant.

    This endpoint creates fresh demo data by:
    1. Loading seed data from JSON files
    2. Applying XOR-based ID transformation
    3. Adjusting dates relative to session creation time
    4. Creating records in the virtual tenant

    Args:
        base_tenant_id: Template tenant UUID (for reference)
        virtual_tenant_id: Target virtual tenant UUID
        demo_account_type: "professional", "enterprise" or "enterprise_child"
        session_id: Originating session ID for tracing
        session_created_at: ISO 8601 session creation timestamp used to shift
            seed dates; falls back to "now" when missing or malformed

    Returns:
        Cloning status and record counts; on unexpected failure returns a
        "failed" payload (HTTP 200) rather than raising.

    Raises:
        HTTPException: 400 for an invalid tenant UUID or malformed seed UUIDs.
    """
    # Hoisted out of the per-supplier loop: these were previously re-imported
    # on every iteration.
    from shared.utils.seed_data_paths import get_seed_data_path
    from shared.utils.demo_id_transformer import transform_id
    from app.models.suppliers import SupplierType, SupplierStatus, PaymentTerms

    start_time = datetime.now(timezone.utc)
    try:
        # Validate the target tenant UUID up front.
        virtual_uuid = uuid.UUID(virtual_tenant_id)

        # Parse session creation time for date adjustment; fall back to the
        # request time on a missing or unparseable value.
        if session_created_at:
            try:
                session_time = datetime.fromisoformat(session_created_at.replace('Z', '+00:00'))
            except (ValueError, AttributeError):
                session_time = start_time
        else:
            session_time = start_time

        logger.info(
            "Starting suppliers data cloning",
            base_tenant_id=base_tenant_id,
            virtual_tenant_id=virtual_tenant_id,
            demo_account_type=demo_account_type,
            session_id=session_id,
            session_created_at=session_created_at
        )

        # Resolve the seed-data file for the requested demo account type.
        if demo_account_type == "professional":
            json_file = get_seed_data_path("professional", "05-suppliers.json")
        elif demo_account_type == "enterprise":
            json_file = get_seed_data_path("enterprise", "05-suppliers.json")
        elif demo_account_type == "enterprise_child":
            json_file = get_seed_data_path("enterprise", "05-suppliers.json", child_id=base_tenant_id)
        else:
            raise ValueError(f"Invalid demo account type: {demo_account_type}")

        # Load JSON seed data
        with open(json_file, 'r', encoding='utf-8') as f:
            seed_data = json.load(f)

        # Track cloning statistics
        stats = {
            "suppliers": 0
        }

        # Audit columns default to the all-zeros "system" user.
        system_user_id = uuid.UUID('00000000-0000-0000-0000-000000000000')

        for supplier_data in seed_data.get('suppliers', []):
            try:
                # Validate the seed UUID, then derive a tenant-unique ID (XOR).
                uuid.UUID(supplier_data['id'])
                transformed_id = transform_id(supplier_data['id'], virtual_uuid)
            except ValueError as e:
                logger.error("Failed to parse supplier UUID",
                             supplier_id=supplier_data['id'],
                             error=str(e))
                raise HTTPException(
                    status_code=400,
                    detail=f"Invalid UUID format in supplier data: {str(e)}"
                )

            # Parse date fields (supports BASE_TS markers and ISO timestamps)
            adjusted_created_at = parse_date_field(
                supplier_data.get('created_at'),
                session_time,
                "created_at"
            )
            adjusted_updated_at = parse_date_field(
                supplier_data.get('updated_at'),
                session_time,
                "updated_at"
            ) or adjusted_created_at  # Fallback to created_at if not provided

            # Coerce string enum names to members, with safe defaults.
            supplier_type_value = supplier_data.get('supplier_type')
            if supplier_type_value is None:
                # Default to multi if supplier_type not provided
                supplier_type_value = SupplierType.multi
            elif isinstance(supplier_type_value, str):
                try:
                    supplier_type_value = SupplierType[supplier_type_value]
                except KeyError:
                    supplier_type_value = SupplierType.multi

            payment_terms_value = supplier_data.get('payment_terms', 'net_30')
            if isinstance(payment_terms_value, str):
                try:
                    payment_terms_value = PaymentTerms[payment_terms_value]
                except KeyError:
                    payment_terms_value = PaymentTerms.net_30

            status_value = supplier_data.get('status', 'active')
            if isinstance(status_value, str):
                try:
                    status_value = SupplierStatus[status_value]
                except KeyError:
                    status_value = SupplierStatus.active

            created_by = supplier_data.get('created_by', str(system_user_id))
            updated_by = supplier_data.get('updated_by', str(system_user_id))

            new_supplier = Supplier(
                id=str(transformed_id),
                tenant_id=virtual_uuid,
                name=supplier_data['name'],
                supplier_code=supplier_data.get('supplier_code'),
                tax_id=supplier_data.get('tax_id'),
                registration_number=supplier_data.get('registration_number'),
                supplier_type=supplier_type_value,
                status=status_value,
                contact_person=supplier_data.get('contact_person'),
                email=supplier_data.get('email'),
                phone=supplier_data.get('phone'),
                mobile=supplier_data.get('mobile'),
                website=supplier_data.get('website'),
                address_line1=supplier_data.get('address_line1'),
                address_line2=supplier_data.get('address_line2'),
                city=supplier_data.get('city'),
                state_province=supplier_data.get('state_province'),
                postal_code=supplier_data.get('postal_code'),
                country=supplier_data.get('country'),
                payment_terms=payment_terms_value,
                credit_limit=supplier_data.get('credit_limit', 0.0),
                currency=supplier_data.get('currency', 'EUR'),
                standard_lead_time=supplier_data.get('standard_lead_time', 3),
                minimum_order_amount=supplier_data.get('minimum_order_amount'),
                delivery_area=supplier_data.get('delivery_area'),
                quality_rating=supplier_data.get('quality_rating', 0.0),
                delivery_rating=supplier_data.get('delivery_rating', 0.0),
                total_orders=supplier_data.get('total_orders', 0),
                total_amount=supplier_data.get('total_amount', 0.0),
                trust_score=supplier_data.get('trust_score', 0.0),
                is_preferred_supplier=supplier_data.get('is_preferred_supplier', False),
                auto_approve_enabled=supplier_data.get('auto_approve_enabled', False),
                total_pos_count=supplier_data.get('total_pos_count', 0),
                approved_pos_count=supplier_data.get('approved_pos_count', 0),
                on_time_delivery_rate=supplier_data.get('on_time_delivery_rate', 0.0),
                fulfillment_rate=supplier_data.get('fulfillment_rate', 0.0),
                last_performance_update=parse_date_field(
                    supplier_data.get('last_performance_update'),
                    session_time,
                    "last_performance_update"
                ),
                approved_by=supplier_data.get('approved_by'),
                approved_at=parse_date_field(
                    supplier_data.get('approved_at'),
                    session_time,
                    "approved_at"
                ),
                rejection_reason=supplier_data.get('rejection_reason'),
                notes=supplier_data.get('notes'),
                certifications=supplier_data.get('certifications'),
                business_hours=supplier_data.get('business_hours'),
                specializations=supplier_data.get('specializations'),
                created_at=adjusted_created_at,
                updated_at=adjusted_updated_at,
                created_by=created_by,
                updated_by=updated_by
            )

            db.add(new_supplier)
            stats["suppliers"] += 1

        await db.commit()

        duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
        logger.info(
            "Suppliers data cloned successfully",
            virtual_tenant_id=virtual_tenant_id,
            suppliers_cloned=stats["suppliers"],
            duration_ms=duration_ms
        )
        return {
            "service": "suppliers",
            "status": "completed",
            "records_cloned": stats["suppliers"],
            "duration_ms": duration_ms,
            "details": {
                "suppliers": stats["suppliers"],
                "virtual_tenant_id": str(virtual_tenant_id)
            }
        }
    except HTTPException:
        # Let deliberate HTTP errors (e.g. bad seed UUIDs → 400) propagate
        # instead of being swallowed by the generic failure path below.
        await db.rollback()
        raise
    except ValueError as e:
        logger.error("Invalid UUID format", error=str(e), virtual_tenant_id=virtual_tenant_id)
        raise HTTPException(status_code=400, detail=f"Invalid UUID: {str(e)}")
    except Exception as e:
        logger.error(
            "Failed to clone suppliers data",
            error=str(e),
            virtual_tenant_id=virtual_tenant_id,
            exc_info=True
        )
        # Rollback on error
        await db.rollback()
        return {
            "service": "suppliers",
            "status": "failed",
            "records_cloned": 0,
            "duration_ms": int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000),
            "error": str(e)
        }
@router.get("/clone/health")
async def clone_health_check(_: bool = Depends(verify_internal_api_key)):
"""
Health check for internal cloning endpoint
Used by orchestrator to verify service availability
"""
return {
"service": "suppliers",
"clone_endpoint": "available",
"version": "2.0.0"
}
@router.delete("/tenant/{virtual_tenant_id}")
async def delete_demo_tenant_data(
    virtual_tenant_id: UUID,
    db: AsyncSession = Depends(get_db),
    _: bool = Depends(verify_internal_api_key)
):
    """
    Delete all demo data for a virtual tenant.

    This endpoint is idempotent - safe to call multiple times: deleting an
    already-clean tenant simply reports zero rows.

    Args:
        virtual_tenant_id: Virtual tenant whose supplier rows are removed.

    Returns:
        Deletion summary with per-table counts and duration in milliseconds.

    Raises:
        HTTPException: 500 when the delete fails (transaction is rolled back).
    """
    start_time = datetime.now(timezone.utc)
    records_deleted = {
        "suppliers": 0,
        "total": 0
    }
    try:
        # Bulk delete all suppliers belonging to the virtual tenant.
        result = await db.execute(
            delete(Supplier)
            .where(Supplier.tenant_id == virtual_tenant_id)
        )
        records_deleted["suppliers"] = result.rowcount

        # Single table today; "total" exists so the orchestrator's summary
        # shape matches services that delete from multiple tables.
        records_deleted["total"] = records_deleted["suppliers"]

        await db.commit()

        logger.info(
            "demo_data_deleted",
            service="suppliers",
            virtual_tenant_id=str(virtual_tenant_id),
            records_deleted=records_deleted
        )

        return {
            "service": "suppliers",
            "status": "deleted",
            "virtual_tenant_id": str(virtual_tenant_id),
            "records_deleted": records_deleted,
            "duration_ms": int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
        }

    except Exception as e:
        await db.rollback()
        logger.error(
            "demo_data_deletion_failed",
            service="suppliers",
            virtual_tenant_id=str(virtual_tenant_id),
            error=str(e)
        )
        raise HTTPException(
            status_code=500,
            detail=f"Failed to delete demo data: {str(e)}"
        )