bakery-ia/services/suppliers/app/api/internal_demo.py
"""
Internal Demo Cloning API for Suppliers Service
Service-to-service endpoint for cloning supplier and procurement data
"""
from fastapi import APIRouter, Depends, HTTPException, Header
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, delete, func
import structlog
import uuid
from datetime import datetime, timezone, timedelta, date
from typing import Optional
import os
import sys
from pathlib import Path
# Add shared path
sys.path.insert(0, str(Path(__file__).parent.parent.parent.parent.parent))
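# (five directories up from this file, assumed to be the repo root containing the
# shared/ package imported below)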
from app.core.database import get_db
from app.models.suppliers import (
    Supplier, SupplierPriceList, SupplierQualityReview,
    SupplierStatus, QualityRating
)
from shared.utils.demo_dates import adjust_date_for_demo, BASE_REFERENCE_DATE

logger = structlog.get_logger()
router = APIRouter(prefix="/internal/demo", tags=["internal"])

# Internal API key for service-to-service auth
INTERNAL_API_KEY = os.getenv("INTERNAL_API_KEY", "dev-internal-key-change-in-production")

# Base demo tenant IDs
DEMO_TENANT_SAN_PABLO = "a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6"
DEMO_TENANT_LA_ESPIGA = "b2c3d4e5-f6a7-48b9-c0d1-e2f3a4b5c6d7"

def verify_internal_api_key(x_internal_api_key: Optional[str] = Header(None)):
    """Verify internal API key for service-to-service communication"""
    if x_internal_api_key != INTERNAL_API_KEY:
        logger.warning("Unauthorized internal API access attempted")
        raise HTTPException(status_code=403, detail="Invalid internal API key")
    return True
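
# Note: FastAPI derives the header name from the parameter above, so callers send
# the key as "X-Internal-API-Key" (header matching is case-insensitive).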

@router.post("/clone")
async def clone_demo_data(
    base_tenant_id: str,
    virtual_tenant_id: str,
    demo_account_type: str,
    session_id: Optional[str] = None,
    session_created_at: Optional[str] = None,
    db: AsyncSession = Depends(get_db),
    _: bool = Depends(verify_internal_api_key)
):
    """
    Clone suppliers service data for a virtual demo tenant.

    Clones:
    - Suppliers (vendor master data)
    - Supplier price lists (product pricing)
    - Quality reviews

    Args:
        base_tenant_id: Template tenant UUID to clone from
        virtual_tenant_id: Target virtual tenant UUID
        demo_account_type: Type of demo account
        session_id: Originating session ID for tracing
        session_created_at: ISO-8601 session creation timestamp used to re-anchor demo dates

    Returns:
        Cloning status and record counts
    """
    start_time = datetime.now(timezone.utc)

    # Parse session creation time for date adjustment
    if session_created_at:
        try:
            session_time = datetime.fromisoformat(session_created_at.replace('Z', '+00:00'))
        except (ValueError, AttributeError):
            session_time = start_time
    else:
        session_time = start_time

    logger.info(
        "Starting suppliers data cloning",
        base_tenant_id=base_tenant_id,
        virtual_tenant_id=virtual_tenant_id,
        demo_account_type=demo_account_type,
        session_id=session_id,
        session_created_at=session_created_at
    )

    try:
        # Validate UUIDs
        base_uuid = uuid.UUID(base_tenant_id)
        virtual_uuid = uuid.UUID(virtual_tenant_id)

        # Track cloning statistics
        stats = {
            "suppliers": 0,
            "price_lists": 0,
            "quality_reviews": 0
        }

        # ID mappings
        supplier_id_map = {}
        price_list_map = {}
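        # supplier_id_map / price_list_map record old (base-tenant) primary keys ->
        # newly generated keys, so cloned child rows can reference the cloned parents.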

        # Clone Suppliers
        result = await db.execute(
            select(Supplier).where(Supplier.tenant_id == base_uuid)
        )
        base_suppliers = result.scalars().all()

        logger.info(
            "Found suppliers to clone",
            count=len(base_suppliers),
            base_tenant=str(base_uuid)
        )

        for supplier in base_suppliers:
            new_supplier_id = uuid.uuid4()
            supplier_id_map[supplier.id] = new_supplier_id

            new_supplier = Supplier(
                id=new_supplier_id,
                tenant_id=virtual_uuid,
                name=supplier.name,
                supplier_code=f"SUPP-{uuid.uuid4().hex[:6].upper()}",  # New code
                tax_id=supplier.tax_id,
                registration_number=supplier.registration_number,
                supplier_type=supplier.supplier_type,
                status=supplier.status,
                contact_person=supplier.contact_person,
                email=supplier.email,
                phone=supplier.phone,
                mobile=supplier.mobile,
                website=supplier.website,
                address_line1=supplier.address_line1,
                address_line2=supplier.address_line2,
                city=supplier.city,
                state_province=supplier.state_province,
                postal_code=supplier.postal_code,
                country=supplier.country,
                payment_terms=supplier.payment_terms,
                credit_limit=supplier.credit_limit,
                currency=supplier.currency,
                standard_lead_time=supplier.standard_lead_time,
                minimum_order_amount=supplier.minimum_order_amount,
                delivery_area=supplier.delivery_area,
                quality_rating=supplier.quality_rating,
                delivery_rating=supplier.delivery_rating,
                total_orders=supplier.total_orders,
                total_amount=supplier.total_amount,
                approved_by=supplier.approved_by,
                approved_at=supplier.approved_at,
                rejection_reason=supplier.rejection_reason,
                notes=supplier.notes,
                certifications=supplier.certifications,
                business_hours=supplier.business_hours,
                specializations=supplier.specializations,
                created_at=datetime.now(timezone.utc),
                updated_at=datetime.now(timezone.utc),
                created_by=supplier.created_by,
                updated_by=supplier.updated_by
            )
            db.add(new_supplier)
            stats["suppliers"] += 1

        # Flush to get supplier IDs
        await db.flush()

        # Clone Supplier Price Lists
        for old_supplier_id, new_supplier_id in supplier_id_map.items():
            result = await db.execute(
                select(SupplierPriceList).where(SupplierPriceList.supplier_id == old_supplier_id)
            )
            price_lists = result.scalars().all()

            for price_list in price_lists:
                new_price_id = uuid.uuid4()
                price_list_map[price_list.id] = new_price_id

                # Transform inventory_product_id to match virtual tenant's ingredient IDs
                # Using same formula as inventory service: tenant_int ^ base_int
                base_product_int = int(price_list.inventory_product_id.hex, 16)
                virtual_tenant_int = int(virtual_uuid.hex, 16)
                base_tenant_int = int(base_uuid.hex, 16)

                # Reverse the original XOR to get the base ingredient ID
                # base_product = base_tenant ^ base_ingredient_id
                # So: base_ingredient_id = base_tenant ^ base_product
                base_ingredient_int = base_tenant_int ^ base_product_int

                # Now apply virtual tenant XOR
                new_product_id = uuid.UUID(int=virtual_tenant_int ^ base_ingredient_int)
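
                # Worked example with illustrative 8-bit values (XOR is self-inverse):
                #   base_tenant=0x0F, base_ingredient=0x33 -> base_product = 0x0F ^ 0x33 = 0x3C
                #   recover:  0x0F ^ 0x3C = 0x33
                #   re-map:   virtual_tenant=0xA0 -> 0xA0 ^ 0x33 = 0x93
                # Assuming the inventory service clones ingredients with the same
                # tenant ^ ingredient formula, 0x93 is the ID it assigns for this tenant.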

                logger.debug(
                    "Transforming price list product ID using XOR",
                    supplier_id=str(new_supplier_id),
                    base_product_id=str(price_list.inventory_product_id),
                    new_product_id=str(new_product_id),
                    product_code=price_list.product_code
                )

                new_price_list = SupplierPriceList(
                    id=new_price_id,
                    tenant_id=virtual_uuid,
                    supplier_id=new_supplier_id,
                    inventory_product_id=new_product_id,  # Transformed for virtual tenant
                    product_code=price_list.product_code,
                    unit_price=price_list.unit_price,
                    unit_of_measure=price_list.unit_of_measure,
                    minimum_order_quantity=price_list.minimum_order_quantity,
                    price_per_unit=price_list.price_per_unit,
                    tier_pricing=price_list.tier_pricing,
                    effective_date=price_list.effective_date,
                    expiry_date=price_list.expiry_date,
                    is_active=price_list.is_active,
                    brand=price_list.brand,
                    packaging_size=price_list.packaging_size,
                    origin_country=price_list.origin_country,
                    shelf_life_days=price_list.shelf_life_days,
                    storage_requirements=price_list.storage_requirements,
                    quality_specs=price_list.quality_specs,
                    allergens=price_list.allergens,
                    created_at=datetime.now(timezone.utc),
                    updated_at=datetime.now(timezone.utc),
                    created_by=price_list.created_by,
                    updated_by=price_list.updated_by
                )
                db.add(new_price_list)
                stats["price_lists"] += 1

        # Flush to get price list IDs
        await db.flush()

        # Clone Quality Reviews
        result = await db.execute(
            select(SupplierQualityReview).where(SupplierQualityReview.tenant_id == base_uuid)
        )
        base_reviews = result.scalars().all()

        for review in base_reviews:
            new_supplier_id = supplier_id_map.get(review.supplier_id, review.supplier_id)

            # Adjust dates relative to session creation time
            adjusted_review_date = adjust_date_for_demo(
                review.review_date, session_time, BASE_REFERENCE_DATE
            )
            adjusted_follow_up_date = adjust_date_for_demo(
                review.follow_up_date, session_time, BASE_REFERENCE_DATE
            )
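            # adjust_date_for_demo is assumed to shift each date by the offset between
            # the session start and BASE_REFERENCE_DATE, so template data authored
            # against the fixed reference date appears recent in the demo session.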

            new_review = SupplierQualityReview(
                id=uuid.uuid4(),
                tenant_id=virtual_uuid,
                supplier_id=new_supplier_id,
                review_date=adjusted_review_date,
                review_type=review.review_type,
                quality_rating=review.quality_rating,
                delivery_rating=review.delivery_rating,
                communication_rating=review.communication_rating,
                overall_rating=review.overall_rating,
                quality_comments=review.quality_comments,
                delivery_comments=review.delivery_comments,
                communication_comments=review.communication_comments,
                improvement_suggestions=review.improvement_suggestions,
                quality_issues=review.quality_issues,
                corrective_actions=review.corrective_actions,
                follow_up_required=review.follow_up_required,
                follow_up_date=adjusted_follow_up_date,
                is_final=review.is_final,
                approved_by=review.approved_by,
                created_at=session_time,
                reviewed_by=review.reviewed_by
            )
            db.add(new_review)
            stats["quality_reviews"] += 1

        # Commit all changes
        await db.commit()

        total_records = sum(stats.values())
        duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)

        logger.info(
            "Suppliers data cloning completed",
            virtual_tenant_id=virtual_tenant_id,
            total_records=total_records,
            stats=stats,
            duration_ms=duration_ms
        )

        return {
            "service": "suppliers",
            "status": "completed",
            "records_cloned": total_records,
            "duration_ms": duration_ms,
            "details": stats
        }

    except ValueError as e:
        logger.error("Invalid UUID format", error=str(e))
        raise HTTPException(status_code=400, detail=f"Invalid UUID: {str(e)}")
    except Exception as e:
        logger.error(
            "Failed to clone suppliers data",
            error=str(e),
            virtual_tenant_id=virtual_tenant_id,
            exc_info=True
        )
        # Rollback on error
        await db.rollback()
        return {
            "service": "suppliers",
            "status": "failed",
            "records_cloned": 0,
            "duration_ms": int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000),
            "error": str(e)
        }


@router.get("/clone/health")
async def clone_health_check(_: bool = Depends(verify_internal_api_key)):
    """
    Health check for internal cloning endpoint.
    Used by orchestrator to verify service availability.
    """
    return {
        "service": "suppliers",
        "clone_endpoint": "available",
        "version": "2.0.0"
    }


@router.delete("/tenant/{virtual_tenant_id}")
async def delete_demo_data(
    virtual_tenant_id: str,
    db: AsyncSession = Depends(get_db),
    _: bool = Depends(verify_internal_api_key)
):
    """Delete all supplier data for a virtual demo tenant"""
    logger.info("Deleting supplier data for virtual tenant", virtual_tenant_id=virtual_tenant_id)
    start_time = datetime.now(timezone.utc)

    try:
        virtual_uuid = uuid.UUID(virtual_tenant_id)

        # Count records
        supplier_count = await db.scalar(select(func.count(Supplier.id)).where(Supplier.tenant_id == virtual_uuid))
        price_list_count = await db.scalar(select(func.count(SupplierPriceList.id)).where(SupplierPriceList.tenant_id == virtual_uuid))
        quality_review_count = await db.scalar(select(func.count(SupplierQualityReview.id)).where(SupplierQualityReview.tenant_id == virtual_uuid))

        # Delete in order (child tables first)
        await db.execute(delete(SupplierQualityReview).where(SupplierQualityReview.tenant_id == virtual_uuid))
        await db.execute(delete(SupplierPriceList).where(SupplierPriceList.tenant_id == virtual_uuid))
        await db.execute(delete(Supplier).where(Supplier.tenant_id == virtual_uuid))
        await db.commit()

        duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
        logger.info("Supplier data deleted successfully", virtual_tenant_id=virtual_tenant_id, duration_ms=duration_ms)

        return {
            "service": "suppliers",
            "status": "deleted",
            "virtual_tenant_id": virtual_tenant_id,
            "records_deleted": {
                "suppliers": supplier_count,
                "price_lists": price_list_count,
                "quality_reviews": quality_review_count,
                "total": supplier_count + price_list_count + quality_review_count
            },
            "duration_ms": duration_ms
        }
    except Exception as e:
        logger.error("Failed to delete supplier data", error=str(e), exc_info=True)
        await db.rollback()
        raise HTTPException(status_code=500, detail=str(e))
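

# Usage sketch (hypothetical): how an orchestrator might drive these endpoints.
# The service URL, API key value, and demo_account_type value are assumptions, and the
# router is assumed to be mounted without an extra prefix; the clone parameters travel
# as query parameters because the route declares them as plain scalars.
#
#     import httpx
#
#     async def clone_suppliers(virtual_tenant_id: str, session_id: str) -> dict:
#         async with httpx.AsyncClient(base_url="http://suppliers:8000") as client:
#             resp = await client.post(
#                 "/internal/demo/clone",
#                 params={
#                     "base_tenant_id": DEMO_TENANT_SAN_PABLO,
#                     "virtual_tenant_id": virtual_tenant_id,
#                     "demo_account_type": "bakery",
#                     "session_id": session_id,
#                 },
#                 headers={"X-Internal-API-Key": "dev-internal-key-change-in-production"},
#             )
#             resp.raise_for_status()
#             return resp.json()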