# bakery-ia/services/procurement/app/api/internal_demo.py
"""
Internal Demo Cloning API for Procurement Service
Service-to-service endpoint for cloning procurement and purchase order data
"""
from fastapi import APIRouter, Depends, HTTPException, Header
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, delete, func
import structlog
import uuid
from datetime import datetime, timezone, timedelta, date
from typing import Optional
import os
import json
from pathlib import Path
from app.core.database import get_db
from app.models.procurement_plan import ProcurementPlan, ProcurementRequirement
from app.models.purchase_order import PurchaseOrder, PurchaseOrderItem
from app.models.replenishment import ReplenishmentPlan, ReplenishmentPlanItem
from shared.utils.demo_dates import adjust_date_for_demo, resolve_time_marker
from shared.messaging import RabbitMQClient, UnifiedEventPublisher
from sqlalchemy.orm import selectinload
from shared.schemas.reasoning_types import (
create_po_reasoning_low_stock,
create_po_reasoning_supplier_contract
)
from app.core.config import settings
logger = structlog.get_logger()
router = APIRouter(prefix="/internal/demo", tags=["internal"])
# Base demo tenant IDs
DEMO_TENANT_PROFESSIONAL = "a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6"
def verify_internal_api_key(x_internal_api_key: Optional[str] = Header(None)):
"""Verify internal API key for service-to-service communication"""
if x_internal_api_key != settings.INTERNAL_API_KEY:
logger.warning("Unauthorized internal API access attempted")
raise HTTPException(status_code=403, detail="Invalid internal API key")
return True
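# FastAPI maps the `x_internal_api_key` parameter above to the `X-Internal-API-Key`
# request header (underscores are converted to hyphens and header matching is
# case-insensitive), so callers simply send that header with the shared key, e.g.
# headers={"X-Internal-API-Key": settings.INTERNAL_API_KEY} on the calling side.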
@router.post("/clone")
async def clone_demo_data(
base_tenant_id: str,
virtual_tenant_id: str,
demo_account_type: str,
session_id: Optional[str] = None,
session_created_at: Optional[str] = None,
db: AsyncSession = Depends(get_db),
_: bool = Depends(verify_internal_api_key)
):
"""
Clone procurement service data for a virtual demo tenant
Loads seed data from JSON files and creates:
- Purchase orders with line items
- Procurement plans with requirements (if in seed data)
- Replenishment plans with items (if in seed data)
- Adjusts dates to recent timeframe
Args:
base_tenant_id: Template tenant UUID to clone from
virtual_tenant_id: Target virtual tenant UUID
demo_account_type: Type of demo account
        session_id: Originating session ID for tracing
        session_created_at: ISO-8601 timestamp of session creation, used to anchor date adjustment
Returns:
Cloning status and record counts
"""
start_time = datetime.now(timezone.utc)
# Parse session creation time for date adjustment
if session_created_at:
try:
session_time = datetime.fromisoformat(session_created_at.replace('Z', '+00:00'))
except (ValueError, AttributeError):
session_time = start_time
else:
session_time = start_time
logger.info(
"Starting procurement data cloning from seed files",
base_tenant_id=base_tenant_id,
virtual_tenant_id=virtual_tenant_id,
demo_account_type=demo_account_type,
session_id=session_id,
session_created_at=session_created_at
)
try:
# Validate UUIDs
base_uuid = uuid.UUID(base_tenant_id)
virtual_uuid = uuid.UUID(virtual_tenant_id)
# Track cloning statistics
stats = {
"procurement_plans": 0,
"procurement_requirements": 0,
"purchase_orders": 0,
"purchase_order_items": 0,
"replenishment_plans": 0,
"replenishment_items": 0
}
def parse_date_field(date_value, session_time, field_name="date"):
"""Parse date field, handling both ISO strings and BASE_TS markers"""
if not date_value:
return None
# Check if it's a BASE_TS marker
if isinstance(date_value, str) and date_value.startswith("BASE_TS"):
try:
return resolve_time_marker(date_value, session_time)
except ValueError as e:
logger.warning(
f"Invalid BASE_TS marker in {field_name}",
marker=date_value,
error=str(e)
)
return None
# Handle regular ISO date strings
try:
return adjust_date_for_demo(
datetime.fromisoformat(date_value.replace('Z', '+00:00')),
session_time
)
except (ValueError, AttributeError) as e:
logger.warning(
f"Invalid date format in {field_name}",
date_value=date_value,
error=str(e)
)
return None
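        # Illustrative behaviour of parse_date_field (a sketch; the exact marker grammar is
        # owned by shared.utils.demo_dates.resolve_time_marker, so the marker string below is
        # only an assumed example):
        #   parse_date_field("2024-06-01T08:00:00Z", session_time)  # ISO date, re-anchored via adjust_date_for_demo()
        #   parse_date_field("BASE_TS+2d", session_time)            # marker, resolved relative to the session time
        #   parse_date_field(None, session_time)                    # -> None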
# Load seed data from JSON files
try:
from shared.utils.seed_data_paths import get_seed_data_path
if demo_account_type == "professional":
json_file = get_seed_data_path("professional", "07-procurement.json")
elif demo_account_type == "enterprise":
json_file = get_seed_data_path("enterprise", "07-procurement.json")
else:
raise ValueError(f"Invalid demo account type: {demo_account_type}")
except ImportError:
# Fallback to original path
seed_data_dir = Path(__file__).parent.parent.parent.parent / "infrastructure" / "seed-data"
if demo_account_type == "professional":
json_file = seed_data_dir / "professional" / "07-procurement.json"
elif demo_account_type == "enterprise":
json_file = seed_data_dir / "enterprise" / "parent" / "07-procurement.json"
else:
raise ValueError(f"Invalid demo account type: {demo_account_type}")
if not json_file.exists():
raise HTTPException(
status_code=404,
detail=f"Seed data file not found: {json_file}"
)
# Load JSON data
with open(json_file, 'r', encoding='utf-8') as f:
seed_data = json.load(f)
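        # Assumed top-level shape of 07-procurement.json, inferred from the keys read below:
        # {
        #   "purchase_orders":      [{"id": "...", "po_number": "...", "status": "...", ...}],
        #   "purchase_order_items": [{"id": "...", "purchase_order_id": "...", ...}],
        #   "procurement_plans":    [...],
        #   "replenishment_plans":  [...]
        # }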
logger.info(
"Loaded procurement seed data",
purchase_orders=len(seed_data.get('purchase_orders', [])),
purchase_order_items=len(seed_data.get('purchase_order_items', [])),
procurement_plans=len(seed_data.get('procurement_plans', []))
)
# Load Purchase Orders from seed data
order_id_map = {}
for po_data in seed_data.get('purchase_orders', []):
# Transform IDs using XOR
from shared.utils.demo_id_transformer import transform_id
try:
logger.debug("Processing purchase order", po_id=po_data.get('id'), po_number=po_data.get('po_number'))
po_uuid = uuid.UUID(po_data['id'])
transformed_id = transform_id(po_data['id'], virtual_uuid)
except ValueError as e:
logger.error("Failed to parse purchase order UUID",
po_id=po_data.get('id'),
po_number=po_data.get('po_number'),
error=str(e))
continue
            order_id_map[po_uuid] = transformed_id
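            # The transform is deterministic per virtual tenant, so the same seed UUID always maps
            # to the same cloned UUID; order_id_map records that mapping so the purchase_order_id
            # references on the items cloned further down stay consistent with their parent orders.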
# Adjust dates relative to session creation time
# FIX: Use current UTC time for future dates (expected delivery)
current_time = datetime.now(timezone.utc)
logger.debug("Parsing dates for PO",
po_number=po_data.get('po_number'),
order_date_raw=po_data.get('order_date') or po_data.get('order_date_offset_days'),
required_delivery_raw=po_data.get('required_delivery_date') or po_data.get('required_delivery_date_offset_days'))
# Handle both direct dates and offset-based dates
if 'order_date_offset_days' in po_data:
adjusted_order_date = session_time + timedelta(days=po_data['order_date_offset_days'])
else:
adjusted_order_date = parse_date_field(po_data.get('order_date'), session_time, "order_date") or session_time
if 'required_delivery_date_offset_days' in po_data:
adjusted_required_delivery = session_time + timedelta(days=po_data['required_delivery_date_offset_days'])
else:
adjusted_required_delivery = parse_date_field(po_data.get('required_delivery_date'), session_time, "required_delivery_date")
if 'estimated_delivery_date_offset_days' in po_data:
adjusted_estimated_delivery = session_time + timedelta(days=po_data['estimated_delivery_date_offset_days'])
else:
adjusted_estimated_delivery = parse_date_field(po_data.get('estimated_delivery_date'), session_time, "estimated_delivery_date")
# Calculate expected delivery date (use estimated delivery if not specified separately)
# FIX: Use current UTC time for future delivery dates
if 'expected_delivery_date_offset_days' in po_data:
adjusted_expected_delivery = current_time + timedelta(days=po_data['expected_delivery_date_offset_days'])
else:
adjusted_expected_delivery = adjusted_estimated_delivery # Fallback to estimated delivery
logger.debug("Dates parsed successfully",
po_number=po_data.get('po_number'),
order_date=adjusted_order_date,
required_delivery=adjusted_required_delivery)
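            # Worked example of the offset-based dates handled above (values are illustrative):
            #   "order_date_offset_days": -7             -> session_time - 7 days (in the past)
            #   "expected_delivery_date_offset_days": 2  -> current UTC time + 2 days (in the future)
            # Absolute ISO dates instead go through parse_date_field(), which re-anchors them
            # to the demo timeframe.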
# Generate a system user UUID for audit fields (demo purposes)
system_user_id = uuid.uuid4()
# Use status directly from JSON - JSON files should contain valid enum values
# Valid values: draft, pending_approval, approved, sent_to_supplier, confirmed,
# partially_received, completed, cancelled, disputed
raw_status = po_data.get('status', 'draft')
# Validate that the status is a valid enum value
valid_statuses = {'draft', 'pending_approval', 'approved', 'sent_to_supplier',
'confirmed', 'partially_received', 'completed', 'cancelled', 'disputed'}
if raw_status not in valid_statuses:
logger.warning(
"Invalid status value in seed data, using default 'draft'",
invalid_status=raw_status,
po_number=po_data.get('po_number'),
valid_options=sorted(valid_statuses)
)
raw_status = 'draft'
            # Create new PurchaseOrder (prefix the PO number with the session id, when available,
            # so numbers stay unique across concurrent demo sessions)
            base_po_number = po_data.get('po_number', f"PO-{uuid.uuid4().hex[:8].upper()}")
            new_order = PurchaseOrder(
                id=str(transformed_id),
                tenant_id=virtual_uuid,
                po_number=f"{session_id[:8]}-{base_po_number}" if session_id else base_po_number,
supplier_id=po_data.get('supplier_id'),
order_date=adjusted_order_date,
required_delivery_date=adjusted_required_delivery,
estimated_delivery_date=adjusted_estimated_delivery,
expected_delivery_date=adjusted_expected_delivery,
status=raw_status,
priority=po_data.get('priority', 'normal').lower() if po_data.get('priority') else 'normal',
subtotal=po_data.get('subtotal', 0.0),
tax_amount=po_data.get('tax_amount', 0.0),
shipping_cost=po_data.get('shipping_cost', 0.0),
discount_amount=po_data.get('discount_amount', 0.0),
total_amount=po_data.get('total_amount', 0.0),
currency=po_data.get('currency', 'EUR'),
delivery_address=po_data.get('delivery_address'),
delivery_instructions=po_data.get('delivery_instructions'),
delivery_contact=po_data.get('delivery_contact'),
delivery_phone=po_data.get('delivery_phone'),
requires_approval=po_data.get('requires_approval', False),
auto_approved=po_data.get('auto_approved', False),
auto_approval_rule_id=po_data.get('auto_approval_rule_id') if po_data.get('auto_approval_rule_id') and len(po_data.get('auto_approval_rule_id', '')) >= 32 else None,
rejection_reason=po_data.get('rejection_reason'),
sent_to_supplier_at=parse_date_field(po_data.get('sent_to_supplier_at'), session_time, "sent_to_supplier_at"),
supplier_confirmation_date=parse_date_field(po_data.get('supplier_confirmation_date'), session_time, "supplier_confirmation_date"),
supplier_reference=po_data.get('supplier_reference'),
notes=po_data.get('notes'),
internal_notes=po_data.get('internal_notes'),
terms_and_conditions=po_data.get('terms_and_conditions'),
reasoning_data=po_data.get('reasoning_data'),
created_at=session_time,
updated_at=session_time,
created_by=system_user_id,
updated_by=system_user_id
)
            # If the model exposes expected_delivery_date, recompute it relative to the order date
            # (this overrides the current-time-based value set in the constructor above)
if hasattr(PurchaseOrder, 'expected_delivery_date'):
if 'expected_delivery_date_offset_days' in po_data:
# Handle offset-based expected delivery dates
expected_delivery = adjusted_order_date + timedelta(
days=po_data['expected_delivery_date_offset_days']
)
else:
expected_delivery = adjusted_estimated_delivery
new_order.expected_delivery_date = expected_delivery
db.add(new_order)
stats["purchase_orders"] += 1
# Load Purchase Order Items from seed data
for po_item_data in seed_data.get('purchase_order_items', []):
# Transform IDs
from shared.utils.demo_id_transformer import transform_id
try:
item_uuid = uuid.UUID(po_item_data['id'])
transformed_id = transform_id(po_item_data['id'], virtual_uuid)
except ValueError as e:
logger.error("Failed to parse purchase order item UUID",
item_id=po_item_data['id'],
error=str(e))
continue
# Map purchase_order_id if it exists in our map
po_id_value = po_item_data.get('purchase_order_id')
if po_id_value:
po_id_value = order_id_map.get(uuid.UUID(po_id_value), uuid.UUID(po_id_value))
new_item = PurchaseOrderItem(
id=str(transformed_id),
tenant_id=virtual_uuid,
purchase_order_id=str(po_id_value) if po_id_value else None,
inventory_product_id=po_item_data.get('inventory_product_id'),
product_name=po_item_data.get('product_name'),
product_code=po_item_data.get('product_code'), # Use product_code directly from JSON
ordered_quantity=po_item_data.get('ordered_quantity', 0.0),
unit_of_measure=po_item_data.get('unit_of_measure'),
unit_price=po_item_data.get('unit_price', 0.0),
line_total=po_item_data.get('line_total', 0.0),
received_quantity=po_item_data.get('received_quantity', 0.0),
remaining_quantity=po_item_data.get('remaining_quantity', po_item_data.get('ordered_quantity', 0.0)),
quality_requirements=po_item_data.get('quality_requirements'),
item_notes=po_item_data.get('item_notes'),
created_at=session_time,
updated_at=session_time
)
db.add(new_item)
stats["purchase_order_items"] += 1
# Load Procurement Plans from seed data (if any)
for plan_data in seed_data.get('procurement_plans', []):
# Transform IDs
from shared.utils.demo_id_transformer import transform_id
try:
plan_uuid = uuid.UUID(plan_data['id'])
transformed_id = transform_id(plan_data['id'], virtual_uuid)
except ValueError as e:
logger.error("Failed to parse procurement plan UUID",
plan_id=plan_data['id'],
error=str(e))
continue
# Adjust dates
adjusted_plan_date = parse_date_field(plan_data.get('plan_date'), session_time, "plan_date")
new_plan = ProcurementPlan(
id=str(transformed_id),
tenant_id=virtual_uuid,
plan_number=plan_data.get('plan_number', f"PROC-{uuid.uuid4().hex[:8].upper()}"),
plan_date=adjusted_plan_date,
plan_period_start=parse_date_field(plan_data.get('plan_period_start'), session_time, "plan_period_start"),
plan_period_end=parse_date_field(plan_data.get('plan_period_end'), session_time, "plan_period_end"),
planning_horizon_days=plan_data.get('planning_horizon_days'),
status=plan_data.get('status', 'draft'),
plan_type=plan_data.get('plan_type'),
priority=plan_data.get('priority', 'normal'),
business_model=plan_data.get('business_model'),
procurement_strategy=plan_data.get('procurement_strategy'),
total_requirements=plan_data.get('total_requirements', 0),
total_estimated_cost=plan_data.get('total_estimated_cost', 0.0),
total_approved_cost=plan_data.get('total_approved_cost', 0.0),
cost_variance=plan_data.get('cost_variance', 0.0),
created_at=session_time,
updated_at=session_time
)
db.add(new_plan)
stats["procurement_plans"] += 1
# Load Replenishment Plans from seed data (if any)
for replan_data in seed_data.get('replenishment_plans', []):
# Transform IDs
from shared.utils.demo_id_transformer import transform_id
try:
replan_uuid = uuid.UUID(replan_data['id'])
transformed_id = transform_id(replan_data['id'], virtual_uuid)
except ValueError as e:
logger.error("Failed to parse replenishment plan UUID",
replan_id=replan_data['id'],
error=str(e))
continue
# Adjust dates
adjusted_plan_date = parse_date_field(replan_data.get('plan_date'), session_time, "plan_date")
new_replan = ReplenishmentPlan(
id=str(transformed_id),
tenant_id=virtual_uuid,
plan_number=replan_data.get('plan_number', f"REPL-{uuid.uuid4().hex[:8].upper()}"),
plan_date=adjusted_plan_date,
plan_period_start=parse_date_field(replan_data.get('plan_period_start'), session_time, "plan_period_start"),
plan_period_end=parse_date_field(replan_data.get('plan_period_end'), session_time, "plan_period_end"),
planning_horizon_days=replan_data.get('planning_horizon_days'),
status=replan_data.get('status', 'draft'),
plan_type=replan_data.get('plan_type'),
priority=replan_data.get('priority', 'normal'),
business_model=replan_data.get('business_model'),
total_items=replan_data.get('total_items', 0),
total_estimated_cost=replan_data.get('total_estimated_cost', 0.0),
created_at=session_time,
updated_at=session_time
)
db.add(new_replan)
stats["replenishment_plans"] += 1
# Commit all loaded data
await db.commit()
# Calculate total records
total_records = (stats["procurement_plans"] + stats["procurement_requirements"] +
stats["purchase_orders"] + stats["purchase_order_items"] +
stats["replenishment_plans"] + stats["replenishment_items"])
duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
logger.info(
"Procurement data loading from seed files completed",
virtual_tenant_id=virtual_tenant_id,
total_records=total_records,
stats=stats,
duration_ms=duration_ms
)
return {
"service": "procurement",
"status": "completed",
"records_cloned": total_records,
"duration_ms": duration_ms,
"details": stats
}
    except ValueError as e:
        # Covers malformed UUIDs as well as an unsupported demo_account_type
        logger.error("Invalid clone request parameters", error=str(e))
        raise HTTPException(status_code=400, detail=f"Invalid request parameters: {str(e)}")
    except HTTPException:
        # Let HTTP errors raised explicitly above (e.g. the 404 for a missing seed file) propagate
        raise
    except Exception as e:
logger.error(
"Failed to load procurement seed data",
error=str(e),
virtual_tenant_id=virtual_tenant_id,
exc_info=True
)
# Rollback on error
await db.rollback()
return {
"service": "procurement",
"status": "failed",
"records_cloned": 0,
"duration_ms": int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000),
"error": str(e)
}
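# Note: unexpected failures above are reported back as a "failed" payload rather than as an
# HTTP error, presumably so the demo orchestrator can aggregate per-service clone results;
# callers should therefore check the "status" field of the response.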
@router.get("/clone/health")
async def clone_health_check(_: bool = Depends(verify_internal_api_key)):
"""
Health check for internal cloning endpoint
Used by orchestrator to verify service availability
"""
return {
"service": "procurement",
"clone_endpoint": "available",
"version": "2.0.0"
}
@router.delete("/tenant/{virtual_tenant_id}")
async def delete_demo_data(
virtual_tenant_id: str,
db: AsyncSession = Depends(get_db),
_: bool = Depends(verify_internal_api_key)
):
"""Delete all procurement data for a virtual demo tenant"""
logger.info("Deleting procurement data for virtual tenant", virtual_tenant_id=virtual_tenant_id)
start_time = datetime.now(timezone.utc)
try:
virtual_uuid = uuid.UUID(virtual_tenant_id)
        # Count records before deleting them
        po_count = await db.scalar(select(func.count()).select_from(PurchaseOrder).where(PurchaseOrder.tenant_id == virtual_uuid))
        po_item_count = await db.scalar(select(func.count()).select_from(PurchaseOrderItem).where(PurchaseOrderItem.tenant_id == virtual_uuid))
        plan_count = await db.scalar(select(func.count()).select_from(ProcurementPlan).where(ProcurementPlan.tenant_id == virtual_uuid))
        replan_count = await db.scalar(select(func.count()).select_from(ReplenishmentPlan).where(ReplenishmentPlan.tenant_id == virtual_uuid))
        # Delete children before parents to respect foreign-key constraints
await db.execute(delete(PurchaseOrderItem).where(PurchaseOrderItem.tenant_id == virtual_uuid))
await db.execute(delete(PurchaseOrder).where(PurchaseOrder.tenant_id == virtual_uuid))
await db.execute(delete(ProcurementRequirement).where(ProcurementRequirement.tenant_id == virtual_uuid))
await db.execute(delete(ProcurementPlan).where(ProcurementPlan.tenant_id == virtual_uuid))
await db.execute(delete(ReplenishmentPlanItem).where(ReplenishmentPlanItem.tenant_id == virtual_uuid))
await db.execute(delete(ReplenishmentPlan).where(ReplenishmentPlan.tenant_id == virtual_uuid))
await db.commit()
duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
logger.info("Procurement data deleted successfully", virtual_tenant_id=virtual_tenant_id, duration_ms=duration_ms)
return {
"service": "procurement",
"status": "deleted",
"virtual_tenant_id": virtual_tenant_id,
"records_deleted": {
"purchase_orders": po_count,
"purchase_order_items": po_item_count,
"procurement_plans": plan_count,
"replenishment_plans": replan_count,
"total": po_count + po_item_count + plan_count + replan_count
},
"duration_ms": duration_ms
}
except Exception as e:
logger.error("Failed to delete procurement data", error=str(e), exc_info=True)
await db.rollback()
raise HTTPException(status_code=500, detail=str(e))
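# Illustrative cleanup call for a finished demo session (a sketch; host, port, and mount
# path are assumptions, not defined in this module):
#
#   async with httpx.AsyncClient(base_url="http://procurement:8000") as client:
#       resp = await client.delete(
#           f"/internal/demo/tenant/{virtual_tenant_id}",
#           headers={"X-Internal-API-Key": "<INTERNAL_API_KEY>"},
#       )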