demo seed change
@@ -8,7 +8,7 @@ Requires: Professional or Enterprise subscription tier
 from datetime import date, datetime, timedelta
 from typing import Optional
 from uuid import UUID
-from fastapi import APIRouter, Depends, HTTPException, Path, Query
+from fastapi import APIRouter, Depends, HTTPException, Path, Query, Request
 import structlog

 from shared.auth.decorators import get_current_user_dep
@@ -25,10 +25,11 @@ route_builder = RouteBuilder('production')
 router = APIRouter(tags=["production-analytics"])


-def get_production_service() -> ProductionService:
+def get_production_service(request: Request) -> ProductionService:
     """Dependency injection for production service"""
     from app.core.database import database_manager
-    return ProductionService(database_manager, settings)
+    notification_service = getattr(request.app.state, 'notification_service', None)
+    return ProductionService(database_manager, settings, notification_service)


 # ===== ANALYTICS ENDPOINTS (Professional/Enterprise Only) =====
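Note: the `get_production_service` rewrite above repeats verbatim in every router file touched by this commit. A minimal runnable sketch of the pattern, assuming the notification service is stashed on `app.state` at startup (the class and route names below are placeholders, not code from this repo):

# FastAPI injects the active Request into any dependency that declares it,
# giving the dependency access to singletons stored on app.state at startup.
from fastapi import Depends, FastAPI, Request

app = FastAPI()

class NotificationService:  # placeholder stand-in for the real service
    async def send(self, message: str) -> None: ...

@app.on_event("startup")
async def startup() -> None:
    app.state.notification_service = NotificationService()

def get_notification_service(request: Request) -> NotificationService | None:
    # getattr keeps the dependency optional: routes still work when the
    # service was never initialized (e.g. under test)
    return getattr(request.app.state, "notification_service", None)

@app.get("/ping")
async def ping(svc: NotificationService | None = Depends(get_notification_service)) -> dict:
    return {"has_notifications": svc is not None}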
@@ -13,6 +13,7 @@ from pydantic import BaseModel, Field
 import structlog
 import asyncio

+from fastapi import Request
 from app.services.production_service import ProductionService
 from app.core.config import settings
 from shared.auth.decorators import get_current_user_dep
@@ -21,10 +22,11 @@ router = APIRouter(tags=["production-batch"])
 logger = structlog.get_logger()


-def get_production_service() -> ProductionService:
+def get_production_service(request: Request) -> ProductionService:
     """Dependency injection for production service"""
     from app.core.database import database_manager
-    return ProductionService(database_manager, settings)
+    notification_service = getattr(request.app.state, 'notification_service', None)
+    return ProductionService(database_manager, settings, notification_service)


 class ProductionSummaryBatchRequest(BaseModel):
@@ -3,7 +3,7 @@
 Equipment API - CRUD operations on Equipment model
 """

-from fastapi import APIRouter, Depends, HTTPException, Path, Query
+from fastapi import APIRouter, Depends, HTTPException, Path, Query, Request
 from typing import Optional
 from uuid import UUID
 import structlog
@@ -33,10 +33,11 @@ router = APIRouter(tags=["production-equipment"])
 audit_logger = create_audit_logger("production-service", AuditLog)


-def get_production_service() -> ProductionService:
+def get_production_service(request: Request) -> ProductionService:
     """Dependency injection for production service"""
     from app.core.database import database_manager
-    return ProductionService(database_manager, settings)
+    notification_service = getattr(request.app.state, 'notification_service', None)
+    return ProductionService(database_manager, settings, notification_service)


 @router.get(
@@ -2,6 +2,9 @@
 """
 Internal API for triggering production alerts.
 Used by demo session cloning to generate realistic production delay alerts.
+
+URL Pattern: /api/v1/tenants/{tenant_id}/production/internal/alerts/trigger
+This follows the tenant-scoped pattern so gateway can proxy correctly.
 """

 from fastapi import APIRouter, HTTPException, Request, Path
@@ -13,16 +16,20 @@ logger = structlog.get_logger()
 router = APIRouter()


-@router.post("/api/internal/production-alerts/trigger/{tenant_id}")
+# New URL pattern: tenant-scoped so gateway proxies to production service correctly
+@router.post("/api/v1/tenants/{tenant_id}/production/internal/alerts/trigger")
 async def trigger_production_alerts(
     tenant_id: UUID = Path(..., description="Tenant ID to check production for"),
     request: Request = None
 ) -> dict:
     """
-    Trigger production alert checks for a specific tenant (internal use only).
+    Trigger comprehensive production alert checks for a specific tenant (internal use only).

     This endpoint is called by the demo session cloning process after production
-    batches are seeded to generate realistic production delay alerts.
+    batches are seeded to generate realistic production alerts including:
+    - Production delays
+    - Equipment maintenance alerts
+    - Batch start delays

     Security: Protected by X-Internal-Service header check.
     """
@@ -35,40 +42,36 @@ async def trigger_production_alerts(
             detail="This endpoint is for internal service use only"
         )

-        # Get production alert service from app state
-        production_alert_service = getattr(request.app.state, 'production_alert_service', None)
+        # Get production scheduler from app state
+        production_scheduler = getattr(request.app.state, 'production_scheduler', None)

-        if not production_alert_service:
-            logger.error("Production alert service not initialized")
+        if not production_scheduler:
+            logger.error("Production scheduler not initialized")
             raise HTTPException(
                 status_code=500,
-                detail="Production alert service not available"
+                detail="Production scheduler not available"
             )

-        # Trigger production alert checks (checks all tenants, including this one)
-        logger.info("Triggering production alert checks", tenant_id=str(tenant_id))
-        await production_alert_service.check_production_delays()
+        # Trigger comprehensive production alert checks for the specific tenant
+        logger.info("Triggering comprehensive production alert checks", tenant_id=str(tenant_id))

-        # Return success (service checks all tenants, we can't get specific count)
-        result = {"total_alerts": 0, "message": "Production alert checks triggered"}
+        # Call the scheduler's manual trigger method
+        result = await production_scheduler.trigger_manual_check(tenant_id)

-        logger.info(
-            "Production alert checks completed",
-            tenant_id=str(tenant_id),
-            alerts_generated=result.get("total_alerts", 0)
-        )
+        if result.get("success", False):
+            logger.info(
+                "Production alert checks completed successfully",
+                tenant_id=str(tenant_id),
+                alerts_generated=result.get("alerts_generated", 0)
+            )
+        else:
+            logger.error(
+                "Production alert checks failed",
+                tenant_id=str(tenant_id),
+                error=result.get("error", "Unknown error")
+            )

-        return {
-            "success": True,
-            "tenant_id": str(tenant_id),
-            "alerts_generated": result.get("total_alerts", 0),
-            "breakdown": {
-                "critical": result.get("critical", 0),
-                "high": result.get("high", 0),
-                "medium": result.get("medium", 0),
-                "low": result.get("low", 0)
-            }
-        }
+        return result

     except HTTPException:
         raise
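Note: a hedged sketch of how a caller such as the demo-session service might hit this endpoint; the base URL is a placeholder, while the path and the `X-Internal-Service` header come from the code above:

# Hypothetical caller -- only the path and header name are taken from the diff.
import asyncio
from uuid import UUID

import httpx

async def trigger_alerts(tenant_id: UUID) -> dict:
    url = (
        "http://production-service:8000"  # placeholder host:port
        f"/api/v1/tenants/{tenant_id}/production/internal/alerts/trigger"
    )
    async with httpx.AsyncClient() as client:
        resp = await client.post(url, headers={"X-Internal-Service": "demo-session"})
        resp.raise_for_status()
        return resp.json()  # scheduler result: success flag plus alert counts

# asyncio.run(trigger_alerts(UUID("a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6")))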
@@ -8,9 +8,12 @@ from sqlalchemy.ext.asyncio import AsyncSession
 from sqlalchemy import select, delete, func
 import structlog
 import uuid
+from uuid import UUID
 from datetime import datetime, timezone, timedelta
 from typing import Optional, Dict, Any
 import os
+import json
+from pathlib import Path

 from app.core.database import get_db
 from app.models.production import (
@@ -19,12 +22,12 @@ from app.models.production import (
     ProductionStatus, ProductionPriority, ProcessStage,
     EquipmentStatus, EquipmentType
 )
-from shared.utils.demo_dates import adjust_date_for_demo, BASE_REFERENCE_DATE
+from shared.utils.demo_dates import adjust_date_for_demo, BASE_REFERENCE_DATE, resolve_time_marker

 from app.core.config import settings

 logger = structlog.get_logger()
-router = APIRouter(prefix="/internal/demo", tags=["internal"])
+router = APIRouter()

 # Base demo tenant IDs
 DEMO_TENANT_PROFESSIONAL = "a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6"
@@ -38,7 +41,7 @@ def verify_internal_api_key(x_internal_api_key: Optional[str] = Header(None)):
     return True


-@router.post("/clone")
+@router.post("/internal/demo/clone")
 async def clone_demo_data(
     base_tenant_id: str,
     virtual_tenant_id: str,
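Note: dropping `prefix="/internal/demo"` from the router (previous hunk) and folding it into each route path leaves the mounted URL unchanged; a self-contained sketch of the equivalence:

# The prefix removed from APIRouter(...) reappears verbatim in the route path,
# so the final mounted URL is identical either way.
from fastapi import APIRouter

router = APIRouter()

@router.post("/internal/demo/clone")
async def clone_demo_data() -> dict:  # simplified signature for the sketch
    return {"ok": True}

assert router.routes[0].path == "/internal/demo/clone"

The practical benefit is that a prefix-matching gateway can proxy the full URL to this service without knowing its internal router layout.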
@@ -91,12 +94,11 @@ async def clone_demo_data(

     try:
         # Validate UUIDs
         base_uuid = uuid.UUID(base_tenant_id)
         virtual_uuid = uuid.UUID(virtual_tenant_id)

         # Track cloning statistics
         stats = {
-            "production_batches": 0,
+            "batches": 0,
             "production_schedules": 0,
             "production_capacity": 0,
             "quality_check_templates": 0,
@@ -105,63 +107,137 @@ async def clone_demo_data(
             "alerts_generated": 0
         }

-        # ID mappings
-        batch_id_map = {}
-        template_id_map = {}
-        equipment_id_map = {}
+        def parse_date_field(date_value, field_name="date"):
+            """Parse date field, handling both ISO strings and BASE_TS markers"""
+            if not date_value:
+                return None
+
+            # Check if it's a BASE_TS marker
+            if isinstance(date_value, str) and date_value.startswith("BASE_TS"):
+                try:
+                    return resolve_time_marker(date_value, session_time)
+                except ValueError as e:
+                    logger.warning(
+                        f"Invalid BASE_TS marker in {field_name}",
+                        marker=date_value,
+                        error=str(e)
+                    )
+                    return None
+
+            # Handle regular ISO date strings
+            try:
+                return adjust_date_for_demo(
+                    datetime.fromisoformat(date_value.replace('Z', '+00:00')),
+                    session_time,
+                    BASE_REFERENCE_DATE
+                )
+            except (ValueError, AttributeError) as e:
+                logger.warning(
+                    f"Invalid date format in {field_name}",
+                    date_value=date_value,
+                    error=str(e)
+                )
+                return None

-        # Clone Equipment first (no dependencies)
-        result = await db.execute(
-            select(Equipment).where(Equipment.tenant_id == base_uuid)
-        )
-        base_equipment = result.scalars().all()
+        # Load seed data from JSON files
+        try:
+            from shared.utils.seed_data_paths import get_seed_data_path
+
+            if demo_account_type == "professional":
+                json_file = get_seed_data_path("professional", "06-production.json")
+            elif demo_account_type == "enterprise":
+                json_file = get_seed_data_path("enterprise", "06-production.json")
+            else:
+                raise ValueError(f"Invalid demo account type: {demo_account_type}")

-        logger.info(
-            "Found equipment to clone",
-            count=len(base_equipment),
-            base_tenant=str(base_uuid)
-        )
+        except ImportError:
+            # Fallback to original path
+            seed_data_dir = Path(__file__).parent.parent.parent.parent / "infrastructure" / "seed-data"
+            if demo_account_type == "professional":
+                json_file = seed_data_dir / "professional" / "06-production.json"
+            elif demo_account_type == "enterprise":
+                json_file = seed_data_dir / "enterprise" / "parent" / "06-production.json"
+            else:
+                raise ValueError(f"Invalid demo account type: {demo_account_type}")

-        for equipment in base_equipment:
-            new_equipment_id = uuid.uuid4()
-            equipment_id_map[equipment.id] = new_equipment_id
+        if not json_file.exists():
+            raise HTTPException(
+                status_code=404,
+                detail=f"Seed data file not found: {json_file}"
+            )
+
+        # Load JSON data
+        with open(json_file, 'r', encoding='utf-8') as f:
+            seed_data = json.load(f)
+
+        # Create Equipment first (no dependencies)
+        for equipment_data in seed_data.get('equipment', []):
+            # Transform equipment ID using XOR
+            from shared.utils.demo_id_transformer import transform_id
+            try:
+                equipment_uuid = UUID(equipment_data['id'])
+                transformed_id = transform_id(equipment_data['id'], virtual_uuid)
+            except ValueError as e:
+                logger.error("Failed to parse equipment UUID",
+                             equipment_id=equipment_data['id'],
+                             error=str(e))
+                raise HTTPException(
+                    status_code=400,
+                    detail=f"Invalid UUID format in equipment data: {str(e)}"
+                )

             # Adjust dates relative to session creation time
             adjusted_install_date = adjust_date_for_demo(
-                equipment.install_date, session_time, BASE_REFERENCE_DATE
+                datetime.fromisoformat(equipment_data['install_date'].replace('Z', '+00:00')),
+                session_time,
+                BASE_REFERENCE_DATE
             )
             adjusted_last_maintenance = adjust_date_for_demo(
-                equipment.last_maintenance_date, session_time, BASE_REFERENCE_DATE
+                datetime.fromisoformat(equipment_data['last_maintenance_date'].replace('Z', '+00:00')),
+                session_time,
+                BASE_REFERENCE_DATE
             )
             adjusted_next_maintenance = adjust_date_for_demo(
-                equipment.next_maintenance_date, session_time, BASE_REFERENCE_DATE
+                datetime.fromisoformat(equipment_data['next_maintenance_date'].replace('Z', '+00:00')),
+                session_time,
+                BASE_REFERENCE_DATE
             )
+            adjusted_created_at = adjust_date_for_demo(
+                datetime.fromisoformat(equipment_data['created_at'].replace('Z', '+00:00')),
+                session_time,
+                BASE_REFERENCE_DATE
+            )
+            adjusted_updated_at = adjust_date_for_demo(
+                datetime.fromisoformat(equipment_data['updated_at'].replace('Z', '+00:00')),
+                session_time,
+                BASE_REFERENCE_DATE
+            )

             new_equipment = Equipment(
-                id=new_equipment_id,
+                id=str(transformed_id),
                 tenant_id=virtual_uuid,
-                name=equipment.name,
-                type=equipment.type,
-                model=equipment.model,
-                serial_number=equipment.serial_number,
-                location=equipment.location,
-                status=equipment.status,
+                name=equipment_data['name'],
+                type=equipment_data['type'],
+                model=equipment_data['model'],
+                serial_number=equipment_data.get('serial_number'),
+                location=equipment_data['location'],
+                status=equipment_data['status'],
                 install_date=adjusted_install_date,
                 last_maintenance_date=adjusted_last_maintenance,
                 next_maintenance_date=adjusted_next_maintenance,
-                maintenance_interval_days=equipment.maintenance_interval_days,
-                efficiency_percentage=equipment.efficiency_percentage,
-                uptime_percentage=equipment.uptime_percentage,
-                energy_usage_kwh=equipment.energy_usage_kwh,
-                power_kw=equipment.power_kw,
-                capacity=equipment.capacity,
-                weight_kg=equipment.weight_kg,
-                current_temperature=equipment.current_temperature,
-                target_temperature=equipment.target_temperature,
-                is_active=equipment.is_active,
-                notes=equipment.notes,
-                created_at=session_time,
-                updated_at=session_time
+                maintenance_interval_days=equipment_data.get('maintenance_interval_days'),
+                efficiency_percentage=equipment_data.get('efficiency_percentage'),
+                uptime_percentage=equipment_data.get('uptime_percentage'),
+                energy_usage_kwh=equipment_data.get('energy_usage_kwh'),
+                power_kw=equipment_data.get('power_kw'),
+                capacity=equipment_data.get('capacity'),
+                weight_kg=equipment_data.get('weight_kg'),
+                current_temperature=equipment_data.get('current_temperature'),
+                target_temperature=equipment_data.get('target_temperature'),
+                is_active=equipment_data.get('is_active', True),
+                notes=equipment_data.get('notes'),
+                created_at=adjusted_created_at,
+                updated_at=adjusted_updated_at
             )
             db.add(new_equipment)
             stats["equipment"] += 1
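Note: `parse_date_field` accepts two input shapes: ISO-8601 strings (shifted via `adjust_date_for_demo`) and `BASE_TS` markers (delegated to `resolve_time_marker`). The marker grammar is not visible in this diff; the sketch below assumes a hypothetical `BASE_TS+<n><unit>` offset syntax purely to illustrate the idea:

# Hypothetical re-implementation of marker resolution. The real grammar lives
# in shared.utils.demo_dates and is not shown in this diff.
import re
from datetime import datetime, timedelta

def resolve_time_marker_sketch(marker: str, session_time: datetime) -> datetime:
    if marker == "BASE_TS":
        return session_time
    m = re.fullmatch(r"BASE_TS([+-]\d+)([mhd])", marker)
    if m is None:
        raise ValueError(f"Unrecognized time marker: {marker}")
    unit = {"m": "minutes", "h": "hours", "d": "days"}[m.group(2)]
    return session_time + timedelta(**{unit: int(m.group(1))})

# resolve_time_marker_sketch("BASE_TS+2d", datetime(2024, 1, 1))
# -> datetime(2024, 1, 3, 0, 0)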
@@ -170,17 +246,17 @@ async def clone_demo_data(
         await db.flush()

-        # Clone Quality Check Templates
-        result = await db.execute(
-            select(QualityCheckTemplate).where(QualityCheckTemplate.tenant_id == base_uuid)
-        )
-        base_templates = result.scalars().all()
+        # Note: Quality check templates are not included in seed data
+        # They would need to be added to the production seed data if needed
+        template_id_map = {}
+        base_templates = []

         logger.info(
-            "Found quality check templates to clone",
-            count=len(base_templates),
-            base_tenant=str(base_uuid)
+            "No quality check templates to clone (not in seed data)",
+            count=len(base_templates)
         )

+        # Only create templates if they exist in base templates
         for template in base_templates:
             new_template_id = uuid.uuid4()
             template_id_map[template.id] = new_template_id
@@ -217,253 +293,333 @@ async def clone_demo_data(
         # Flush to get template IDs
         await db.flush()

-        # Clone Production Batches
-        result = await db.execute(
-            select(ProductionBatch).where(ProductionBatch.tenant_id == base_uuid)
-        )
-        base_batches = result.scalars().all()
+        # Clone Production Batches from seed data
+        batch_id_map = {}
+        for batch_data in seed_data.get('batches', []):
+            # Transform batch ID using XOR
+            from shared.utils.demo_id_transformer import transform_id
+            try:
+                batch_uuid = UUID(batch_data['id'])
+                transformed_id = transform_id(batch_data['id'], virtual_uuid)
+            except ValueError as e:
+                logger.error("Failed to parse batch UUID",
+                             batch_id=batch_data['id'],
+                             error=str(e))
+                raise HTTPException(
+                    status_code=400,
+                    detail=f"Invalid UUID format in batch data: {str(e)}"
+                )

-        logger.info(
-            "Found production batches to clone",
-            count=len(base_batches),
-            base_tenant=str(base_uuid)
-        )
-
-        for batch in base_batches:
-            new_batch_id = uuid.uuid4()
-            batch_id_map[batch.id] = new_batch_id
+            batch_id_map[UUID(batch_data['id'])] = transformed_id

             # Adjust dates relative to session creation time
-            adjusted_planned_start = adjust_date_for_demo(
-                batch.planned_start_time, session_time, BASE_REFERENCE_DATE
-            ) if batch.planned_start_time else None
-            adjusted_planned_end = adjust_date_for_demo(
-                batch.planned_end_time, session_time, BASE_REFERENCE_DATE
-            ) if batch.planned_end_time else None
-            adjusted_actual_start = adjust_date_for_demo(
-                batch.actual_start_time, session_time, BASE_REFERENCE_DATE
-            ) if batch.actual_start_time else None
-            adjusted_actual_end = adjust_date_for_demo(
-                batch.actual_end_time, session_time, BASE_REFERENCE_DATE
-            ) if batch.actual_end_time else None
-            adjusted_completed = adjust_date_for_demo(
-                batch.completed_at, session_time, BASE_REFERENCE_DATE
-            ) if batch.completed_at else None
+            adjusted_planned_start = parse_date_field(batch_data.get('planned_start_time'), "planned_start_time")
+            adjusted_planned_end = parse_date_field(batch_data.get('planned_end_time'), "planned_end_time")
+            adjusted_actual_start = parse_date_field(batch_data.get('actual_start_time'), "actual_start_time")
+            adjusted_actual_end = parse_date_field(batch_data.get('actual_end_time'), "actual_end_time")
+            adjusted_completed = parse_date_field(batch_data.get('completed_at'), "completed_at")
+            adjusted_created_at = parse_date_field(batch_data.get('created_at'), "created_at") or session_time
+            adjusted_updated_at = parse_date_field(batch_data.get('updated_at'), "updated_at") or adjusted_created_at
+
+            # Map status and priority enums
+            status_value = batch_data.get('status', 'PENDING')
+            if isinstance(status_value, str):
+                try:
+                    status_value = ProductionStatus[status_value]
+                except KeyError:
+                    status_value = ProductionStatus.PENDING
+
+            priority_value = batch_data.get('priority', 'MEDIUM')
+            if isinstance(priority_value, str):
+                try:
+                    priority_value = ProductionPriority[priority_value]
+                except KeyError:
+                    priority_value = ProductionPriority.MEDIUM
+
+            # Map process stage enum
+            process_stage_value = batch_data.get('current_process_stage')
+            if process_stage_value and isinstance(process_stage_value, str):
+                try:
+                    process_stage_value = ProcessStage[process_stage_value]
+                except KeyError:
+                    process_stage_value = None

             new_batch = ProductionBatch(
-                id=new_batch_id,
+                id=str(transformed_id),
                 tenant_id=virtual_uuid,
-                batch_number=f"BATCH-{uuid.uuid4().hex[:8].upper()}",  # New batch number
-                product_id=batch.product_id,  # Keep product reference
-                product_name=batch.product_name,
-                recipe_id=batch.recipe_id,  # Keep recipe reference
+                batch_number=f"{session_id[:8]}-{batch_data.get('batch_number', f'BATCH-{uuid.uuid4().hex[:8].upper()}')}",
+                product_id=batch_data.get('product_id'),
+                product_name=batch_data.get('product_name'),
+                recipe_id=batch_data.get('recipe_id'),
                 planned_start_time=adjusted_planned_start,
                 planned_end_time=adjusted_planned_end,
-                planned_quantity=batch.planned_quantity,
-                planned_duration_minutes=batch.planned_duration_minutes,
+                planned_quantity=batch_data.get('planned_quantity'),
+                planned_duration_minutes=batch_data.get('planned_duration_minutes'),
                 actual_start_time=adjusted_actual_start,
                 actual_end_time=adjusted_actual_end,
-                actual_quantity=batch.actual_quantity,
-                actual_duration_minutes=batch.actual_duration_minutes,
-                status=batch.status,
-                priority=batch.priority,
-                current_process_stage=batch.current_process_stage,
-                process_stage_history=batch.process_stage_history,
-                pending_quality_checks=batch.pending_quality_checks,
-                completed_quality_checks=batch.completed_quality_checks,
-                estimated_cost=batch.estimated_cost,
-                actual_cost=batch.actual_cost,
-                labor_cost=batch.labor_cost,
-                material_cost=batch.material_cost,
-                overhead_cost=batch.overhead_cost,
-                yield_percentage=batch.yield_percentage,
-                quality_score=batch.quality_score,
-                waste_quantity=batch.waste_quantity,
-                defect_quantity=batch.defect_quantity,
-                equipment_used=batch.equipment_used,
-                staff_assigned=batch.staff_assigned,
-                station_id=batch.station_id,
-                order_id=batch.order_id,
-                forecast_id=batch.forecast_id,
-                is_rush_order=batch.is_rush_order,
-                is_special_recipe=batch.is_special_recipe,
-                production_notes=batch.production_notes,
-                quality_notes=batch.quality_notes,
-                delay_reason=batch.delay_reason,
-                cancellation_reason=batch.cancellation_reason,
-                created_at=session_time,
-                updated_at=session_time,
+                actual_quantity=batch_data.get('actual_quantity'),
+                actual_duration_minutes=batch_data.get('actual_duration_minutes'),
+                status=status_value,
+                priority=priority_value,
+                current_process_stage=process_stage_value,
+                process_stage_history=batch_data.get('process_stage_history'),
+                pending_quality_checks=batch_data.get('pending_quality_checks'),
+                completed_quality_checks=batch_data.get('completed_quality_checks'),
+                estimated_cost=batch_data.get('estimated_cost'),
+                actual_cost=batch_data.get('actual_cost'),
+                labor_cost=batch_data.get('labor_cost'),
+                material_cost=batch_data.get('material_cost'),
+                overhead_cost=batch_data.get('overhead_cost'),
+                yield_percentage=batch_data.get('yield_percentage'),
+                quality_score=batch_data.get('quality_score'),
+                waste_quantity=batch_data.get('waste_quantity'),
+                defect_quantity=batch_data.get('defect_quantity'),
+                equipment_used=batch_data.get('equipment_used'),
+                staff_assigned=batch_data.get('staff_assigned'),
+                station_id=batch_data.get('station_id'),
+                order_id=batch_data.get('order_id'),
+                forecast_id=batch_data.get('forecast_id'),
+                is_rush_order=batch_data.get('is_rush_order', False),
+                is_special_recipe=batch_data.get('is_special_recipe', False),
+                production_notes=batch_data.get('production_notes'),
+                quality_notes=batch_data.get('quality_notes'),
+                delay_reason=batch_data.get('delay_reason'),
+                cancellation_reason=batch_data.get('cancellation_reason'),
+                created_at=adjusted_created_at,
+                updated_at=adjusted_updated_at,
                 completed_at=adjusted_completed
             )
             db.add(new_batch)
-            stats["production_batches"] += 1
+            stats["batches"] += 1

         # Flush to get batch IDs
         await db.flush()

-        # Clone Quality Checks
-        result = await db.execute(
-            select(QualityCheck).where(QualityCheck.tenant_id == base_uuid)
-        )
-        base_checks = result.scalars().all()
+        # Clone Quality Checks from seed data (if any)
+        for check_data in seed_data.get('quality_checks', []):
+            # Transform IDs
+            from shared.utils.demo_id_transformer import transform_id
+            try:
+                check_uuid = UUID(check_data['id'])
+                transformed_id = transform_id(check_data['id'], virtual_uuid)
+            except ValueError as e:
+                logger.error("Failed to parse check UUID",
+                             check_id=check_data['id'],
+                             error=str(e))
+                continue

-        logger.info(
-            "Found quality checks to clone",
-            count=len(base_checks),
-            base_tenant=str(base_uuid)
-        )
+            # Map batch_id if it exists in our map
+            batch_id_value = check_data.get('batch_id')
+            if batch_id_value:
+                batch_id_value = batch_id_map.get(UUID(batch_id_value), UUID(batch_id_value))

-        for check in base_checks:
-            new_batch_id = batch_id_map.get(check.batch_id, check.batch_id)
-            new_template_id = template_id_map.get(check.template_id, check.template_id) if check.template_id else None
+            # Map template_id if it exists
+            template_id_value = check_data.get('template_id')
+            if template_id_value:
+                template_id_value = template_id_map.get(UUID(template_id_value), UUID(template_id_value))

             # Adjust check time relative to session creation time
             adjusted_check_time = adjust_date_for_demo(
-                check.check_time, session_time, BASE_REFERENCE_DATE
-            ) if check.check_time else None
+                datetime.fromisoformat(check_data['check_time'].replace('Z', '+00:00')),
+                session_time,
+                BASE_REFERENCE_DATE
+            ) if check_data.get('check_time') else None
+
+            adjusted_created_at = adjust_date_for_demo(
+                datetime.fromisoformat(check_data['created_at'].replace('Z', '+00:00')),
+                session_time,
+                BASE_REFERENCE_DATE
+            )
+            adjusted_updated_at = adjust_date_for_demo(
+                datetime.fromisoformat(check_data['updated_at'].replace('Z', '+00:00')),
+                session_time,
+                BASE_REFERENCE_DATE
+            ) if check_data.get('updated_at') else adjusted_created_at

             new_check = QualityCheck(
-                id=uuid.uuid4(),
+                id=str(transformed_id),
                 tenant_id=virtual_uuid,
-                batch_id=new_batch_id,
-                template_id=new_template_id,
-                check_type=check.check_type,
-                process_stage=check.process_stage,
+                batch_id=str(batch_id_value) if batch_id_value else None,
+                template_id=str(template_id_value) if template_id_value else None,
+                check_type=check_data.get('check_type'),
+                process_stage=check_data.get('process_stage'),
                 check_time=adjusted_check_time,
-                checker_id=check.checker_id,
-                quality_score=check.quality_score,
-                pass_fail=check.pass_fail,
-                defect_count=check.defect_count,
-                defect_types=check.defect_types,
-                measured_weight=check.measured_weight,
-                measured_temperature=check.measured_temperature,
-                measured_moisture=check.measured_moisture,
-                measured_dimensions=check.measured_dimensions,
-                stage_specific_data=check.stage_specific_data,
-                target_weight=check.target_weight,
-                target_temperature=check.target_temperature,
-                target_moisture=check.target_moisture,
-                tolerance_percentage=check.tolerance_percentage,
-                within_tolerance=check.within_tolerance,
-                corrective_action_needed=check.corrective_action_needed,
-                corrective_actions=check.corrective_actions,
-                template_results=check.template_results,
-                criteria_scores=check.criteria_scores,
-                check_notes=check.check_notes,
-                photos_urls=check.photos_urls,
-                certificate_url=check.certificate_url,
-                created_at=session_time,
-                updated_at=session_time
+                checker_id=check_data.get('checker_id'),
+                quality_score=check_data.get('quality_score'),
+                pass_fail=check_data.get('pass_fail'),
+                defect_count=check_data.get('defect_count'),
+                defect_types=check_data.get('defect_types'),
+                measured_weight=check_data.get('measured_weight'),
+                measured_temperature=check_data.get('measured_temperature'),
+                measured_moisture=check_data.get('measured_moisture'),
+                measured_dimensions=check_data.get('measured_dimensions'),
+                stage_specific_data=check_data.get('stage_specific_data'),
+                target_weight=check_data.get('target_weight'),
+                target_temperature=check_data.get('target_temperature'),
+                target_moisture=check_data.get('target_moisture'),
+                tolerance_percentage=check_data.get('tolerance_percentage'),
+                within_tolerance=check_data.get('within_tolerance'),
+                corrective_action_needed=check_data.get('corrective_action_needed'),
+                corrective_actions=check_data.get('corrective_actions'),
+                template_results=check_data.get('template_results'),
+                criteria_scores=check_data.get('criteria_scores'),
+                check_notes=check_data.get('check_notes'),
+                photos_urls=check_data.get('photos_urls'),
+                certificate_url=check_data.get('certificate_url'),
+                created_at=adjusted_created_at,
+                updated_at=adjusted_updated_at
             )
             db.add(new_check)
             stats["quality_checks"] += 1

-        # Clone Production Schedules
-        result = await db.execute(
-            select(ProductionSchedule).where(ProductionSchedule.tenant_id == base_uuid)
-        )
-        base_schedules = result.scalars().all()
+        # Clone Production Schedules from seed data (if any)
+        for schedule_data in seed_data.get('production_schedules', []):
+            # Transform IDs
+            from shared.utils.demo_id_transformer import transform_id
+            try:
+                schedule_uuid = UUID(schedule_data['id'])
+                transformed_id = transform_id(schedule_data['id'], virtual_uuid)
+            except ValueError as e:
+                logger.error("Failed to parse schedule UUID",
+                             schedule_id=schedule_data['id'],
+                             error=str(e))
+                continue

-        logger.info(
-            "Found production schedules to clone",
-            count=len(base_schedules),
-            base_tenant=str(base_uuid)
-        )
-
-        for schedule in base_schedules:
             # Adjust schedule dates relative to session creation time
             adjusted_schedule_date = adjust_date_for_demo(
-                schedule.schedule_date, session_time, BASE_REFERENCE_DATE
-            ) if schedule.schedule_date else None
+                datetime.fromisoformat(schedule_data['schedule_date'].replace('Z', '+00:00')),
+                session_time,
+                BASE_REFERENCE_DATE
+            ) if schedule_data.get('schedule_date') else None
             adjusted_shift_start = adjust_date_for_demo(
-                schedule.shift_start, session_time, BASE_REFERENCE_DATE
-            ) if schedule.shift_start else None
+                datetime.fromisoformat(schedule_data['shift_start'].replace('Z', '+00:00')),
+                session_time,
+                BASE_REFERENCE_DATE
+            ) if schedule_data.get('shift_start') else None
             adjusted_shift_end = adjust_date_for_demo(
-                schedule.shift_end, session_time, BASE_REFERENCE_DATE
-            ) if schedule.shift_end else None
+                datetime.fromisoformat(schedule_data['shift_end'].replace('Z', '+00:00')),
+                session_time,
+                BASE_REFERENCE_DATE
+            ) if schedule_data.get('shift_end') else None
             adjusted_finalized = adjust_date_for_demo(
-                schedule.finalized_at, session_time, BASE_REFERENCE_DATE
-            ) if schedule.finalized_at else None
+                datetime.fromisoformat(schedule_data['finalized_at'].replace('Z', '+00:00')),
+                session_time,
+                BASE_REFERENCE_DATE
+            ) if schedule_data.get('finalized_at') else None
+            adjusted_created_at = adjust_date_for_demo(
+                datetime.fromisoformat(schedule_data['created_at'].replace('Z', '+00:00')),
+                session_time,
+                BASE_REFERENCE_DATE
+            )
+            adjusted_updated_at = adjust_date_for_demo(
+                datetime.fromisoformat(schedule_data['updated_at'].replace('Z', '+00:00')),
+                session_time,
+                BASE_REFERENCE_DATE
+            ) if schedule_data.get('updated_at') else adjusted_created_at

             new_schedule = ProductionSchedule(
-                id=uuid.uuid4(),
+                id=str(transformed_id),
                 tenant_id=virtual_uuid,
                 schedule_date=adjusted_schedule_date,
                 shift_start=adjusted_shift_start,
                 shift_end=adjusted_shift_end,
-                total_capacity_hours=schedule.total_capacity_hours,
-                planned_capacity_hours=schedule.planned_capacity_hours,
-                actual_capacity_hours=schedule.actual_capacity_hours,
-                overtime_hours=schedule.overtime_hours,
-                staff_count=schedule.staff_count,
-                equipment_capacity=schedule.equipment_capacity,
-                station_assignments=schedule.station_assignments,
-                total_batches_planned=schedule.total_batches_planned,
-                total_batches_completed=schedule.total_batches_completed,
-                total_quantity_planned=schedule.total_quantity_planned,
-                total_quantity_produced=schedule.total_quantity_produced,
-                is_finalized=schedule.is_finalized,
-                is_active=schedule.is_active,
-                efficiency_percentage=schedule.efficiency_percentage,
-                utilization_percentage=schedule.utilization_percentage,
-                on_time_completion_rate=schedule.on_time_completion_rate,
-                schedule_notes=schedule.schedule_notes,
-                schedule_adjustments=schedule.schedule_adjustments,
-                created_at=session_time,
-                updated_at=session_time,
+                total_capacity_hours=schedule_data.get('total_capacity_hours'),
+                planned_capacity_hours=schedule_data.get('planned_capacity_hours'),
+                actual_capacity_hours=schedule_data.get('actual_capacity_hours'),
+                overtime_hours=schedule_data.get('overtime_hours', 0.0),
+                staff_count=schedule_data.get('staff_count'),
+                equipment_capacity=schedule_data.get('equipment_capacity'),
+                station_assignments=schedule_data.get('station_assignments'),
+                total_batches_planned=schedule_data.get('total_batches_planned', 0),
+                total_batches_completed=schedule_data.get('total_batches_completed', 0),
+                total_quantity_planned=schedule_data.get('total_quantity_planned', 0.0),
+                total_quantity_produced=schedule_data.get('total_quantity_produced', 0.0),
+                is_finalized=schedule_data.get('is_finalized', False),
+                is_active=schedule_data.get('is_active', True),
+                efficiency_percentage=schedule_data.get('efficiency_percentage'),
+                utilization_percentage=schedule_data.get('utilization_percentage'),
+                on_time_completion_rate=schedule_data.get('on_time_completion_rate'),
+                schedule_notes=schedule_data.get('schedule_notes'),
+                schedule_adjustments=schedule_data.get('schedule_adjustments'),
+                created_at=adjusted_created_at,
+                updated_at=adjusted_updated_at,
                 finalized_at=adjusted_finalized
             )
             db.add(new_schedule)
             stats["production_schedules"] += 1

-        # Clone Production Capacity
-        result = await db.execute(
-            select(ProductionCapacity).where(ProductionCapacity.tenant_id == base_uuid)
-        )
-        base_capacity = result.scalars().all()
+        # Clone Production Capacity from seed data (if any)
+        for capacity_data in seed_data.get('production_capacity', []):
+            # Transform IDs
+            from shared.utils.demo_id_transformer import transform_id
+            try:
+                capacity_uuid = UUID(capacity_data['id'])
+                transformed_id = transform_id(capacity_data['id'], virtual_uuid)
+            except ValueError as e:
+                logger.error("Failed to parse capacity UUID",
+                             capacity_id=capacity_data['id'],
+                             error=str(e))
+                continue

-        for capacity in base_capacity:
             # Adjust capacity dates relative to session creation time
             adjusted_date = adjust_date_for_demo(
-                capacity.date, session_time, BASE_REFERENCE_DATE
-            ) if capacity.date else None
+                datetime.fromisoformat(capacity_data['date'].replace('Z', '+00:00')),
+                session_time,
+                BASE_REFERENCE_DATE
+            ) if capacity_data.get('date') else None
             adjusted_start_time = adjust_date_for_demo(
-                capacity.start_time, session_time, BASE_REFERENCE_DATE
-            ) if capacity.start_time else None
+                datetime.fromisoformat(capacity_data['start_time'].replace('Z', '+00:00')),
+                session_time,
+                BASE_REFERENCE_DATE
+            ) if capacity_data.get('start_time') else None
             adjusted_end_time = adjust_date_for_demo(
-                capacity.end_time, session_time, BASE_REFERENCE_DATE
-            ) if capacity.end_time else None
+                datetime.fromisoformat(capacity_data['end_time'].replace('Z', '+00:00')),
+                session_time,
+                BASE_REFERENCE_DATE
+            ) if capacity_data.get('end_time') else None
             adjusted_last_maintenance = adjust_date_for_demo(
-                capacity.last_maintenance_date, session_time, BASE_REFERENCE_DATE
-            ) if capacity.last_maintenance_date else None
+                datetime.fromisoformat(capacity_data['last_maintenance_date'].replace('Z', '+00:00')),
+                session_time,
+                BASE_REFERENCE_DATE
+            ) if capacity_data.get('last_maintenance_date') else None
+            adjusted_created_at = adjust_date_for_demo(
+                datetime.fromisoformat(capacity_data['created_at'].replace('Z', '+00:00')),
+                session_time,
+                BASE_REFERENCE_DATE
+            )
+            adjusted_updated_at = adjust_date_for_demo(
+                datetime.fromisoformat(capacity_data['updated_at'].replace('Z', '+00:00')),
+                session_time,
+                BASE_REFERENCE_DATE
+            ) if capacity_data.get('updated_at') else adjusted_created_at

             new_capacity = ProductionCapacity(
-                id=uuid.uuid4(),
+                id=str(transformed_id),
                 tenant_id=virtual_uuid,
-                resource_type=capacity.resource_type,
-                resource_id=capacity.resource_id,
-                resource_name=capacity.resource_name,
+                resource_type=capacity_data.get('resource_type'),
+                resource_id=capacity_data.get('resource_id'),
+                resource_name=capacity_data.get('resource_name'),
                 date=adjusted_date,
                 start_time=adjusted_start_time,
                 end_time=adjusted_end_time,
-                total_capacity_units=capacity.total_capacity_units,
-                allocated_capacity_units=capacity.allocated_capacity_units,
-                remaining_capacity_units=capacity.remaining_capacity_units,
-                is_available=capacity.is_available,
-                is_maintenance=capacity.is_maintenance,
-                is_reserved=capacity.is_reserved,
-                equipment_type=capacity.equipment_type,
-                max_batch_size=capacity.max_batch_size,
-                min_batch_size=capacity.min_batch_size,
-                setup_time_minutes=capacity.setup_time_minutes,
-                cleanup_time_minutes=capacity.cleanup_time_minutes,
-                efficiency_rating=capacity.efficiency_rating,
-                maintenance_status=capacity.maintenance_status,
+                total_capacity_units=capacity_data.get('total_capacity_units'),
+                allocated_capacity_units=capacity_data.get('allocated_capacity_units'),
+                remaining_capacity_units=capacity_data.get('remaining_capacity_units'),
+                is_available=capacity_data.get('is_available'),
+                is_maintenance=capacity_data.get('is_maintenance'),
+                is_reserved=capacity_data.get('is_reserved'),
+                equipment_type=capacity_data.get('equipment_type'),
+                max_batch_size=capacity_data.get('max_batch_size'),
+                min_batch_size=capacity_data.get('min_batch_size'),
+                setup_time_minutes=capacity_data.get('setup_time_minutes'),
+                cleanup_time_minutes=capacity_data.get('cleanup_time_minutes'),
+                efficiency_rating=capacity_data.get('efficiency_rating'),
+                maintenance_status=capacity_data.get('maintenance_status'),
                 last_maintenance_date=adjusted_last_maintenance,
-                notes=capacity.notes,
-                restrictions=capacity.restrictions,
-                created_at=session_time,
-                updated_at=session_time
+                notes=capacity_data.get('notes'),
+                restrictions=capacity_data.get('restrictions'),
+                created_at=adjusted_created_at,
+                updated_at=adjusted_updated_at
             )
             db.add(new_capacity)
             stats["production_capacity"] += 1
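Note: the comments above describe `transform_id` only as an XOR transform. One deterministic construction consistent with that description XORs the seed UUID's bits with the virtual tenant's, which keeps cloned IDs stable across re-runs and preserves the cross-references tracked in `batch_id_map`. This is an assumption about shared.utils.demo_id_transformer, not its verified behavior:

# Assumed behavior: deterministic per (seed_id, tenant), so re-cloning yields
# the same IDs and references between seeded records stay intact.
from uuid import UUID

def transform_id_sketch(seed_id: str, virtual_tenant: UUID) -> UUID:
    return UUID(int=UUID(seed_id).int ^ virtual_tenant.int)

tenant = UUID("00000000-0000-0000-0000-0000000000ff")
seed = "a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6"
out = transform_id_sketch(seed, tenant)
# XOR is self-inverse, so the seed ID is recoverable from the transformed one:
assert UUID(int=out.int ^ tenant.int) == UUID(seed)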
@@ -477,7 +633,7 @@ async def clone_demo_data(
         stats["alerts_generated"] = 0

         # Calculate total from non-alert stats
-        total_records = (stats["equipment"] + stats["production_batches"] + stats["production_schedules"] +
+        total_records = (stats["equipment"] + stats["batches"] + stats["production_schedules"] +
                          stats["quality_check_templates"] + stats["quality_checks"] +
                          stats["production_capacity"])
         duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
@@ -237,9 +237,8 @@ async def trigger_yield_prediction(
             logger.error(error_msg, exc_info=True)
             errors.append(error_msg)

-    # Close orchestrator and clients
+    # Close orchestrator
     await orchestrator.close()
-    await recipes_client.close()

     # Build response
     response = YieldPredictionResponse(
@@ -286,3 +285,89 @@ async def ml_insights_health():
         "POST /ml/insights/predict-yields"
     ]
 }
+
+
+# ================================================================
+# INTERNAL ENDPOINTS (for demo-session service)
+# ================================================================
+
+from fastapi import Request
+
+# Create a separate router for internal endpoints to avoid the tenant prefix
+internal_router = APIRouter(
+    tags=["ML Insights - Internal"]
+)
+
+
+@internal_router.post("/api/v1/tenants/{tenant_id}/production/internal/ml/generate-yield-insights")
+async def generate_yield_insights_internal(
+    tenant_id: str,
+    request: Request,
+    db: AsyncSession = Depends(get_db)
+):
+    """
+    Internal endpoint to trigger yield insights generation for demo sessions.
+
+    This endpoint is called by the demo-session service after cloning data.
+    It uses the same ML logic as the public endpoint but with optimized defaults.
+
+    Security: Protected by X-Internal-Service header check.
+
+    Args:
+        tenant_id: The tenant UUID
+        request: FastAPI request object
+        db: Database session
+
+    Returns:
+        {
+            "insights_posted": int,
+            "tenant_id": str,
+            "status": str
+        }
+    """
+    # Verify internal service header
+    if not request or request.headers.get("X-Internal-Service") not in ["demo-session", "internal"]:
+        logger.warning("Unauthorized internal API call", tenant_id=tenant_id)
+        raise HTTPException(
+            status_code=403,
+            detail="This endpoint is for internal service use only"
+        )
+
+    logger.info("Internal yield insights generation triggered", tenant_id=tenant_id)
+
+    try:
+        # Use the existing yield prediction logic with sensible defaults
+        request_data = YieldPredictionRequest(
+            recipe_ids=None,  # Analyze all recipes
+            lookback_days=90,  # 3 months of history
+            min_history_runs=20  # Minimum 20 production runs required
+        )
+
+        # Call the existing yield prediction endpoint logic
+        result = await trigger_yield_prediction(
+            tenant_id=tenant_id,
+            request_data=request_data,
+            db=db
+        )
+
+        # Return simplified response for internal use
+        return {
+            "insights_posted": result.total_insights_posted,
+            "tenant_id": tenant_id,
+            "status": "success" if result.success else "failed",
+            "message": result.message,
+            "recipes_analyzed": result.recipes_analyzed,
+            "recipes_with_issues": result.recipes_with_issues
+        }
+
+    except Exception as e:
+        logger.error(
+            "Internal yield insights generation failed",
+            tenant_id=tenant_id,
+            error=str(e),
+            exc_info=True
+        )
+        raise HTTPException(
+            status_code=500,
+            detail=f"Internal yield insights generation failed: {str(e)}"
+        )
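Note: `internal_router` only takes effect if the application mounts it alongside the main router; that wiring is not shown in this diff. A self-contained sketch of the idea, with placeholder handlers:

# A second router that declares full paths is included without a prefix, while
# the main router keeps the tenant-scoped prefix. Names here are placeholders.
from fastapi import APIRouter, FastAPI

router = APIRouter()           # tenant-scoped routes, mounted under a prefix
internal_router = APIRouter()  # declares complete paths itself

@internal_router.post("/api/v1/tenants/{tenant_id}/production/internal/ml/generate-yield-insights")
async def generate(tenant_id: str) -> dict:
    return {"tenant_id": tenant_id}

app = FastAPI()
app.include_router(router, prefix="/api/v1/tenants/{tenant_id}/production")
app.include_router(internal_router)  # no prefix: the route carries the full path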
@@ -6,7 +6,7 @@ Production Orchestrator API - Endpoints for orchestrated production scheduling
 Called by the Orchestrator Service to generate production schedules from forecast data
 """

-from fastapi import APIRouter, Depends, HTTPException, Path
+from fastapi import APIRouter, Depends, HTTPException, Path, Request
 from typing import Optional, Dict, Any, List
 from datetime import date
 from uuid import UUID
@@ -23,10 +23,11 @@ route_builder = RouteBuilder('production')
 router = APIRouter(tags=["production-orchestrator"])


-def get_production_service() -> ProductionService:
+def get_production_service(request: Request) -> ProductionService:
     """Dependency injection for production service"""
     from app.core.database import database_manager
-    return ProductionService(database_manager, settings)
+    notification_service = getattr(request.app.state, 'notification_service', None)
+    return ProductionService(database_manager, settings, notification_service)


 # ================================================================
@@ -3,7 +3,7 @@
 Production Batches API - ATOMIC CRUD operations on ProductionBatch model
 """

-from fastapi import APIRouter, Depends, HTTPException, Path, Query
+from fastapi import APIRouter, Depends, HTTPException, Path, Query, Request
 from typing import Optional
 from datetime import date
 from uuid import UUID
@@ -26,8 +26,19 @@ from app.schemas.production import (
 )
 from app.core.config import settings
 from app.utils.cache import get_cached, set_cached, make_cache_key
+from app.services.production_alert_service import ProductionAlertService

 logger = structlog.get_logger()

+
+async def get_production_alert_service(request: Request) -> ProductionAlertService:
+    """Dependency injection for production alert service"""
+    # Get the alert service from app state, which is where it's stored during app startup
+    alert_service = getattr(request.app.state, 'production_alert_service', None)
+    if not alert_service:
+        logger.warning("Production alert service not available in app state")
+        return None
+    return alert_service
+
 route_builder = RouteBuilder('production')
 router = APIRouter(tags=["production-batches"])
@@ -35,10 +46,11 @@ router = APIRouter(tags=["production-batches"])
 audit_logger = create_audit_logger("production-service", AuditLog)


-def get_production_service() -> ProductionService:
+def get_production_service(request: Request) -> ProductionService:
     """Dependency injection for production service"""
     from app.core.database import database_manager
-    return ProductionService(database_manager, settings)
+    notification_service = getattr(request.app.state, 'notification_service', None)
+    return ProductionService(database_manager, settings, notification_service)


 @router.get(
@@ -108,12 +120,60 @@ async def create_production_batch(
     batch_data: ProductionBatchCreate,
     tenant_id: UUID = Path(...),
     current_user: dict = Depends(get_current_user_dep),
-    production_service: ProductionService = Depends(get_production_service)
+    production_service: ProductionService = Depends(get_production_service),
+    request: Request = None,
+    alert_service: ProductionAlertService = Depends(get_production_alert_service)
 ):
     """Create a new production batch"""
     try:
         batch = await production_service.create_production_batch(tenant_id, batch_data)
+
+        # Trigger Start Production alert
+        if alert_service:
+            try:
+                # Generate reasoning data for the batch
+                reasoning_data = {
+                    "type": "manual_creation",
+                    "parameters": {
+                        "product_name": batch.product_name,
+                        "planned_quantity": batch.planned_quantity,
+                        "priority": batch.priority.value if batch.priority else "MEDIUM"
+                    },
+                    "urgency": {
+                        "level": "normal",
+                        "ready_by_time": batch.planned_start_time.strftime('%H:%M') if batch.planned_start_time else "unknown"
+                    },
+                    "metadata": {
+                        "trigger_source": "manual_creation",
+                        "created_by": current_user.get("user_id", "unknown"),
+                        "is_ai_assisted": False
+                    }
+                }
+
+                # Update batch with reasoning data
+                from app.core.database import get_db
+                db = next(get_db())
+                batch.reasoning_data = reasoning_data
+                await db.commit()
+
+                # Emit Start Production alert
+                await alert_service.emit_start_production_alert(
+                    tenant_id=tenant_id,
+                    batch_id=batch.id,
+                    product_name=batch.product_name,
+                    batch_number=batch.batch_number,
+                    reasoning_data=reasoning_data,
+                    planned_start_time=batch.planned_start_time.isoformat() if batch.planned_start_time else None
+                )
+
+                logger.info("Start Production alert triggered for batch",
+                            batch_id=str(batch.id), tenant_id=str(tenant_id))
+
+            except Exception as alert_error:
+                logger.error("Failed to trigger Start Production alert",
+                             error=str(alert_error), batch_id=str(batch.id))
+                # Don't fail the batch creation if alert fails

         logger.info("Created production batch",
                     batch_id=str(batch.id), tenant_id=str(tenant_id))
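Note: one caveat in the new alert block above: `db = next(get_db())` treats `get_db` as a synchronous generator, yet elsewhere in this diff the same dependency is typed `db: AsyncSession = Depends(get_db)`. If `get_db` is in fact an async generator, `next()` would raise a TypeError at runtime. A hedged sketch of persisting `reasoning_data` with an explicit async session instead (`async_session_factory` and the connection URL are placeholders; `ProductionBatch` is the model this service already imports):

# Sketch only, under the assumption that an async sessionmaker is available.
from sqlalchemy.ext.asyncio import async_sessionmaker, create_async_engine

engine = create_async_engine("postgresql+asyncpg://user:pass@db/production")  # placeholder URL
async_session_factory = async_sessionmaker(engine, expire_on_commit=False)

async def save_reasoning_data(batch_id, reasoning_data: dict) -> None:
    async with async_session_factory() as session:
        batch = await session.get(ProductionBatch, batch_id)  # model from app.models.production
        if batch is not None:
            batch.reasoning_data = reasoning_data
            await session.commit()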
@@ -3,7 +3,7 @@
 Production Dashboard API - Dashboard endpoints for production overview
 """

-from fastapi import APIRouter, Depends, HTTPException, Path, Query
+from fastapi import APIRouter, Depends, HTTPException, Path, Query, Request
 from typing import Optional
 from datetime import date, datetime
 from uuid import UUID
@@ -21,10 +21,11 @@ route_builder = RouteBuilder('production')
 router = APIRouter(tags=["production-dashboard"])


-def get_production_service() -> ProductionService:
+def get_production_service(request: Request) -> ProductionService:
     """Dependency injection for production service"""
     from app.core.database import database_manager
-    return ProductionService(database_manager, settings)
+    notification_service = getattr(request.app.state, 'notification_service', None)
+    return ProductionService(database_manager, settings, notification_service)


 @router.get(
@@ -25,10 +25,11 @@ route_builder = RouteBuilder('production')
 router = APIRouter(tags=["production-operations"])


-def get_production_service() -> ProductionService:
+def get_production_service(request: Request) -> ProductionService:
     """Dependency injection for production service"""
     from app.core.database import database_manager
-    return ProductionService(database_manager, settings)
+    notification_service = getattr(request.app.state, 'notification_service', None)
+    return ProductionService(database_manager, settings, notification_service)


 # ===== BATCH OPERATIONS =====
@@ -3,7 +3,7 @@
 Production Schedules API - ATOMIC CRUD operations on ProductionSchedule model
 """

-from fastapi import APIRouter, Depends, HTTPException, Path, Query
+from fastapi import APIRouter, Depends, HTTPException, Path, Query, Request
 from typing import Optional
 from datetime import date, datetime, timedelta
 from uuid import UUID
@@ -31,10 +31,11 @@ router = APIRouter(tags=["production-schedules"])
 audit_logger = create_audit_logger("production-service", AuditLog)


-def get_production_service() -> ProductionService:
+def get_production_service(request: Request) -> ProductionService:
     """Dependency injection for production service"""
     from app.core.database import database_manager
-    return ProductionService(database_manager, settings)
+    notification_service = getattr(request.app.state, 'notification_service', None)
+    return ProductionService(database_manager, settings, notification_service)


 @router.get(