Improve the frontend 3

Urtzi Alfaro
2025-10-30 21:08:07 +01:00
parent 36217a2729
commit 63f5c6d512
184 changed files with 21512 additions and 7442 deletions

View File

@@ -231,31 +231,40 @@ async def clone_demo_data(
base_tenant=str(base_uuid)
)
# Calculate date offset to make production recent
if base_batches:
max_date = max(batch.planned_start_time for batch in base_batches if batch.planned_start_time)
today = datetime.now(timezone.utc)
date_offset = today - max_date
else:
date_offset = timedelta(days=0)
for batch in base_batches:
new_batch_id = uuid.uuid4()
batch_id_map[batch.id] = new_batch_id
# Adjust dates relative to session creation time
adjusted_planned_start = adjust_date_for_demo(
batch.planned_start_time, session_time, BASE_REFERENCE_DATE
) if batch.planned_start_time else None
adjusted_planned_end = adjust_date_for_demo(
batch.planned_end_time, session_time, BASE_REFERENCE_DATE
) if batch.planned_end_time else None
adjusted_actual_start = adjust_date_for_demo(
batch.actual_start_time, session_time, BASE_REFERENCE_DATE
) if batch.actual_start_time else None
adjusted_actual_end = adjust_date_for_demo(
batch.actual_end_time, session_time, BASE_REFERENCE_DATE
) if batch.actual_end_time else None
adjusted_completed = adjust_date_for_demo(
batch.completed_at, session_time, BASE_REFERENCE_DATE
) if batch.completed_at else None
new_batch = ProductionBatch(
id=new_batch_id,
tenant_id=virtual_uuid,
batch_number=f"BATCH-{uuid.uuid4().hex[:8].upper()}", # New batch number
product_id=batch.product_id,  # Keep product reference
product_name=batch.product_name,
recipe_id=batch.recipe_id, # Keep recipe reference
planned_start_time=batch.planned_start_time + date_offset if batch.planned_start_time else None,
planned_end_time=batch.planned_end_time + date_offset if batch.planned_end_time else None,
planned_start_time=adjusted_planned_start,
planned_end_time=adjusted_planned_end,
planned_quantity=batch.planned_quantity,
planned_duration_minutes=batch.planned_duration_minutes,
actual_start_time=batch.actual_start_time + date_offset if batch.actual_start_time else None,
actual_end_time=batch.actual_end_time + date_offset if batch.actual_end_time else None,
actual_start_time=adjusted_actual_start,
actual_end_time=adjusted_actual_end,
actual_quantity=batch.actual_quantity,
actual_duration_minutes=batch.actual_duration_minutes,
status=batch.status,
@@ -284,9 +293,9 @@ async def clone_demo_data(
quality_notes=batch.quality_notes,
delay_reason=batch.delay_reason,
cancellation_reason=batch.cancellation_reason,
created_at=datetime.now(timezone.utc),
updated_at=datetime.now(timezone.utc),
completed_at=batch.completed_at + date_offset if batch.completed_at else None
created_at=session_time,
updated_at=session_time,
completed_at=adjusted_completed
)
db.add(new_batch)
stats["production_batches"] += 1
@@ -310,6 +319,11 @@ async def clone_demo_data(
new_batch_id = batch_id_map.get(check.batch_id, check.batch_id)
new_template_id = template_id_map.get(check.template_id, check.template_id) if check.template_id else None
# Adjust check time relative to session creation time
adjusted_check_time = adjust_date_for_demo(
check.check_time, session_time, BASE_REFERENCE_DATE
) if check.check_time else None
new_check = QualityCheck(
id=uuid.uuid4(),
tenant_id=virtual_uuid,
@@ -317,7 +331,7 @@ async def clone_demo_data(
template_id=new_template_id,
check_type=check.check_type,
process_stage=check.process_stage,
check_time=check.check_time + date_offset,
check_time=adjusted_check_time,
checker_id=check.checker_id,
quality_score=check.quality_score,
pass_fail=check.pass_fail,
@@ -340,8 +354,8 @@ async def clone_demo_data(
check_notes=check.check_notes,
photos_urls=check.photos_urls,
certificate_url=check.certificate_url,
created_at=datetime.now(timezone.utc),
updated_at=datetime.now(timezone.utc)
created_at=session_time,
updated_at=session_time
)
db.add(new_check)
stats["quality_checks"] += 1
@@ -359,12 +373,26 @@ async def clone_demo_data(
)
for schedule in base_schedules:
# Adjust schedule dates relative to session creation time
adjusted_schedule_date = adjust_date_for_demo(
schedule.schedule_date, session_time, BASE_REFERENCE_DATE
) if schedule.schedule_date else None
adjusted_shift_start = adjust_date_for_demo(
schedule.shift_start, session_time, BASE_REFERENCE_DATE
) if schedule.shift_start else None
adjusted_shift_end = adjust_date_for_demo(
schedule.shift_end, session_time, BASE_REFERENCE_DATE
) if schedule.shift_end else None
adjusted_finalized = adjust_date_for_demo(
schedule.finalized_at, session_time, BASE_REFERENCE_DATE
) if schedule.finalized_at else None
new_schedule = ProductionSchedule(
id=uuid.uuid4(),
tenant_id=virtual_uuid,
schedule_date=schedule.schedule_date + date_offset,
shift_start=schedule.shift_start + date_offset,
shift_end=schedule.shift_end + date_offset,
schedule_date=adjusted_schedule_date,
shift_start=adjusted_shift_start,
shift_end=adjusted_shift_end,
total_capacity_hours=schedule.total_capacity_hours,
planned_capacity_hours=schedule.planned_capacity_hours,
actual_capacity_hours=schedule.actual_capacity_hours,
@@ -383,9 +411,9 @@ async def clone_demo_data(
on_time_completion_rate=schedule.on_time_completion_rate,
schedule_notes=schedule.schedule_notes,
schedule_adjustments=schedule.schedule_adjustments,
created_at=datetime.now(timezone.utc),
updated_at=datetime.now(timezone.utc),
finalized_at=schedule.finalized_at + date_offset if schedule.finalized_at else None
created_at=session_time,
updated_at=session_time,
finalized_at=adjusted_finalized
)
db.add(new_schedule)
stats["production_schedules"] += 1
@@ -397,15 +425,29 @@ async def clone_demo_data(
base_capacity = result.scalars().all()
for capacity in base_capacity:
# Adjust capacity dates relative to session creation time
adjusted_date = adjust_date_for_demo(
capacity.date, session_time, BASE_REFERENCE_DATE
) if capacity.date else None
adjusted_start_time = adjust_date_for_demo(
capacity.start_time, session_time, BASE_REFERENCE_DATE
) if capacity.start_time else None
adjusted_end_time = adjust_date_for_demo(
capacity.end_time, session_time, BASE_REFERENCE_DATE
) if capacity.end_time else None
adjusted_last_maintenance = adjust_date_for_demo(
capacity.last_maintenance_date, session_time, BASE_REFERENCE_DATE
) if capacity.last_maintenance_date else None
new_capacity = ProductionCapacity(
id=uuid.uuid4(),
tenant_id=virtual_uuid,
resource_type=capacity.resource_type,
resource_id=capacity.resource_id,
resource_name=capacity.resource_name,
date=capacity.date + date_offset,
start_time=capacity.start_time + date_offset,
end_time=capacity.end_time + date_offset,
date=adjusted_date,
start_time=adjusted_start_time,
end_time=adjusted_end_time,
total_capacity_units=capacity.total_capacity_units,
allocated_capacity_units=capacity.allocated_capacity_units,
remaining_capacity_units=capacity.remaining_capacity_units,
@@ -419,11 +461,11 @@ async def clone_demo_data(
cleanup_time_minutes=capacity.cleanup_time_minutes,
efficiency_rating=capacity.efficiency_rating,
maintenance_status=capacity.maintenance_status,
last_maintenance_date=capacity.last_maintenance_date + date_offset if capacity.last_maintenance_date else None,
last_maintenance_date=adjusted_last_maintenance,
notes=capacity.notes,
restrictions=capacity.restrictions,
created_at=datetime.now(timezone.utc),
updated_at=datetime.now(timezone.utc)
created_at=session_time,
updated_at=session_time
)
db.add(new_capacity)
stats["production_capacity"] += 1
@@ -437,8 +479,8 @@ async def clone_demo_data(
stats["alerts_generated"] = 0
# Calculate total from non-alert stats
total_records = (stats["equipment"] + stats["batches"] + stats["schedules"] +
stats["quality_templates"] + stats["quality_checks"] +
total_records = (stats["equipment"] + stats["production_batches"] + stats["production_schedules"] +
stats["quality_check_templates"] + stats["quality_checks"] +
stats["production_capacity"])
duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
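
The hunks above replace the old single `date_offset` (computed from the newest `planned_start_time` in the base data) with per-field calls to `adjust_date_for_demo(value, session_time, BASE_REFERENCE_DATE)`, so every cloned timestamp is re-anchored to the demo session's creation time and the shift no longer depends on which batches happen to exist. The helper's definition is not part of this diff; a minimal sketch consistent with its call sites, assuming it preserves each timestamp's offset from a fixed reference date (both the implementation and the reference value below are assumptions):

```python
from datetime import datetime, timezone

# Assumed value for illustration; the real BASE_REFERENCE_DATE is defined
# elsewhere in the repo and is not shown in this diff.
BASE_REFERENCE_DATE = datetime(2025, 1, 1, tzinfo=timezone.utc)

def adjust_date_for_demo(original: datetime,
                         session_time: datetime,
                         base_reference: datetime) -> datetime:
    """Hypothetical sketch: re-apply `original`'s offset from `base_reference`
    on top of `session_time`, so cloned demo data always looks current."""
    return session_time + (original - base_reference)
```

This also explains the `created_at`/`updated_at` change from `datetime.now(timezone.utc)` to `session_time`: cloning many rows now stamps them all with one consistent session timestamp instead of slightly different wall-clock times.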

View File

@@ -0,0 +1,240 @@
# ================================================================
# services/production/app/api/orchestrator.py
# ================================================================
"""
Production Orchestrator API - Endpoints for orchestrated production scheduling
Called by the Orchestrator Service to generate production schedules from forecast data
"""
from fastapi import APIRouter, Depends, HTTPException, Path
from typing import Optional, Dict, Any, List
from datetime import date
from uuid import UUID
from pydantic import BaseModel, Field
import structlog
from shared.routing import RouteBuilder
from app.services.production_service import ProductionService
from app.schemas.production import ProductionScheduleResponse
from app.core.config import settings
logger = structlog.get_logger()
route_builder = RouteBuilder('production')
router = APIRouter(tags=["production-orchestrator"])
def get_production_service() -> ProductionService:
"""Dependency injection for production service"""
from app.core.database import database_manager
return ProductionService(database_manager, settings)
# ================================================================
# REQUEST/RESPONSE SCHEMAS
# ================================================================
class GenerateScheduleRequest(BaseModel):
"""
Request to generate production schedule (called by Orchestrator)
The Orchestrator calls Forecasting Service first, then passes forecast data here.
Production Service uses this data to determine what to produce.
NEW: Accepts cached data snapshots from Orchestrator to eliminate duplicate API calls.
"""
forecast_data: Dict[str, Any] = Field(..., description="Forecast data from Forecasting Service")
target_date: Optional[date] = Field(None, description="Target production date")
planning_horizon_days: int = Field(default=1, ge=1, le=7, description="Planning horizon in days")
# NEW: Cached data from Orchestrator
inventory_data: Optional[Dict[str, Any]] = Field(None, description="Cached inventory snapshot from Orchestrator")
recipes_data: Optional[Dict[str, Any]] = Field(None, description="Cached recipes snapshot from Orchestrator")
class Config:
json_schema_extra = {
"example": {
"forecast_data": {
"forecasts": [
{
"product_id": "uuid-here",
"predicted_demand": 100.0,
"confidence_score": 0.85
}
],
"forecast_id": "uuid-here",
"generated_at": "2025-01-30T10:00:00Z"
},
"target_date": "2025-01-31",
"planning_horizon_days": 1
}
}
class GenerateScheduleResponse(BaseModel):
"""Response from generate_schedule endpoint"""
success: bool
message: str
schedule_id: Optional[UUID] = None
schedule_number: Optional[str] = None
batches_created: int = 0
total_planned_quantity: float = 0.0
warnings: List[str] = []
errors: List[str] = []
class Config:
json_schema_extra = {
"example": {
"success": True,
"message": "Production schedule generated successfully",
"schedule_id": "uuid-here",
"schedule_number": "PROD-2025-01-30-001",
"batches_created": 5,
"total_planned_quantity": 500.0,
"warnings": [],
"errors": []
}
}
# ================================================================
# ORCHESTRATOR ENTRY POINT
# ================================================================
@router.post(
route_builder.build_nested_resource_route("", None, "generate-schedule"),
response_model=GenerateScheduleResponse
)
async def generate_production_schedule(
tenant_id: UUID = Path(...),
request_data: GenerateScheduleRequest = ...,
production_service: ProductionService = Depends(get_production_service)
):
"""
Generate production schedule from forecast data (called by Orchestrator)
This is the main entry point for orchestrated production planning.
The Orchestrator calls Forecasting Service first, then passes forecast data here.
Flow:
1. Receive forecast data from orchestrator
2. Parse forecast to extract product demands
3. Check inventory levels for each product
4. Calculate production quantities needed
5. Create production schedule and batches
6. Return schedule summary
Args:
tenant_id: Tenant UUID
request_data: Schedule generation request with forecast data
Returns:
GenerateScheduleResponse with schedule details and created batches
"""
try:
logger.info("Generate production schedule endpoint called",
tenant_id=str(tenant_id),
has_forecast_data=bool(request_data.forecast_data))
target_date = request_data.target_date or date.today()
forecast_data = request_data.forecast_data
# Parse forecast data from orchestrator
forecasts = _parse_forecast_data(forecast_data)
if not forecasts:
return GenerateScheduleResponse(
success=False,
message="No forecast data provided",
errors=["Forecast data is empty or invalid"]
)
# Generate production schedule using the service (with cached data if available)
result = await production_service.generate_production_schedule_from_forecast(
tenant_id=tenant_id,
target_date=target_date,
forecasts=forecasts,
planning_horizon_days=request_data.planning_horizon_days,
inventory_data=request_data.inventory_data, # NEW: Pass cached inventory
recipes_data=request_data.recipes_data # NEW: Pass cached recipes
)
logger.info("Production schedule generated successfully",
tenant_id=str(tenant_id),
schedule_id=str(result.get('schedule_id')) if result.get('schedule_id') else None,
batches_created=result.get('batches_created', 0))
return GenerateScheduleResponse(
success=True,
message="Production schedule generated successfully",
schedule_id=result.get('schedule_id'),
schedule_number=result.get('schedule_number'),
batches_created=result.get('batches_created', 0),
total_planned_quantity=result.get('total_planned_quantity', 0.0),
warnings=result.get('warnings', []),
errors=[]
)
except Exception as e:
logger.error("Error generating production schedule",
error=str(e), tenant_id=str(tenant_id))
return GenerateScheduleResponse(
success=False,
message="Failed to generate production schedule",
errors=[str(e)]
)
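
Note that this endpoint reports failures in the response body (`success=False` plus `errors`) instead of raising an `HTTPException`, so callers must inspect the payload even on HTTP 200. A hedged sketch of how the Orchestrator side might call it; the URL below is a placeholder, since the concrete path is produced by `RouteBuilder('production')` and does not appear in this file:

```python
import httpx

async def request_schedule(tenant_id: str, forecast_data: dict) -> dict:
    # Placeholder path: the real route comes from RouteBuilder, not this string.
    url = f"http://production-service/tenants/{tenant_id}/generate-schedule"
    async with httpx.AsyncClient(timeout=30.0) as client:
        resp = await client.post(url, json={
            "forecast_data": forecast_data,
            "planning_horizon_days": 1,
        })
    resp.raise_for_status()            # transport-level failures
    body = resp.json()
    if not body["success"]:            # application-level failures are in-band
        raise RuntimeError(f"Schedule generation failed: {body['errors']}")
    return body
```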
# ================================================================
# HELPER FUNCTIONS
# ================================================================
def _parse_forecast_data(forecast_data: Dict[str, Any]) -> List[Dict[str, Any]]:
"""
Parse forecast data received from orchestrator
Expected format from Forecasting Service via Orchestrator:
{
"forecasts": [
{
"product_id": "uuid",
"inventory_product_id": "uuid", # Alternative field name
"predicted_demand": 100.0,
"predicted_value": 100.0, # Alternative field name
"confidence_score": 0.85,
...
}
],
"forecast_id": "uuid",
"generated_at": "2025-01-30T10:00:00Z"
}
"""
forecasts = []
forecast_list = forecast_data.get('forecasts', [])
for forecast_item in forecast_list:
# Extract product ID (try multiple field names)
product_id = (
forecast_item.get('product_id') or
forecast_item.get('inventory_product_id') or
forecast_item.get('item_id')
)
# Extract predicted demand (try multiple field names)
predicted_demand = (
forecast_item.get('predicted_demand') or
forecast_item.get('predicted_value') or
forecast_item.get('demand') or
0
)
if product_id and predicted_demand > 0:
forecasts.append({
'product_id': product_id,
'predicted_demand': float(predicted_demand),
'confidence_score': forecast_item.get('confidence_score', 0.8),
'lower_bound': forecast_item.get('lower_bound', 0),
'upper_bound': forecast_item.get('upper_bound', 0),
'forecast_id': forecast_data.get('forecast_id'),
})
return forecasts
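
`_parse_forecast_data` tolerates several field spellings and silently drops entries without a usable product ID or a positive demand; because `or` treats an explicit `0` as missing, a zero-demand forecast is discarded, which matches the `> 0` filter. A small illustration with made-up values:

```python
sample = {
    "forecast_id": "f-001",
    "forecasts": [
        {"product_id": "p-1", "predicted_demand": 100.0, "confidence_score": 0.85},
        {"inventory_product_id": "p-2", "predicted_value": 40},  # alternate field names
        {"item_id": "p-3", "demand": 0},                         # dropped: demand not > 0
        {"predicted_demand": 25.0},                              # dropped: no product ID
    ],
}

parsed = _parse_forecast_data(sample)
# -> two entries: ('p-1', 100.0, confidence 0.85) and ('p-2', 40.0, default
#    confidence 0.8); both carry forecast_id 'f-001'.
```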