Add improved production UI
@@ -17,7 +17,9 @@ from app.services.production_service import ProductionService
from app.schemas.production import (
    ProductionBatchCreate, ProductionBatchUpdate, ProductionBatchStatusUpdate,
    ProductionBatchResponse, ProductionBatchListResponse,
    ProductionScheduleCreate, ProductionScheduleUpdate, ProductionScheduleResponse,
    DailyProductionRequirements, ProductionDashboardSummary, ProductionMetrics,
    ProductionStatusEnum
)
from app.core.config import settings

@@ -38,7 +40,7 @@ def get_production_service() -> ProductionService:
# DASHBOARD ENDPOINTS
# ================================================================

-@router.get("/tenants/{tenant_id}/production/dashboard-summary", response_model=ProductionDashboardSummary)
+@router.get("/tenants/{tenant_id}/production/dashboard/summary", response_model=ProductionDashboardSummary)
async def get_dashboard_summary(
    tenant_id: UUID = Path(...),
    current_user: dict = Depends(get_current_user_dep),
@@ -46,44 +48,13 @@ async def get_dashboard_summary(
):
    """Get production dashboard summary using shared auth"""
    try:
-        # Extract tenant from user context for security
-        current_tenant = current_user.get("tenant_id")
-        if str(tenant_id) != current_tenant:
-            raise HTTPException(status_code=403, detail="Access denied to this tenant")

        summary = await production_service.get_dashboard_summary(tenant_id)

        logger.info("Retrieved production dashboard summary",
-                    tenant_id=str(tenant_id), user_id=current_user.get("user_id"))
+                    tenant_id=str(tenant_id))

        return summary

    except Exception as e:
        logger.error("Error getting production dashboard summary",
                     error=str(e), tenant_id=str(tenant_id))
        raise HTTPException(status_code=500, detail="Failed to get dashboard summary")


@router.get("/tenants/{tenant_id}/production/daily-requirements", response_model=DailyProductionRequirements)
async def get_daily_requirements(
    tenant_id: UUID = Path(...),
    date: Optional[date] = Query(None, description="Target date for production requirements"),
    current_user: dict = Depends(get_current_user_dep),
    production_service: ProductionService = Depends(get_production_service)
):
    """Get daily production requirements"""
    try:
-        current_tenant = current_user.get("tenant_id")
-        if str(tenant_id) != current_tenant:
-            raise HTTPException(status_code=403, detail="Access denied to this tenant")

        target_date = date or datetime.now().date()
        requirements = await production_service.calculate_daily_requirements(tenant_id, target_date)

        logger.info("Retrieved daily production requirements",
                    tenant_id=str(tenant_id), date=target_date.isoformat())

        return requirements

    except Exception as e:
        logger.error("Error getting daily production requirements",
@@ -100,9 +71,6 @@ async def get_production_requirements(
):
    """Get production requirements for procurement planning"""
    try:
-        current_tenant = current_user.get("tenant_id")
-        if str(tenant_id) != current_tenant:
-            raise HTTPException(status_code=403, detail="Access denied to this tenant")

        target_date = date or datetime.now().date()
        requirements = await production_service.get_production_requirements(tenant_id, target_date)
@@ -122,6 +90,43 @@ async def get_production_requirements(
# PRODUCTION BATCH ENDPOINTS
# ================================================================

@router.get("/tenants/{tenant_id}/production/batches", response_model=ProductionBatchListResponse)
async def list_production_batches(
    tenant_id: UUID = Path(...),
    status: Optional[ProductionStatusEnum] = Query(None, description="Filter by status"),
    product_id: Optional[UUID] = Query(None, description="Filter by product"),
    order_id: Optional[UUID] = Query(None, description="Filter by order"),
    start_date: Optional[date] = Query(None, description="Filter from date"),
    end_date: Optional[date] = Query(None, description="Filter to date"),
    page: int = Query(1, ge=1, description="Page number"),
    page_size: int = Query(50, ge=1, le=100, description="Page size"),
    current_user: dict = Depends(get_current_user_dep),
    production_service: ProductionService = Depends(get_production_service)
):
    """List batches with filters: date, status, product, order_id"""
    try:
        filters = {
            "status": status,
            "product_id": str(product_id) if product_id else None,
            "order_id": str(order_id) if order_id else None,
            "start_date": start_date,
            "end_date": end_date
        }

        batch_list = await production_service.get_production_batches_list(tenant_id, filters, page, page_size)

        logger.info("Retrieved production batches list",
                    tenant_id=str(tenant_id), filters=filters)

        return batch_list

    except Exception as e:
        logger.error("Error listing production batches",
                     error=str(e), tenant_id=str(tenant_id))
        raise HTTPException(status_code=500, detail="Failed to list production batches")


@router.post("/tenants/{tenant_id}/production/batches", response_model=ProductionBatchResponse)
|
||||
async def create_production_batch(
|
||||
batch_data: ProductionBatchCreate,
|
||||
@@ -131,22 +136,19 @@ async def create_production_batch(
|
||||
):
|
||||
"""Create a new production batch"""
|
||||
try:
|
||||
current_tenant = current_user.get("tenant_id")
|
||||
if str(tenant_id) != current_tenant:
|
||||
raise HTTPException(status_code=403, detail="Access denied to this tenant")
|
||||
|
||||
|
||||
batch = await production_service.create_production_batch(tenant_id, batch_data)
|
||||
|
||||
logger.info("Created production batch",
|
||||
|
||||
logger.info("Created production batch",
|
||||
batch_id=str(batch.id), tenant_id=str(tenant_id))
|
||||
|
||||
|
||||
return ProductionBatchResponse.model_validate(batch)
|
||||
|
||||
|
||||
except ValueError as e:
|
||||
logger.warning("Invalid batch data", error=str(e), tenant_id=str(tenant_id))
|
||||
raise HTTPException(status_code=400, detail=str(e))
|
||||
except Exception as e:
|
||||
logger.error("Error creating production batch",
|
||||
logger.error("Error creating production batch",
|
||||
error=str(e), tenant_id=str(tenant_id))
|
||||
raise HTTPException(status_code=500, detail="Failed to create production batch")
|
||||
|
||||
@@ -159,9 +161,6 @@ async def get_active_batches(
):
    """Get currently active production batches"""
    try:
-        current_tenant = current_user.get("tenant_id")
-        if str(tenant_id) != current_tenant:
-            raise HTTPException(status_code=403, detail="Access denied to this tenant")

        from app.repositories.production_batch_repository import ProductionBatchRepository
        batch_repo = ProductionBatchRepository(db)
@@ -194,9 +193,6 @@ async def get_batch_details(
):
    """Get detailed information about a production batch"""
    try:
-        current_tenant = current_user.get("tenant_id")
-        if str(tenant_id) != current_tenant:
-            raise HTTPException(status_code=403, detail="Access denied to this tenant")

        from app.repositories.production_batch_repository import ProductionBatchRepository
        batch_repo = ProductionBatchRepository(db)
@@ -228,9 +224,6 @@ async def update_batch_status(
):
    """Update production batch status"""
    try:
-        current_tenant = current_user.get("tenant_id")
-        if str(tenant_id) != current_tenant:
-            raise HTTPException(status_code=403, detail="Access denied to this tenant")

        batch = await production_service.update_batch_status(tenant_id, batch_id, status_update)

@@ -250,11 +243,147 @@ async def update_batch_status(
        raise HTTPException(status_code=500, detail="Failed to update batch status")


@router.put("/tenants/{tenant_id}/production/batches/{batch_id}", response_model=ProductionBatchResponse)
async def update_production_batch(
    batch_update: ProductionBatchUpdate,
    tenant_id: UUID = Path(...),
    batch_id: UUID = Path(...),
    current_user: dict = Depends(get_current_user_dep),
    production_service: ProductionService = Depends(get_production_service)
):
    """Update batch (e.g., start time, notes, status)"""
    try:
        batch = await production_service.update_production_batch(tenant_id, batch_id, batch_update)

        logger.info("Updated production batch",
                    batch_id=str(batch_id), tenant_id=str(tenant_id))

        return ProductionBatchResponse.model_validate(batch)

    except ValueError as e:
        logger.warning("Invalid batch update", error=str(e), batch_id=str(batch_id))
        raise HTTPException(status_code=400, detail=str(e))
    except Exception as e:
        logger.error("Error updating production batch",
                     error=str(e), batch_id=str(batch_id), tenant_id=str(tenant_id))
        raise HTTPException(status_code=500, detail="Failed to update production batch")


@router.delete("/tenants/{tenant_id}/production/batches/{batch_id}")
async def delete_production_batch(
    tenant_id: UUID = Path(...),
    batch_id: UUID = Path(...),
    current_user: dict = Depends(get_current_user_dep),
    production_service: ProductionService = Depends(get_production_service)
):
    """Cancel/delete draft batch (soft delete preferred)"""
    try:
        await production_service.delete_production_batch(tenant_id, batch_id)

        logger.info("Deleted production batch",
                    batch_id=str(batch_id), tenant_id=str(tenant_id))

        return {"message": "Production batch deleted successfully"}

    except ValueError as e:
        logger.warning("Cannot delete batch", error=str(e), batch_id=str(batch_id))
        raise HTTPException(status_code=400, detail=str(e))
    except Exception as e:
        logger.error("Error deleting production batch",
                     error=str(e), batch_id=str(batch_id), tenant_id=str(tenant_id))
        raise HTTPException(status_code=500, detail="Failed to delete production batch")


@router.post("/tenants/{tenant_id}/production/batches/{batch_id}/start", response_model=ProductionBatchResponse)
|
||||
async def start_production_batch(
|
||||
tenant_id: UUID = Path(...),
|
||||
batch_id: UUID = Path(...),
|
||||
current_user: dict = Depends(get_current_user_dep),
|
||||
production_service: ProductionService = Depends(get_production_service)
|
||||
):
|
||||
"""Mark batch as started (updates actual_start_time)"""
|
||||
try:
|
||||
|
||||
batch = await production_service.start_production_batch(tenant_id, batch_id)
|
||||
|
||||
logger.info("Started production batch",
|
||||
batch_id=str(batch_id), tenant_id=str(tenant_id))
|
||||
|
||||
return ProductionBatchResponse.model_validate(batch)
|
||||
|
||||
except ValueError as e:
|
||||
logger.warning("Cannot start batch", error=str(e), batch_id=str(batch_id))
|
||||
raise HTTPException(status_code=400, detail=str(e))
|
||||
except Exception as e:
|
||||
logger.error("Error starting production batch",
|
||||
error=str(e), batch_id=str(batch_id), tenant_id=str(tenant_id))
|
||||
raise HTTPException(status_code=500, detail="Failed to start production batch")
|
||||
|
||||
|
||||
@router.post("/tenants/{tenant_id}/production/batches/{batch_id}/complete", response_model=ProductionBatchResponse)
|
||||
async def complete_production_batch(
|
||||
tenant_id: UUID = Path(...),
|
||||
batch_id: UUID = Path(...),
|
||||
completion_data: Optional[dict] = None,
|
||||
current_user: dict = Depends(get_current_user_dep),
|
||||
production_service: ProductionService = Depends(get_production_service)
|
||||
):
|
||||
"""Complete batch — auto-calculates yield, duration, cost summary"""
|
||||
try:
|
||||
|
||||
batch = await production_service.complete_production_batch(tenant_id, batch_id, completion_data)
|
||||
|
||||
logger.info("Completed production batch",
|
||||
batch_id=str(batch_id), tenant_id=str(tenant_id))
|
||||
|
||||
return ProductionBatchResponse.model_validate(batch)
|
||||
|
||||
except ValueError as e:
|
||||
logger.warning("Cannot complete batch", error=str(e), batch_id=str(batch_id))
|
||||
raise HTTPException(status_code=400, detail=str(e))
|
||||
except Exception as e:
|
||||
logger.error("Error completing production batch",
|
||||
error=str(e), batch_id=str(batch_id), tenant_id=str(tenant_id))
|
||||
raise HTTPException(status_code=500, detail="Failed to complete production batch")
|
||||
|
||||
|
||||
@router.get("/tenants/{tenant_id}/production/batches/stats", response_model=dict)
|
||||
async def get_production_batch_stats(
|
||||
tenant_id: UUID = Path(...),
|
||||
start_date: Optional[date] = Query(None, description="Start date for stats"),
|
||||
end_date: Optional[date] = Query(None, description="End date for stats"),
|
||||
current_user: dict = Depends(get_current_user_dep),
|
||||
production_service: ProductionService = Depends(get_production_service)
|
||||
):
|
||||
"""Aggregated stats: completed vs failed, avg yield, on-time rate"""
|
||||
try:
|
||||
|
||||
# Default to last 30 days if no dates provided
|
||||
if not start_date:
|
||||
start_date = (datetime.now() - timedelta(days=30)).date()
|
||||
if not end_date:
|
||||
end_date = datetime.now().date()
|
||||
|
||||
stats = await production_service.get_batch_statistics(tenant_id, start_date, end_date)
|
||||
|
||||
logger.info("Retrieved production batch statistics",
|
||||
tenant_id=str(tenant_id), start_date=start_date.isoformat(), end_date=end_date.isoformat())
|
||||
|
||||
return stats
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Error getting production batch stats",
|
||||
error=str(e), tenant_id=str(tenant_id))
|
||||
raise HTTPException(status_code=500, detail="Failed to get production batch stats")
|
||||
|
||||
|
||||
# ================================================================
|
||||
# PRODUCTION SCHEDULE ENDPOINTS
|
||||
# ================================================================
|
||||
|
||||
@router.get("/tenants/{tenant_id}/production/schedule", response_model=dict)
|
||||
@router.get("/tenants/{tenant_id}/production/schedules", response_model=dict)
|
||||
async def get_production_schedule(
|
||||
tenant_id: UUID = Path(...),
|
||||
start_date: Optional[date] = Query(None, description="Start date for schedule"),
|
||||
@@ -264,9 +393,6 @@ async def get_production_schedule(
|
||||
):
|
||||
"""Get production schedule for a date range"""
|
||||
try:
|
||||
current_tenant = current_user.get("tenant_id")
|
||||
if str(tenant_id) != current_tenant:
|
||||
raise HTTPException(status_code=403, detail="Access denied to this tenant")
|
||||
|
||||
# Default to next 7 days if no dates provided
|
||||
if not start_date:
|
||||
@@ -313,6 +439,166 @@ async def get_production_schedule(
|
||||
raise HTTPException(status_code=500, detail="Failed to get production schedule")
|
||||
|
||||
|
||||
@router.get("/tenants/{tenant_id}/production/schedules/{schedule_id}", response_model=ProductionScheduleResponse)
|
||||
async def get_production_schedule_details(
|
||||
tenant_id: UUID = Path(...),
|
||||
schedule_id: UUID = Path(...),
|
||||
current_user: dict = Depends(get_current_user_dep),
|
||||
db=Depends(get_db)
|
||||
):
|
||||
"""Retrieve full schedule details including assignments"""
|
||||
try:
|
||||
|
||||
from app.repositories.production_schedule_repository import ProductionScheduleRepository
|
||||
schedule_repo = ProductionScheduleRepository(db)
|
||||
|
||||
schedule = await schedule_repo.get(schedule_id)
|
||||
if not schedule or str(schedule.tenant_id) != str(tenant_id):
|
||||
raise HTTPException(status_code=404, detail="Production schedule not found")
|
||||
|
||||
logger.info("Retrieved production schedule details",
|
||||
schedule_id=str(schedule_id), tenant_id=str(tenant_id))
|
||||
|
||||
return ProductionScheduleResponse.model_validate(schedule)
|
||||
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error("Error getting production schedule details",
|
||||
error=str(e), schedule_id=str(schedule_id), tenant_id=str(tenant_id))
|
||||
raise HTTPException(status_code=500, detail="Failed to get production schedule details")
|
||||
|
||||
|
||||
@router.post("/tenants/{tenant_id}/production/schedules", response_model=ProductionScheduleResponse)
|
||||
async def create_production_schedule(
|
||||
schedule_data: ProductionScheduleCreate,
|
||||
tenant_id: UUID = Path(...),
|
||||
current_user: dict = Depends(get_current_user_dep),
|
||||
production_service: ProductionService = Depends(get_production_service)
|
||||
):
|
||||
"""Generate or manually create a daily/shift schedule"""
|
||||
try:
|
||||
|
||||
schedule = await production_service.create_production_schedule(tenant_id, schedule_data)
|
||||
|
||||
logger.info("Created production schedule",
|
||||
schedule_id=str(schedule.id), tenant_id=str(tenant_id))
|
||||
|
||||
return ProductionScheduleResponse.model_validate(schedule)
|
||||
|
||||
except ValueError as e:
|
||||
logger.warning("Invalid schedule data", error=str(e), tenant_id=str(tenant_id))
|
||||
raise HTTPException(status_code=400, detail=str(e))
|
||||
except Exception as e:
|
||||
logger.error("Error creating production schedule",
|
||||
error=str(e), tenant_id=str(tenant_id))
|
||||
raise HTTPException(status_code=500, detail="Failed to create production schedule")
|
||||
|
||||
|
||||
@router.put("/tenants/{tenant_id}/production/schedules/{schedule_id}", response_model=ProductionScheduleResponse)
|
||||
async def update_production_schedule(
|
||||
schedule_update: ProductionScheduleUpdate,
|
||||
tenant_id: UUID = Path(...),
|
||||
schedule_id: UUID = Path(...),
|
||||
current_user: dict = Depends(get_current_user_dep),
|
||||
production_service: ProductionService = Depends(get_production_service)
|
||||
):
|
||||
"""Edit schedule before finalizing"""
|
||||
try:
|
||||
|
||||
schedule = await production_service.update_production_schedule(tenant_id, schedule_id, schedule_update)
|
||||
|
||||
logger.info("Updated production schedule",
|
||||
schedule_id=str(schedule_id), tenant_id=str(tenant_id))
|
||||
|
||||
return ProductionScheduleResponse.model_validate(schedule)
|
||||
|
||||
except ValueError as e:
|
||||
logger.warning("Invalid schedule update", error=str(e), schedule_id=str(schedule_id))
|
||||
raise HTTPException(status_code=400, detail=str(e))
|
||||
except Exception as e:
|
||||
logger.error("Error updating production schedule",
|
||||
error=str(e), schedule_id=str(schedule_id), tenant_id=str(tenant_id))
|
||||
raise HTTPException(status_code=500, detail="Failed to update production schedule")
|
||||
|
||||
|
||||
@router.post("/tenants/{tenant_id}/production/schedules/{schedule_id}/finalize", response_model=ProductionScheduleResponse)
|
||||
async def finalize_production_schedule(
|
||||
tenant_id: UUID = Path(...),
|
||||
schedule_id: UUID = Path(...),
|
||||
current_user: dict = Depends(get_current_user_dep),
|
||||
production_service: ProductionService = Depends(get_production_service)
|
||||
):
|
||||
"""Lock schedule; prevents further changes"""
|
||||
try:
|
||||
|
||||
schedule = await production_service.finalize_production_schedule(tenant_id, schedule_id)
|
||||
|
||||
logger.info("Finalized production schedule",
|
||||
schedule_id=str(schedule_id), tenant_id=str(tenant_id))
|
||||
|
||||
return ProductionScheduleResponse.model_validate(schedule)
|
||||
|
||||
except ValueError as e:
|
||||
logger.warning("Cannot finalize schedule", error=str(e), schedule_id=str(schedule_id))
|
||||
raise HTTPException(status_code=400, detail=str(e))
|
||||
except Exception as e:
|
||||
logger.error("Error finalizing production schedule",
|
||||
error=str(e), schedule_id=str(schedule_id), tenant_id=str(tenant_id))
|
||||
raise HTTPException(status_code=500, detail="Failed to finalize production schedule")
|
||||
|
||||
|
||||
@router.get("/tenants/{tenant_id}/production/schedules/{date}/optimize", response_model=dict)
|
||||
async def optimize_production_schedule(
|
||||
tenant_id: UUID = Path(...),
|
||||
target_date: date = Path(..., alias="date"),
|
||||
current_user: dict = Depends(get_current_user_dep),
|
||||
production_service: ProductionService = Depends(get_production_service)
|
||||
):
|
||||
"""Trigger AI-based rescheduling suggestion based on demand/capacity"""
|
||||
try:
|
||||
|
||||
optimization_result = await production_service.optimize_schedule(tenant_id, target_date)
|
||||
|
||||
logger.info("Generated schedule optimization suggestions",
|
||||
tenant_id=str(tenant_id), date=target_date.isoformat())
|
||||
|
||||
return optimization_result
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Error optimizing production schedule",
|
||||
error=str(e), tenant_id=str(tenant_id), date=target_date.isoformat())
|
||||
raise HTTPException(status_code=500, detail="Failed to optimize production schedule")
|
||||
|
||||
|
||||
@router.get("/tenants/{tenant_id}/production/schedules/capacity-usage", response_model=dict)
|
||||
async def get_schedule_capacity_usage(
|
||||
tenant_id: UUID = Path(...),
|
||||
start_date: Optional[date] = Query(None, description="Start date for capacity usage"),
|
||||
end_date: Optional[date] = Query(None, description="End date for capacity usage"),
|
||||
current_user: dict = Depends(get_current_user_dep),
|
||||
production_service: ProductionService = Depends(get_production_service)
|
||||
):
|
||||
"""View capacity utilization over time (for reporting)"""
|
||||
try:
|
||||
|
||||
# Default to last 30 days if no dates provided
|
||||
if not start_date:
|
||||
start_date = (datetime.now() - timedelta(days=30)).date()
|
||||
if not end_date:
|
||||
end_date = datetime.now().date()
|
||||
|
||||
capacity_usage = await production_service.get_capacity_usage_report(tenant_id, start_date, end_date)
|
||||
|
||||
logger.info("Retrieved schedule capacity usage",
|
||||
tenant_id=str(tenant_id), start_date=start_date.isoformat(), end_date=end_date.isoformat())
|
||||
|
||||
return capacity_usage
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Error getting schedule capacity usage",
|
||||
error=str(e), tenant_id=str(tenant_id))
|
||||
raise HTTPException(status_code=500, detail="Failed to get schedule capacity usage")
|
||||
|
||||
|
||||
# ================================================================
@@ -328,9 +614,6 @@ async def get_capacity_status(
):
    """Get production capacity status for a specific date"""
    try:
-        current_tenant = current_user.get("tenant_id")
-        if str(tenant_id) != current_tenant:
-            raise HTTPException(status_code=403, detail="Access denied to this tenant")

        target_date = date or datetime.now().date()

@@ -352,6 +635,438 @@ async def get_capacity_status(
        raise HTTPException(status_code=500, detail="Failed to get capacity status")


@router.get("/tenants/{tenant_id}/production/capacity", response_model=dict)
|
||||
async def list_production_capacity(
|
||||
tenant_id: UUID = Path(...),
|
||||
resource_type: Optional[str] = Query(None, description="Filter by resource type (equipment/staff)"),
|
||||
date: Optional[date] = Query(None, description="Filter by date"),
|
||||
availability: Optional[bool] = Query(None, description="Filter by availability"),
|
||||
page: int = Query(1, ge=1, description="Page number"),
|
||||
page_size: int = Query(50, ge=1, le=100, description="Page size"),
|
||||
current_user: dict = Depends(get_current_user_dep),
|
||||
production_service: ProductionService = Depends(get_production_service)
|
||||
):
|
||||
"""Filter by resource_type (equipment/staff), date, availability"""
|
||||
try:
|
||||
|
||||
filters = {
|
||||
"resource_type": resource_type,
|
||||
"date": date,
|
||||
"availability": availability
|
||||
}
|
||||
|
||||
capacity_list = await production_service.get_capacity_list(tenant_id, filters, page, page_size)
|
||||
|
||||
logger.info("Retrieved production capacity list",
|
||||
tenant_id=str(tenant_id), filters=filters)
|
||||
|
||||
return capacity_list
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Error listing production capacity",
|
||||
error=str(e), tenant_id=str(tenant_id))
|
||||
raise HTTPException(status_code=500, detail="Failed to list production capacity")
|
||||
|
||||
|
||||
@router.get("/tenants/{tenant_id}/production/capacity/{resource_id}/availability", response_model=dict)
|
||||
async def check_resource_availability(
|
||||
tenant_id: UUID = Path(...),
|
||||
resource_id: str = Path(...),
|
||||
start_time: datetime = Query(..., description="Start time for availability check"),
|
||||
end_time: datetime = Query(..., description="End time for availability check"),
|
||||
current_user: dict = Depends(get_current_user_dep),
|
||||
production_service: ProductionService = Depends(get_production_service)
|
||||
):
|
||||
"""Check if oven/station is free during a time window"""
|
||||
try:
|
||||
|
||||
availability = await production_service.check_resource_availability(
|
||||
tenant_id, resource_id, start_time, end_time
|
||||
)
|
||||
|
||||
logger.info("Checked resource availability",
|
||||
tenant_id=str(tenant_id), resource_id=resource_id)
|
||||
|
||||
return availability
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Error checking resource availability",
|
||||
error=str(e), tenant_id=str(tenant_id), resource_id=resource_id)
|
||||
raise HTTPException(status_code=500, detail="Failed to check resource availability")
|
||||
|
||||
|
||||
@router.post("/tenants/{tenant_id}/production/capacity/reserve", response_model=dict)
|
||||
async def reserve_capacity(
|
||||
reservation_data: dict,
|
||||
tenant_id: UUID = Path(...),
|
||||
current_user: dict = Depends(get_current_user_dep),
|
||||
production_service: ProductionService = Depends(get_production_service)
|
||||
):
|
||||
"""Reserve equipment/staff for a future batch"""
|
||||
try:
|
||||
|
||||
reservation = await production_service.reserve_capacity(tenant_id, reservation_data)
|
||||
|
||||
logger.info("Reserved production capacity",
|
||||
tenant_id=str(tenant_id))
|
||||
|
||||
return reservation
|
||||
|
||||
except ValueError as e:
|
||||
logger.warning("Invalid reservation data", error=str(e), tenant_id=str(tenant_id))
|
||||
raise HTTPException(status_code=400, detail=str(e))
|
||||
except Exception as e:
|
||||
logger.error("Error reserving capacity",
|
||||
error=str(e), tenant_id=str(tenant_id))
|
||||
raise HTTPException(status_code=500, detail="Failed to reserve capacity")
|
||||
|
||||
|
||||
@router.put("/tenants/{tenant_id}/production/capacity/{capacity_id}", response_model=dict)
|
||||
async def update_capacity(
|
||||
capacity_update: dict,
|
||||
tenant_id: UUID = Path(...),
|
||||
capacity_id: UUID = Path(...),
|
||||
current_user: dict = Depends(get_current_user_dep),
|
||||
production_service: ProductionService = Depends(get_production_service)
|
||||
):
|
||||
"""Update maintenance status or efficiency rating"""
|
||||
try:
|
||||
|
||||
updated_capacity = await production_service.update_capacity(tenant_id, capacity_id, capacity_update)
|
||||
|
||||
logger.info("Updated production capacity",
|
||||
tenant_id=str(tenant_id), capacity_id=str(capacity_id))
|
||||
|
||||
return updated_capacity
|
||||
|
||||
except ValueError as e:
|
||||
logger.warning("Invalid capacity update", error=str(e), capacity_id=str(capacity_id))
|
||||
raise HTTPException(status_code=400, detail=str(e))
|
||||
except Exception as e:
|
||||
logger.error("Error updating capacity",
|
||||
error=str(e), tenant_id=str(tenant_id), capacity_id=str(capacity_id))
|
||||
raise HTTPException(status_code=500, detail="Failed to update capacity")
|
||||
|
||||
|
||||
@router.get("/tenants/{tenant_id}/production/capacity/bottlenecks", response_model=dict)
|
||||
async def get_capacity_bottlenecks(
|
||||
tenant_id: UUID = Path(...),
|
||||
days_ahead: int = Query(3, ge=1, le=30, description="Number of days to predict ahead"),
|
||||
current_user: dict = Depends(get_current_user_dep),
|
||||
production_service: ProductionService = Depends(get_production_service)
|
||||
):
|
||||
"""AI-powered endpoint: returns predicted bottlenecks for next 3 days"""
|
||||
try:
|
||||
|
||||
bottlenecks = await production_service.predict_capacity_bottlenecks(tenant_id, days_ahead)
|
||||
|
||||
logger.info("Retrieved capacity bottleneck predictions",
|
||||
tenant_id=str(tenant_id), days_ahead=days_ahead)
|
||||
|
||||
return bottlenecks
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Error getting capacity bottlenecks",
|
||||
error=str(e), tenant_id=str(tenant_id))
|
||||
raise HTTPException(status_code=500, detail="Failed to get capacity bottlenecks")
|
||||
|
||||
|
||||
# ================================================================
|
||||
# QUALITY CHECK ENDPOINTS
|
||||
# ================================================================
|
||||
|
||||
@router.get("/tenants/{tenant_id}/production/quality-checks", response_model=dict)
|
||||
async def list_quality_checks(
|
||||
tenant_id: UUID = Path(...),
|
||||
batch_id: Optional[UUID] = Query(None, description="Filter by batch"),
|
||||
product_id: Optional[UUID] = Query(None, description="Filter by product"),
|
||||
start_date: Optional[date] = Query(None, description="Filter from date"),
|
||||
end_date: Optional[date] = Query(None, description="Filter to date"),
|
||||
pass_fail: Optional[bool] = Query(None, description="Filter by pass/fail"),
|
||||
page: int = Query(1, ge=1, description="Page number"),
|
||||
page_size: int = Query(50, ge=1, le=100, description="Page size"),
|
||||
current_user: dict = Depends(get_current_user_dep),
|
||||
production_service: ProductionService = Depends(get_production_service)
|
||||
):
|
||||
"""List checks filtered by batch, product, date, pass/fail"""
|
||||
try:
|
||||
|
||||
filters = {
|
||||
"batch_id": str(batch_id) if batch_id else None,
|
||||
"product_id": str(product_id) if product_id else None,
|
||||
"start_date": start_date,
|
||||
"end_date": end_date,
|
||||
"pass_fail": pass_fail
|
||||
}
|
||||
|
||||
quality_checks = await production_service.get_quality_checks_list(tenant_id, filters, page, page_size)
|
||||
|
||||
logger.info("Retrieved quality checks list",
|
||||
tenant_id=str(tenant_id), filters=filters)
|
||||
|
||||
return quality_checks
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Error listing quality checks",
|
||||
error=str(e), tenant_id=str(tenant_id))
|
||||
raise HTTPException(status_code=500, detail="Failed to list quality checks")
|
||||
|
||||
|
||||
@router.get("/tenants/{tenant_id}/production/batches/{batch_id}/quality-checks", response_model=dict)
|
||||
async def get_batch_quality_checks(
|
||||
tenant_id: UUID = Path(...),
|
||||
batch_id: UUID = Path(...),
|
||||
current_user: dict = Depends(get_current_user_dep),
|
||||
production_service: ProductionService = Depends(get_production_service)
|
||||
):
|
||||
"""Get all quality checks for a specific batch"""
|
||||
try:
|
||||
|
||||
quality_checks = await production_service.get_batch_quality_checks(tenant_id, batch_id)
|
||||
|
||||
logger.info("Retrieved quality checks for batch",
|
||||
tenant_id=str(tenant_id), batch_id=str(batch_id))
|
||||
|
||||
return quality_checks
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Error getting batch quality checks",
|
||||
error=str(e), tenant_id=str(tenant_id), batch_id=str(batch_id))
|
||||
raise HTTPException(status_code=500, detail="Failed to get batch quality checks")
|
||||
|
||||
|
||||
@router.post("/tenants/{tenant_id}/production/quality-checks", response_model=dict)
|
||||
async def create_quality_check(
|
||||
quality_check_data: dict,
|
||||
tenant_id: UUID = Path(...),
|
||||
current_user: dict = Depends(get_current_user_dep),
|
||||
production_service: ProductionService = Depends(get_production_service)
|
||||
):
|
||||
"""Submit a new quality inspection result"""
|
||||
try:
|
||||
|
||||
quality_check = await production_service.create_quality_check(tenant_id, quality_check_data)
|
||||
|
||||
logger.info("Created quality check",
|
||||
tenant_id=str(tenant_id))
|
||||
|
||||
return quality_check
|
||||
|
||||
except ValueError as e:
|
||||
logger.warning("Invalid quality check data", error=str(e), tenant_id=str(tenant_id))
|
||||
raise HTTPException(status_code=400, detail=str(e))
|
||||
except Exception as e:
|
||||
logger.error("Error creating quality check",
|
||||
error=str(e), tenant_id=str(tenant_id))
|
||||
raise HTTPException(status_code=500, detail="Failed to create quality check")
|
||||
|
||||
|
||||
@router.get("/tenants/{tenant_id}/production/quality-checks/trends", response_model=dict)
|
||||
async def get_quality_trends(
|
||||
tenant_id: UUID = Path(...),
|
||||
start_date: Optional[date] = Query(None, description="Start date for trends"),
|
||||
end_date: Optional[date] = Query(None, description="End date for trends"),
|
||||
current_user: dict = Depends(get_current_user_dep),
|
||||
production_service: ProductionService = Depends(get_production_service)
|
||||
):
|
||||
"""Returns defect trends, average scores by product/equipment"""
|
||||
try:
|
||||
|
||||
# Default to last 30 days if no dates provided
|
||||
if not start_date:
|
||||
start_date = (datetime.now() - timedelta(days=30)).date()
|
||||
if not end_date:
|
||||
end_date = datetime.now().date()
|
||||
|
||||
trends = await production_service.get_quality_trends(tenant_id, start_date, end_date)
|
||||
|
||||
logger.info("Retrieved quality trends",
|
||||
tenant_id=str(tenant_id), start_date=start_date.isoformat(), end_date=end_date.isoformat())
|
||||
|
||||
return trends
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Error getting quality trends",
|
||||
error=str(e), tenant_id=str(tenant_id))
|
||||
raise HTTPException(status_code=500, detail="Failed to get quality trends")
|
||||
|
||||
|
||||
@router.get("/tenants/{tenant_id}/production/quality-checks/alerts", response_model=dict)
|
||||
async def get_quality_alerts(
|
||||
tenant_id: UUID = Path(...),
|
||||
current_user: dict = Depends(get_current_user_dep),
|
||||
production_service: ProductionService = Depends(get_production_service)
|
||||
):
|
||||
"""Active alerts where corrective action is needed"""
|
||||
try:
|
||||
|
||||
alerts = await production_service.get_quality_alerts(tenant_id)
|
||||
|
||||
logger.info("Retrieved quality alerts",
|
||||
tenant_id=str(tenant_id))
|
||||
|
||||
return alerts
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Error getting quality alerts",
|
||||
error=str(e), tenant_id=str(tenant_id))
|
||||
raise HTTPException(status_code=500, detail="Failed to get quality alerts")
|
||||
|
||||
|
||||
@router.put("/tenants/{tenant_id}/production/quality-checks/{check_id}", response_model=dict)
|
||||
async def update_quality_check(
|
||||
check_update: dict,
|
||||
tenant_id: UUID = Path(...),
|
||||
check_id: UUID = Path(...),
|
||||
current_user: dict = Depends(get_current_user_dep),
|
||||
production_service: ProductionService = Depends(get_production_service)
|
||||
):
|
||||
"""Add photos, notes, or mark corrective actions as completed"""
|
||||
try:
|
||||
|
||||
updated_check = await production_service.update_quality_check(tenant_id, check_id, check_update)
|
||||
|
||||
logger.info("Updated quality check",
|
||||
tenant_id=str(tenant_id), check_id=str(check_id))
|
||||
|
||||
return updated_check
|
||||
|
||||
except ValueError as e:
|
||||
logger.warning("Invalid quality check update", error=str(e), check_id=str(check_id))
|
||||
raise HTTPException(status_code=400, detail=str(e))
|
||||
except Exception as e:
|
||||
logger.error("Error updating quality check",
|
||||
error=str(e), tenant_id=str(tenant_id), check_id=str(check_id))
|
||||
raise HTTPException(status_code=500, detail="Failed to update quality check")
|
||||
|
||||
|
||||
# ================================================================
|
||||
# ANALYTICS / CROSS-CUTTING ENDPOINTS
|
||||
# ================================================================
|
||||
|
||||
@router.get("/tenants/{tenant_id}/production/analytics/performance", response_model=dict)
|
||||
async def get_performance_analytics(
|
||||
tenant_id: UUID = Path(...),
|
||||
start_date: Optional[date] = Query(None, description="Start date for analytics"),
|
||||
end_date: Optional[date] = Query(None, description="End date for analytics"),
|
||||
current_user: dict = Depends(get_current_user_dep),
|
||||
production_service: ProductionService = Depends(get_production_service)
|
||||
):
|
||||
"""Daily performance: completion rate, waste %, labor cost per unit"""
|
||||
try:
|
||||
|
||||
# Default to last 30 days if no dates provided
|
||||
if not start_date:
|
||||
start_date = (datetime.now() - timedelta(days=30)).date()
|
||||
if not end_date:
|
||||
end_date = datetime.now().date()
|
||||
|
||||
performance = await production_service.get_performance_analytics(tenant_id, start_date, end_date)
|
||||
|
||||
logger.info("Retrieved performance analytics",
|
||||
tenant_id=str(tenant_id), start_date=start_date.isoformat(), end_date=end_date.isoformat())
|
||||
|
||||
return performance
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Error getting performance analytics",
|
||||
error=str(e), tenant_id=str(tenant_id))
|
||||
raise HTTPException(status_code=500, detail="Failed to get performance analytics")
|
||||
|
||||
|
||||
@router.get("/tenants/{tenant_id}/production/analytics/yield-trends", response_model=dict)
|
||||
async def get_yield_trends_analytics(
|
||||
tenant_id: UUID = Path(...),
|
||||
period: str = Query("week", regex="^(week|month)$", description="Time period for trends"),
|
||||
current_user: dict = Depends(get_current_user_dep),
|
||||
production_service: ProductionService = Depends(get_production_service)
|
||||
):
|
||||
"""Yield trendline by product over past week/month"""
|
||||
try:
|
||||
|
||||
yield_trends = await production_service.get_yield_trends_analytics(tenant_id, period)
|
||||
|
||||
logger.info("Retrieved yield trends analytics",
|
||||
tenant_id=str(tenant_id), period=period)
|
||||
|
||||
return yield_trends
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Error getting yield trends analytics",
|
||||
error=str(e), tenant_id=str(tenant_id))
|
||||
raise HTTPException(status_code=500, detail="Failed to get yield trends analytics")
|
||||
|
||||
|
||||
@router.get("/tenants/{tenant_id}/production/analytics/top-defects", response_model=dict)
|
||||
async def get_top_defects_analytics(
|
||||
tenant_id: UUID = Path(...),
|
||||
current_user: dict = Depends(get_current_user_dep),
|
||||
production_service: ProductionService = Depends(get_production_service)
|
||||
):
|
||||
"""Top 5 defect types across batches"""
|
||||
try:
|
||||
|
||||
top_defects = await production_service.get_top_defects_analytics(tenant_id)
|
||||
|
||||
logger.info("Retrieved top defects analytics",
|
||||
tenant_id=str(tenant_id))
|
||||
|
||||
return top_defects
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Error getting top defects analytics",
|
||||
error=str(e), tenant_id=str(tenant_id))
|
||||
raise HTTPException(status_code=500, detail="Failed to get top defects analytics")
|
||||
|
||||
|
||||
@router.get("/tenants/{tenant_id}/production/analytics/equipment-efficiency", response_model=dict)
|
||||
async def get_equipment_efficiency_analytics(
|
||||
tenant_id: UUID = Path(...),
|
||||
current_user: dict = Depends(get_current_user_dep),
|
||||
production_service: ProductionService = Depends(get_production_service)
|
||||
):
|
||||
"""Rank ovens/mixers by uptime, yield, downtime"""
|
||||
try:
|
||||
|
||||
equipment_efficiency = await production_service.get_equipment_efficiency_analytics(tenant_id)
|
||||
|
||||
logger.info("Retrieved equipment efficiency analytics",
|
||||
tenant_id=str(tenant_id))
|
||||
|
||||
return equipment_efficiency
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Error getting equipment efficiency analytics",
|
||||
error=str(e), tenant_id=str(tenant_id))
|
||||
raise HTTPException(status_code=500, detail="Failed to get equipment efficiency analytics")
|
||||
|
||||
|
||||
@router.post("/tenants/{tenant_id}/production/analytics/generate-report", response_model=dict)
|
||||
async def generate_analytics_report(
|
||||
report_config: dict,
|
||||
tenant_id: UUID = Path(...),
|
||||
current_user: dict = Depends(get_current_user_dep),
|
||||
production_service: ProductionService = Depends(get_production_service)
|
||||
):
|
||||
"""Generate PDF report (daily summary, compliance audit)"""
|
||||
try:
|
||||
|
||||
report = await production_service.generate_analytics_report(tenant_id, report_config)
|
||||
|
||||
logger.info("Generated analytics report",
|
||||
tenant_id=str(tenant_id))
|
||||
|
||||
return report
|
||||
|
||||
except ValueError as e:
|
||||
logger.warning("Invalid report configuration", error=str(e), tenant_id=str(tenant_id))
|
||||
raise HTTPException(status_code=400, detail=str(e))
|
||||
except Exception as e:
|
||||
logger.error("Error generating analytics report",
|
||||
error=str(e), tenant_id=str(tenant_id))
|
||||
raise HTTPException(status_code=500, detail="Failed to generate analytics report")
|
||||
|
||||
|
||||
# ================================================================
|
||||
# METRICS AND ANALYTICS ENDPOINTS
|
||||
# ================================================================
|
||||
@@ -366,9 +1081,6 @@ async def get_yield_metrics(
):
    """Get production yield metrics for analysis"""
    try:
-        current_tenant = current_user.get("tenant_id")
-        if str(tenant_id) != current_tenant:
-            raise HTTPException(status_code=403, detail="Access denied to this tenant")

        from app.repositories.production_batch_repository import ProductionBatchRepository
        batch_repo = ProductionBatchRepository(db)

@@ -369,4 +369,324 @@ class ProductionBatchRepository(ProductionBaseRepository, BatchCountProvider):
                tenant_id=tenant_id,
                prefix=prefix
            )
            raise

    async def get_batches_with_filters(
        self,
        tenant_id: str,
        status: Optional[ProductionStatus] = None,
        product_id: Optional[UUID] = None,
        order_id: Optional[UUID] = None,
        start_date: Optional[date] = None,
        end_date: Optional[date] = None,
        page: int = 1,
        page_size: int = 50
    ) -> tuple[List[ProductionBatch], int]:
        """Get production batches with filters and pagination"""
        try:
            filters = {"tenant_id": tenant_id}

            if status:
                filters["status"] = status
            if product_id:
                filters["product_id"] = product_id
            if order_id:
                filters["order_id"] = order_id
            if start_date:
                start_datetime = datetime.combine(start_date, datetime.min.time())
                filters["planned_start_time__gte"] = start_datetime
            if end_date:
                end_datetime = datetime.combine(end_date, datetime.max.time())
                filters["planned_start_time__lte"] = end_datetime

            # Get total count
            total_count = await self.count(filters)

            # Get paginated results
            offset = (page - 1) * page_size
            batches = await self.get_multi(
                filters=filters,
                order_by="planned_start_time",
                order_desc=True,
                limit=page_size,
                offset=offset
            )

            logger.info("Retrieved batches with filters",
                        count=len(batches),
                        total_count=total_count,
                        page=page,
                        page_size=page_size,
                        tenant_id=tenant_id)

            return batches, total_count

        except Exception as e:
            logger.error("Error fetching batches with filters", error=str(e))
            raise DatabaseError(f"Failed to fetch batches with filters: {str(e)}")

    async def update_batch(self, batch_id: UUID, update_data: Dict[str, Any]) -> ProductionBatch:
        """Update a production batch"""
        try:
            batch = await self.get(batch_id)
            if not batch:
                raise ValidationError(f"Batch {batch_id} not found")

            # Add updated timestamp
            update_data["updated_at"] = datetime.utcnow()

            # Update the batch
            batch = await self.update(batch_id, update_data)

            logger.info("Updated production batch",
                        batch_id=str(batch_id),
                        update_fields=list(update_data.keys()))

            return batch

        except ValidationError:
            raise
        except Exception as e:
            logger.error("Error updating production batch", error=str(e))
            raise DatabaseError(f"Failed to update production batch: {str(e)}")

    async def delete_batch(self, batch_id: UUID) -> bool:
        """Delete a production batch"""
        try:
            batch = await self.get(batch_id)
            if not batch:
                raise ValidationError(f"Batch {batch_id} not found")

            # Check if batch can be deleted (not in progress or completed)
            if batch.status in [ProductionStatus.IN_PROGRESS, ProductionStatus.COMPLETED]:
                raise ValidationError(f"Cannot delete batch in {batch.status.value} status")

            success = await self.delete(batch_id)

            logger.info("Deleted production batch",
                        batch_id=str(batch_id),
                        success=success)

            return success

        except ValidationError:
            raise
        except Exception as e:
            logger.error("Error deleting production batch", error=str(e))
            raise DatabaseError(f"Failed to delete production batch: {str(e)}")

    async def start_batch(self, batch_id: UUID) -> ProductionBatch:
        """Start a production batch"""
        try:
            batch = await self.get(batch_id)
            if not batch:
                raise ValidationError(f"Batch {batch_id} not found")

            # Check if batch can be started
            if batch.status != ProductionStatus.PENDING:
                raise ValidationError(f"Cannot start batch in {batch.status.value} status")

            # Update status and start time
            update_data = {
                "status": ProductionStatus.IN_PROGRESS,
                "actual_start_time": datetime.utcnow(),
                "updated_at": datetime.utcnow()
            }

            batch = await self.update(batch_id, update_data)

            logger.info("Started production batch",
                        batch_id=str(batch_id),
                        actual_start_time=batch.actual_start_time)

            return batch

        except ValidationError:
            raise
        except Exception as e:
            logger.error("Error starting production batch", error=str(e))
            raise DatabaseError(f"Failed to start production batch: {str(e)}")

    async def complete_batch(
        self,
        batch_id: UUID,
        completion_data: Optional[Dict[str, Any]] = None
    ) -> ProductionBatch:
        """Complete a production batch"""
        try:
            batch = await self.get(batch_id)
            if not batch:
                raise ValidationError(f"Batch {batch_id} not found")

            # Check if batch can be completed
            if batch.status not in [ProductionStatus.IN_PROGRESS, ProductionStatus.QUALITY_CHECK]:
                raise ValidationError(f"Cannot complete batch in {batch.status.value} status")

            # Prepare completion data
            update_data = {
                "status": ProductionStatus.COMPLETED,
                "actual_end_time": datetime.utcnow(),
                "completed_at": datetime.utcnow(),
                "updated_at": datetime.utcnow()
            }

            # Add optional completion data
            if completion_data:
                if "actual_quantity" in completion_data:
                    update_data["actual_quantity"] = completion_data["actual_quantity"]
                    # Calculate yield percentage
                    if batch.planned_quantity > 0:
                        update_data["yield_percentage"] = (
                            completion_data["actual_quantity"] / batch.planned_quantity
                        ) * 100

                if "notes" in completion_data:
                    update_data["production_notes"] = completion_data["notes"]

                if "quality_score" in completion_data:
                    update_data["quality_score"] = completion_data["quality_score"]

            # Calculate actual duration if start time exists
            if batch.actual_start_time:
                duration = update_data["actual_end_time"] - batch.actual_start_time
                update_data["actual_duration_minutes"] = int(duration.total_seconds() / 60)

            batch = await self.update(batch_id, update_data)

            logger.info("Completed production batch",
                        batch_id=str(batch_id),
                        actual_quantity=update_data.get("actual_quantity"),
                        yield_percentage=update_data.get("yield_percentage"))

            return batch

        except ValidationError:
            raise
        except Exception as e:
            logger.error("Error completing production batch", error=str(e))
            raise DatabaseError(f"Failed to complete production batch: {str(e)}")

    async def get_batch_statistics(
        self,
        tenant_id: str,
        start_date: Optional[date] = None,
        end_date: Optional[date] = None
    ) -> Dict[str, Any]:
        """Get batch statistics for a tenant"""
        try:
            # Use date range or default to last 30 days
            if not start_date:
                start_date = (datetime.utcnow() - timedelta(days=30)).date()
            if not end_date:
                end_date = datetime.utcnow().date()

            batches = await self.get_batches_by_date_range(tenant_id, start_date, end_date)

            total_batches = len(batches)
            completed_batches = len([b for b in batches if b.status == ProductionStatus.COMPLETED])
            failed_batches = len([b for b in batches if b.status == ProductionStatus.FAILED])
            cancelled_batches = len([b for b in batches if b.status == ProductionStatus.CANCELLED])

            # Calculate rates
            completion_rate = (completed_batches / total_batches * 100) if total_batches > 0 else 0

            # Calculate average yield
            completed_with_yield = [b for b in batches if b.yield_percentage is not None]
            average_yield = (
                sum(b.yield_percentage for b in completed_with_yield) / len(completed_with_yield)
                if completed_with_yield else 0
            )

            # Calculate on-time rate
            on_time_completed = len([
                b for b in batches
                if b.status == ProductionStatus.COMPLETED
                and b.actual_end_time
                and b.planned_end_time
                and b.actual_end_time <= b.planned_end_time
            ])
            on_time_rate = (on_time_completed / completed_batches * 100) if completed_batches > 0 else 0

            return {
                "total_batches": total_batches,
                "completed_batches": completed_batches,
                "failed_batches": failed_batches,
                "cancelled_batches": cancelled_batches,
                "completion_rate": round(completion_rate, 2),
                "average_yield": round(average_yield, 2),
                "on_time_rate": round(on_time_rate, 2),
                "period_start": start_date.isoformat(),
                "period_end": end_date.isoformat()
            }

        except Exception as e:
            logger.error("Error calculating batch statistics", error=str(e))
            raise DatabaseError(f"Failed to calculate batch statistics: {str(e)}")

    async def get_batches_filtered(
        self,
        filters: Dict[str, Any],
        page: int,
        page_size: int
    ) -> List[ProductionBatch]:
        """Get batches with filters and pagination"""
        try:
            query = select(ProductionBatch)

            # Apply filters
            if "tenant_id" in filters:
                query = query.where(ProductionBatch.tenant_id == filters["tenant_id"])
            if "status" in filters:
                query = query.where(ProductionBatch.status == filters["status"])
            if "product_id" in filters:
                query = query.where(ProductionBatch.product_id == filters["product_id"])
            if "order_id" in filters:
                query = query.where(ProductionBatch.order_id == filters["order_id"])
            if "start_date" in filters:
                query = query.where(ProductionBatch.planned_start_time >= filters["start_date"])
            if "end_date" in filters:
                query = query.where(ProductionBatch.planned_end_time <= filters["end_date"])

            # Apply pagination
            offset = (page - 1) * page_size
            query = query.offset(offset).limit(page_size)

            # Order by created_at descending
            query = query.order_by(desc(ProductionBatch.created_at))

            result = await self.session.execute(query)
            batches = result.scalars().all()

            return list(batches)

        except Exception as e:
            logger.error("Error getting filtered batches", error=str(e))
            raise DatabaseError(f"Failed to get filtered batches: {str(e)}")

    async def count_batches_filtered(self, filters: Dict[str, Any]) -> int:
        """Count batches with filters"""
        try:
            query = select(func.count(ProductionBatch.id))

            # Apply same filters as get_batches_filtered
            if "tenant_id" in filters:
                query = query.where(ProductionBatch.tenant_id == filters["tenant_id"])
            if "status" in filters:
                query = query.where(ProductionBatch.status == filters["status"])
            if "product_id" in filters:
                query = query.where(ProductionBatch.product_id == filters["product_id"])
            if "order_id" in filters:
                query = query.where(ProductionBatch.order_id == filters["order_id"])
            if "start_date" in filters:
                query = query.where(ProductionBatch.planned_start_time >= filters["start_date"])
            if "end_date" in filters:
                query = query.where(ProductionBatch.planned_end_time <= filters["end_date"])

            result = await self.session.execute(query)
            count = result.scalar()

            return count or 0

        except Exception as e:
            logger.error("Error counting filtered batches", error=str(e))
            raise DatabaseError(f"Failed to count filtered batches: {str(e)}")
@@ -338,4 +338,72 @@ class ProductionCapacityRepository(ProductionBaseRepository):
            raise
        except Exception as e:
            logger.error("Error setting maintenance mode", error=str(e))
            raise DatabaseError(f"Failed to set maintenance mode: {str(e)}")

    async def get_capacity_with_filters(
        self,
        tenant_id: str,
        resource_type: Optional[str] = None,
        date_filter: Optional[date] = None,
        availability: Optional[bool] = None,
        page: int = 1,
        page_size: int = 50
    ) -> tuple[List[ProductionCapacity], int]:
        """Get production capacity with filters and pagination"""
        try:
            filters = {"tenant_id": tenant_id}

            if resource_type:
                filters["resource_type"] = resource_type
            if date_filter:
                filters["date"] = date_filter
            if availability is not None:
                filters["is_available"] = availability

            # Get total count
            total_count = await self.count(filters)

            # Get paginated results
            offset = (page - 1) * page_size
            capacities = await self.get_multi(
                filters=filters,
                order_by="date",
                order_desc=True,
                limit=page_size,
                offset=offset
            )

            logger.info("Retrieved capacity with filters",
                        count=len(capacities),
                        total_count=total_count,
                        page=page,
                        page_size=page_size,
                        tenant_id=tenant_id)

            return capacities, total_count

        except Exception as e:
            logger.error("Error fetching capacity with filters", error=str(e))
            raise DatabaseError(f"Failed to fetch capacity with filters: {str(e)}")

    async def get_capacity_by_date(self, tenant_id: str, target_date: date) -> List[ProductionCapacity]:
        """Get all capacity entries for a specific date"""
        try:
            capacities = await self.get_multi(
                filters={
                    "tenant_id": tenant_id,
                    "date": target_date
                },
                order_by="start_time"
            )

            logger.info("Retrieved capacity by date",
                        count=len(capacities),
                        date=target_date.isoformat(),
                        tenant_id=tenant_id)

            return capacities

        except Exception as e:
            logger.error("Error fetching capacity by date", error=str(e))
            raise DatabaseError(f"Failed to fetch capacity by date: {str(e)}")
@@ -276,4 +276,110 @@ class ProductionScheduleRepository(ProductionBaseRepository):

        except Exception as e:
            logger.error("Error calculating schedule performance summary", error=str(e))
            raise DatabaseError(f"Failed to calculate schedule performance summary: {str(e)}")
    async def get_schedules_with_filters(
        self,
        tenant_id: str,
        start_date: Optional[date] = None,
        end_date: Optional[date] = None,
        is_finalized: Optional[bool] = None,
        page: int = 1,
        page_size: int = 50
    ) -> tuple[List[ProductionSchedule], int]:
        """Get production schedules with filters and pagination"""
        try:
            filters = {"tenant_id": tenant_id}

            if start_date:
                filters["schedule_date__gte"] = start_date
            if end_date:
                filters["schedule_date__lte"] = end_date
            if is_finalized is not None:
                filters["is_finalized"] = is_finalized

            # Get total count
            total_count = await self.count(filters)

            # Get paginated results
            offset = (page - 1) * page_size
            schedules = await self.get_multi(
                filters=filters,
                order_by="schedule_date",
                order_desc=True,
                limit=page_size,
                offset=offset
            )

            logger.info("Retrieved schedules with filters",
                        count=len(schedules),
                        total_count=total_count,
                        page=page,
                        page_size=page_size,
                        tenant_id=tenant_id)

            return schedules, total_count

        except Exception as e:
            logger.error("Error fetching schedules with filters", error=str(e))
            raise DatabaseError(f"Failed to fetch schedules with filters: {str(e)}")
    async def update_schedule(self, schedule_id: UUID, update_data: Dict[str, Any]) -> ProductionSchedule:
        """Update a production schedule"""
        try:
            schedule = await self.get(schedule_id)
            if not schedule:
                raise ValidationError(f"Schedule {schedule_id} not found")

            # Add updated timestamp
            update_data["updated_at"] = datetime.utcnow()

            # Update the schedule
            schedule = await self.update(schedule_id, update_data)

            logger.info("Updated production schedule",
                        schedule_id=str(schedule_id),
                        update_fields=list(update_data.keys()))

            return schedule

        except ValidationError:
            raise
        except Exception as e:
            logger.error("Error updating production schedule", error=str(e))
            raise DatabaseError(f"Failed to update production schedule: {str(e)}")
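One caveat on the timestamp above: `datetime.utcnow()` returns a naive datetime and is deprecated as of Python 3.12. A drop-in alternative, assuming the `updated_at` column accepts timezone-aware values (not confirmed by this commit):

    from datetime import datetime, timezone

    # Timezone-aware equivalent of the deprecated datetime.utcnow()
    update_data["updated_at"] = datetime.now(timezone.utc)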
    async def delete_schedule(self, schedule_id: UUID) -> bool:
        """Delete a production schedule"""
        try:
            schedule = await self.get(schedule_id)
            if not schedule:
                raise ValidationError(f"Schedule {schedule_id} not found")

            # Check if schedule can be deleted (not finalized)
            if schedule.is_finalized:
                raise ValidationError("Cannot delete finalized schedule")

            success = await self.delete(schedule_id)

            logger.info("Deleted production schedule",
                        schedule_id=str(schedule_id),
                        success=success)

            return success

        except ValidationError:
            raise
        except Exception as e:
            logger.error("Error deleting production schedule", error=str(e))
            raise DatabaseError(f"Failed to delete production schedule: {str(e)}")
    async def get_todays_schedule(self, tenant_id: str) -> Optional[ProductionSchedule]:
        """Get today's production schedule for a tenant"""
        try:
            today = datetime.utcnow().date()
            return await self.get_schedule_by_date(tenant_id, today)

        except Exception as e:
            logger.error("Error fetching today's schedule", error=str(e))
            raise DatabaseError(f"Failed to fetch today's schedule: {str(e)}")
@@ -316,4 +316,60 @@ class QualityCheckRepository(ProductionBaseRepository):

        except Exception as e:
            logger.error("Error calculating quality trends", error=str(e))
            raise DatabaseError(f"Failed to calculate quality trends: {str(e)}")
    async def get_quality_checks_with_filters(
        self,
        tenant_id: str,
        batch_id: Optional[UUID] = None,
        product_id: Optional[UUID] = None,
        start_date: Optional[date] = None,
        end_date: Optional[date] = None,
        pass_fail: Optional[bool] = None,
        page: int = 1,
        page_size: int = 50
    ) -> tuple[List[QualityCheck], int]:
        """Get quality checks with filters and pagination"""
        try:
            filters = {"tenant_id": tenant_id}

            if batch_id:
                filters["batch_id"] = batch_id
            if product_id:
                # Filtering by product_id requires a join with production batches,
                # so this filter is currently ignored (see the sketch after this method)
                pass
            if start_date:
                start_datetime = datetime.combine(start_date, datetime.min.time())
                filters["check_time__gte"] = start_datetime
            if end_date:
                end_datetime = datetime.combine(end_date, datetime.max.time())
                filters["check_time__lte"] = end_datetime
            if pass_fail is not None:
                filters["pass_fail"] = pass_fail

            # Get total count
            total_count = await self.count(filters)

            # Get paginated results
            offset = (page - 1) * page_size
            checks = await self.get_multi(
                filters=filters,
                order_by="check_time",
                order_desc=True,
                limit=page_size,
                offset=offset
            )

            logger.info("Retrieved quality checks with filters",
                        count=len(checks),
                        total_count=total_count,
                        page=page,
                        page_size=page_size,
                        tenant_id=tenant_id)

            return checks, total_count

        except Exception as e:
            logger.error("Error fetching quality checks with filters", error=str(e))
            raise DatabaseError(f"Failed to fetch quality checks with filters: {str(e)}")
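A sketch of the deferred `product_id` filter noted above, assuming `QualityCheck.batch_id` is a foreign key to `ProductionBatch.id` and that `ProductionBatch` is importable here (column names are assumptions, not confirmed by this diff). Because the join cannot be expressed through the `get_multi` filter dict, both the list and count queries would need this raw-query path:

    # Hypothetical replacement for the ignored product_id branch
    query = (
        select(QualityCheck)
        .join(ProductionBatch, QualityCheck.batch_id == ProductionBatch.id)
        .where(QualityCheck.tenant_id == tenant_id)
        .where(ProductionBatch.product_id == product_id)
        .order_by(QualityCheck.check_time.desc())
        .limit(page_size)
        .offset((page - 1) * page_size)
    )
    result = await self.session.execute(query)
    checks = result.scalars().all()
    # The count query needs the same join so the total stays consistent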
@@ -18,9 +18,10 @@ from app.repositories.production_batch_repository import ProductionBatchReposito
from app.repositories.production_schedule_repository import ProductionScheduleRepository
from app.repositories.production_capacity_repository import ProductionCapacityRepository
from app.repositories.quality_check_repository import QualityCheckRepository
from app.models.production import ProductionBatch, ProductionStatus, ProductionPriority
from app.models.production import ProductionBatch, ProductionSchedule, ProductionStatus, ProductionPriority
from app.schemas.production import (
    ProductionBatchCreate, ProductionBatchUpdate, ProductionBatchStatusUpdate,
    ProductionScheduleCreate, ProductionScheduleUpdate, ProductionScheduleResponse,
    DailyProductionRequirements, ProductionDashboardSummary, ProductionMetrics
)
@@ -124,10 +125,46 @@ class ProductionService:
            return batch

        except Exception as e:
            logger.error("Error creating production batch",
                         error=str(e), tenant_id=str(tenant_id))
            raise
    async def get_production_batches_list(
        self,
        tenant_id: UUID,
        filters: Dict[str, Any],
        page: int,
        page_size: int
    ) -> "ProductionBatchListResponse":
        """Get list of production batches with filtering and pagination"""
        try:
            async with self.database_manager.get_session() as session:
                batch_repo = ProductionBatchRepository(session)

                # Apply filters
                filter_dict = {k: v for k, v in filters.items() if v is not None}
                filter_dict["tenant_id"] = str(tenant_id)

                # Get batches with pagination
                batches = await batch_repo.get_batches_filtered(filter_dict, page, page_size)
                total_count = await batch_repo.count_batches_filtered(filter_dict)

                # Convert to response format
                from app.schemas.production import ProductionBatchResponse, ProductionBatchListResponse
                batch_responses = [ProductionBatchResponse.model_validate(batch) for batch in batches]

                return ProductionBatchListResponse(
                    batches=batch_responses,
                    total_count=total_count,
                    page=page,
                    page_size=page_size
                )

        except Exception as e:
            logger.error("Error getting production batches list",
                         error=str(e), tenant_id=str(tenant_id))
            raise
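For reference, a hedged sketch of the API-layer caller this method implies; the route path, query parameters, and tenant check are assumptions modeled on the dashboard endpoints, not code from this commit:

    @router.get("/tenants/{tenant_id}/production/batches", response_model=ProductionBatchListResponse)
    async def list_production_batches(
        tenant_id: UUID = Path(...),
        status: Optional[ProductionStatusEnum] = Query(None),
        page: int = Query(1, ge=1),
        page_size: int = Query(50, ge=1, le=200),
        current_user: dict = Depends(get_current_user_dep),
        production_service: ProductionService = Depends(get_production_service)
    ):
        """List production batches with an optional status filter and pagination."""
        if str(tenant_id) != current_user.get("tenant_id"):
            raise HTTPException(status_code=403, detail="Access denied to this tenant")
        return await production_service.get_production_batches_list(
            tenant_id, {"status": status}, page, page_size
        )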
    async def update_batch_status(
        self,
        tenant_id: UUID,
@@ -394,4 +431,554 @@ class ProductionService:
        except Exception as e:
            logger.error("Error updating inventory on batch completion",
                         error=str(e), batch_id=str(batch.id))
            # Don't raise - inventory update failure shouldn't prevent batch completion
    # Additional Batch Methods
    async def update_production_batch(
        self,
        tenant_id: UUID,
        batch_id: UUID,
        batch_update: ProductionBatchUpdate
    ) -> ProductionBatch:
        """Update production batch"""
        try:
            async with self.database_manager.get_session() as session:
                batch_repo = ProductionBatchRepository(session)

                batch = await batch_repo.update_batch(batch_id, batch_update.model_dump(exclude_none=True))

                logger.info("Updated production batch",
                            batch_id=str(batch_id), tenant_id=str(tenant_id))

                return batch

        except Exception as e:
            logger.error("Error updating production batch",
                         error=str(e), batch_id=str(batch_id), tenant_id=str(tenant_id))
            raise
    async def delete_production_batch(self, tenant_id: UUID, batch_id: UUID):
        """Delete/cancel production batch"""
        try:
            async with self.database_manager.get_session() as session:
                batch_repo = ProductionBatchRepository(session)

                # Check that the batch exists and can be deleted
                batch = await batch_repo.get(batch_id)
                if not batch:
                    raise ValueError(f"Batch {batch_id} not found")
                if batch.status in [ProductionStatus.IN_PROGRESS, ProductionStatus.COMPLETED]:
                    raise ValueError("Cannot delete batch that is in progress or completed")

                await batch_repo.delete_batch(batch_id)

                logger.info("Deleted production batch",
                            batch_id=str(batch_id), tenant_id=str(tenant_id))

        except Exception as e:
            logger.error("Error deleting production batch",
                         error=str(e), batch_id=str(batch_id), tenant_id=str(tenant_id))
            raise
    async def start_production_batch(self, tenant_id: UUID, batch_id: UUID) -> ProductionBatch:
        """Start production batch"""
        try:
            async with self.database_manager.get_session() as session:
                batch_repo = ProductionBatchRepository(session)

                batch = await batch_repo.start_batch(batch_id)

                logger.info("Started production batch",
                            batch_id=str(batch_id), tenant_id=str(tenant_id))

                return batch

        except Exception as e:
            logger.error("Error starting production batch",
                         error=str(e), batch_id=str(batch_id), tenant_id=str(tenant_id))
            raise
    async def complete_production_batch(
        self,
        tenant_id: UUID,
        batch_id: UUID,
        completion_data: Optional[Dict[str, Any]] = None
    ) -> ProductionBatch:
        """Complete production batch"""
        try:
            async with self.database_manager.get_session() as session:
                batch_repo = ProductionBatchRepository(session)

                batch = await batch_repo.complete_batch(batch_id, completion_data or {})

                # Update inventory if actual quantity is available
                if batch.actual_quantity:
                    await self._update_inventory_on_completion(tenant_id, batch, batch.actual_quantity)

                logger.info("Completed production batch",
                            batch_id=str(batch_id), tenant_id=str(tenant_id))

                return batch

        except Exception as e:
            logger.error("Error completing production batch",
                         error=str(e), batch_id=str(batch_id), tenant_id=str(tenant_id))
            raise
    async def get_batch_statistics(
        self,
        tenant_id: UUID,
        start_date: date,
        end_date: date
    ) -> Dict[str, Any]:
        """Get batch statistics"""
        try:
            async with self.database_manager.get_session() as session:
                batch_repo = ProductionBatchRepository(session)

                stats = await batch_repo.get_batch_statistics(str(tenant_id), start_date, end_date)

                return stats

        except Exception as e:
            logger.error("Error getting batch statistics",
                         error=str(e), tenant_id=str(tenant_id))
            raise
    # Production Schedule Methods
    async def create_production_schedule(
        self,
        tenant_id: UUID,
        schedule_data: ProductionScheduleCreate
    ) -> ProductionSchedule:
        """Create production schedule"""
        try:
            async with self.database_manager.get_session() as session:
                schedule_repo = ProductionScheduleRepository(session)

                schedule_dict = schedule_data.model_dump()
                schedule_dict["tenant_id"] = tenant_id

                schedule = await schedule_repo.create_schedule(schedule_dict)

                logger.info("Created production schedule",
                            schedule_id=str(schedule.id), tenant_id=str(tenant_id))

                return schedule

        except Exception as e:
            logger.error("Error creating production schedule",
                         error=str(e), tenant_id=str(tenant_id))
            raise
    async def update_production_schedule(
        self,
        tenant_id: UUID,
        schedule_id: UUID,
        schedule_update: ProductionScheduleUpdate
    ) -> ProductionSchedule:
        """Update production schedule"""
        try:
            async with self.database_manager.get_session() as session:
                schedule_repo = ProductionScheduleRepository(session)

                schedule = await schedule_repo.update_schedule(
                    schedule_id,
                    schedule_update.model_dump(exclude_none=True)
                )

                logger.info("Updated production schedule",
                            schedule_id=str(schedule_id), tenant_id=str(tenant_id))

                return schedule

        except Exception as e:
            logger.error("Error updating production schedule",
                         error=str(e), schedule_id=str(schedule_id), tenant_id=str(tenant_id))
            raise
    async def finalize_production_schedule(
        self,
        tenant_id: UUID,
        schedule_id: UUID
    ) -> ProductionSchedule:
        """Finalize production schedule"""
        try:
            async with self.database_manager.get_session() as session:
                schedule_repo = ProductionScheduleRepository(session)

                schedule = await schedule_repo.finalize_schedule(schedule_id)

                logger.info("Finalized production schedule",
                            schedule_id=str(schedule_id), tenant_id=str(tenant_id))

                return schedule

        except Exception as e:
            logger.error("Error finalizing production schedule",
                         error=str(e), schedule_id=str(schedule_id), tenant_id=str(tenant_id))
            raise
    async def optimize_schedule(self, tenant_id: UUID, target_date: date) -> Dict[str, Any]:
        """Optimize schedule using AI"""
        try:
            # Mock AI optimization for now
            return {
                "optimized": True,
                "suggestions": [
                    {
                        "type": "reschedule",
                        "message": "Move croissant production to 6 AM to avoid oven congestion",
                        "impact": "Reduces wait time by 30 minutes"
                    }
                ],
                "predicted_efficiency": 92.5
            }

        except Exception as e:
            logger.error("Error optimizing schedule",
                         error=str(e), tenant_id=str(tenant_id))
            raise
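A small sketch of consuming the mock payload returned by `optimize_schedule` above; the key names mirror that dictionary and nothing beyond it is assumed:

    def summarize_optimization(result: Dict[str, Any]) -> str:
        """Render the optimizer's suggestions as a short human-readable report."""
        lines = [f"Predicted efficiency: {result['predicted_efficiency']}%"]
        for suggestion in result["suggestions"]:
            lines.append(
                f"[{suggestion['type']}] {suggestion['message']} ({suggestion['impact']})"
            )
        return "\n".join(lines)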
    async def get_capacity_usage_report(
        self,
        tenant_id: UUID,
        start_date: date,
        end_date: date
    ) -> Dict[str, Any]:
        """Get capacity usage report"""
        try:
            async with self.database_manager.get_session() as session:
                capacity_repo = ProductionCapacityRepository(session)

                usage_data = await capacity_repo.get_capacity_usage_report(
                    str(tenant_id), start_date, end_date
                )

                return usage_data

        except Exception as e:
            logger.error("Error getting capacity usage report",
                         error=str(e), tenant_id=str(tenant_id))
            raise
    # Capacity Methods
    async def get_capacity_list(
        self,
        tenant_id: UUID,
        filters: Dict[str, Any],
        page: int,
        page_size: int
    ) -> Dict[str, Any]:
        """Get capacity list with filters"""
        try:
            async with self.database_manager.get_session() as session:
                capacity_repo = ProductionCapacityRepository(session)

                capacity_list = await capacity_repo.get_capacity_list(
                    str(tenant_id), filters, page, page_size
                )

                return capacity_list

        except Exception as e:
            logger.error("Error getting capacity list",
                         error=str(e), tenant_id=str(tenant_id))
            raise
    async def check_resource_availability(
        self,
        tenant_id: UUID,
        resource_id: str,
        start_time: datetime,
        end_time: datetime
    ) -> Dict[str, Any]:
        """Check resource availability"""
        try:
            async with self.database_manager.get_session() as session:
                capacity_repo = ProductionCapacityRepository(session)

                availability = await capacity_repo.check_resource_availability(
                    str(tenant_id), resource_id, start_time, end_time
                )

                return availability

        except Exception as e:
            logger.error("Error checking resource availability",
                         error=str(e), tenant_id=str(tenant_id))
            raise
    async def reserve_capacity(
        self,
        tenant_id: UUID,
        reservation_data: Dict[str, Any]
    ) -> Dict[str, Any]:
        """Reserve capacity"""
        try:
            async with self.database_manager.get_session() as session:
                capacity_repo = ProductionCapacityRepository(session)

                reservation_data["tenant_id"] = str(tenant_id)
                reservation = await capacity_repo.reserve_capacity(reservation_data)

                return reservation

        except Exception as e:
            logger.error("Error reserving capacity",
                         error=str(e), tenant_id=str(tenant_id))
            raise
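Note that `check_resource_availability` and `reserve_capacity` are separate calls, so two callers can both pass the check before either reserves. A sketch of a combined guard; the `available` response key and the adequacy of a single call path as the critical section are assumptions, not guarantees of this commit:

    async def check_and_reserve(service: "ProductionService", tenant_id: UUID,
                                reservation: Dict[str, Any]) -> Dict[str, Any]:
        """Check availability and reserve in one call path (race window reduced, not removed)."""
        availability = await service.check_resource_availability(
            tenant_id,
            reservation["resource_id"],
            reservation["start_time"],
            reservation["end_time"],
        )
        if not availability.get("available"):  # assumed response key
            raise ValueError("Resource is not available for the requested window")
        return await service.reserve_capacity(tenant_id, reservation)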
    async def update_capacity(
        self,
        tenant_id: UUID,
        capacity_id: UUID,
        update_data: Dict[str, Any]
    ) -> Dict[str, Any]:
        """Update capacity"""
        try:
            async with self.database_manager.get_session() as session:
                capacity_repo = ProductionCapacityRepository(session)

                capacity = await capacity_repo.update_capacity(capacity_id, update_data)

                return capacity

        except Exception as e:
            logger.error("Error updating capacity",
                         error=str(e), tenant_id=str(tenant_id))
            raise
    async def predict_capacity_bottlenecks(
        self,
        tenant_id: UUID,
        days_ahead: int
    ) -> Dict[str, Any]:
        """Predict capacity bottlenecks"""
        try:
            # Mock AI prediction for now
            return {
                "bottlenecks": [
                    {
                        "date": (date.today() + timedelta(days=1)).isoformat(),
                        "time_slot": "06:00-07:00",
                        "resource_name": "Oven #3",
                        "predicted_utilization": 95.0,
                        "severity": "high",
                        "suggestion": "Consider scheduling lighter load items during this period"
                    }
                ]
            }

        except Exception as e:
            logger.error("Error predicting capacity bottlenecks",
                         error=str(e), tenant_id=str(tenant_id))
            raise
    # Quality Methods
    async def get_quality_checks_list(
        self,
        tenant_id: UUID,
        filters: Dict[str, Any],
        page: int,
        page_size: int
    ) -> Dict[str, Any]:
        """Get quality checks list"""
        try:
            async with self.database_manager.get_session() as session:
                quality_repo = QualityCheckRepository(session)

                quality_checks = await quality_repo.get_quality_checks_list(
                    str(tenant_id), filters, page, page_size
                )

                return quality_checks

        except Exception as e:
            logger.error("Error getting quality checks list",
                         error=str(e), tenant_id=str(tenant_id))
            raise
    async def get_batch_quality_checks(
        self,
        tenant_id: UUID,
        batch_id: UUID
    ) -> Dict[str, Any]:
        """Get quality checks for a specific batch"""
        try:
            async with self.database_manager.get_session() as session:
                quality_repo = QualityCheckRepository(session)

                checks = await quality_repo.get_checks_by_batch(str(tenant_id), str(batch_id))

                return {"quality_checks": [check.to_dict() for check in checks]}

        except Exception as e:
            logger.error("Error getting batch quality checks",
                         error=str(e), tenant_id=str(tenant_id))
            raise
    async def create_quality_check(
        self,
        tenant_id: UUID,
        quality_check_data: Dict[str, Any]
    ) -> Dict[str, Any]:
        """Create quality check"""
        try:
            async with self.database_manager.get_session() as session:
                quality_repo = QualityCheckRepository(session)

                quality_check_data["tenant_id"] = str(tenant_id)
                check = await quality_repo.create_quality_check(quality_check_data)

                return check.to_dict()

        except Exception as e:
            logger.error("Error creating quality check",
                         error=str(e), tenant_id=str(tenant_id))
            raise
    async def update_quality_check(
        self,
        tenant_id: UUID,
        check_id: UUID,
        update_data: Dict[str, Any]
    ) -> Dict[str, Any]:
        """Update quality check"""
        try:
            async with self.database_manager.get_session() as session:
                quality_repo = QualityCheckRepository(session)

                check = await quality_repo.update_quality_check(check_id, update_data)

                return check.to_dict()

        except Exception as e:
            logger.error("Error updating quality check",
                         error=str(e), tenant_id=str(tenant_id))
            raise
    async def get_quality_trends(
        self,
        tenant_id: UUID,
        start_date: date,
        end_date: date
    ) -> Dict[str, Any]:
        """Get quality trends"""
        try:
            async with self.database_manager.get_session() as session:
                quality_repo = QualityCheckRepository(session)

                trends = await quality_repo.get_quality_trends(str(tenant_id), start_date, end_date)

                return trends

        except Exception as e:
            logger.error("Error getting quality trends",
                         error=str(e), tenant_id=str(tenant_id))
            raise
    async def get_quality_alerts(self, tenant_id: UUID) -> Dict[str, Any]:
        """Get quality alerts"""
        try:
            async with self.database_manager.get_session() as session:
                quality_repo = QualityCheckRepository(session)

                alerts = await quality_repo.get_quality_alerts(str(tenant_id))

                return alerts

        except Exception as e:
            logger.error("Error getting quality alerts",
                         error=str(e), tenant_id=str(tenant_id))
            raise
    # Analytics Methods
    async def get_performance_analytics(
        self,
        tenant_id: UUID,
        start_date: date,
        end_date: date
    ) -> Dict[str, Any]:
        """Get performance analytics"""
        try:
            async with self.database_manager.get_session() as session:
                batch_repo = ProductionBatchRepository(session)

                analytics = await batch_repo.get_performance_analytics(
                    str(tenant_id), start_date, end_date
                )

                return analytics

        except Exception as e:
            logger.error("Error getting performance analytics",
                         error=str(e), tenant_id=str(tenant_id))
            raise
    async def get_yield_trends_analytics(
        self,
        tenant_id: UUID,
        period: str
    ) -> Dict[str, Any]:
        """Get yield trends analytics"""
        try:
            async with self.database_manager.get_session() as session:
                batch_repo = ProductionBatchRepository(session)

                trends = await batch_repo.get_yield_trends(str(tenant_id), period)

                return trends

        except Exception as e:
            logger.error("Error getting yield trends analytics",
                         error=str(e), tenant_id=str(tenant_id))
            raise
    async def get_top_defects_analytics(self, tenant_id: UUID) -> Dict[str, Any]:
        """Get top defects analytics"""
        try:
            async with self.database_manager.get_session() as session:
                quality_repo = QualityCheckRepository(session)

                defects = await quality_repo.get_top_defects(str(tenant_id))

                return defects

        except Exception as e:
            logger.error("Error getting top defects analytics",
                         error=str(e), tenant_id=str(tenant_id))
            raise
    async def get_equipment_efficiency_analytics(self, tenant_id: UUID) -> Dict[str, Any]:
        """Get equipment efficiency analytics"""
        try:
            async with self.database_manager.get_session() as session:
                capacity_repo = ProductionCapacityRepository(session)

                efficiency = await capacity_repo.get_equipment_efficiency(str(tenant_id))

                return efficiency

        except Exception as e:
            logger.error("Error getting equipment efficiency analytics",
                         error=str(e), tenant_id=str(tenant_id))
            raise
    async def generate_analytics_report(
        self,
        tenant_id: UUID,
        report_config: Dict[str, Any]
    ) -> Dict[str, Any]:
        """Generate analytics report"""
        try:
            # Mock report generation for now
            return {
                "report_id": f"report_{tenant_id}_{date.today().isoformat()}",
                "generated_at": datetime.now().isoformat(),
                "config": report_config,
                "download_url": f"/reports/production_{tenant_id}_{date.today().isoformat()}.pdf"
            }

        except Exception as e:
            logger.error("Error generating analytics report",
                         error=str(e), tenant_id=str(tenant_id))
            raise
@@ -514,6 +514,26 @@ async def get_user_tenants_enhanced(
            detail="Failed to get user tenants"
        )

@router.get("/tenants/members/user/{user_id}")
@track_endpoint_metrics("tenant_get_user_memberships")
async def get_user_memberships(
    user_id: str = Path(..., description="User ID"),
    tenant_service: EnhancedTenantService = Depends(get_enhanced_tenant_service)
):
    """Get all tenant memberships for a user (for authentication service)"""
    try:
        memberships = await tenant_service.get_user_memberships(user_id)
        logger.info("Retrieved user memberships", user_id=user_id, membership_count=len(memberships))
        return memberships

    except Exception as e:
        logger.error("Get user memberships failed", user_id=user_id, error=str(e))
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to get user memberships"
        )
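A sketch of how the authentication service might consume this endpoint; the base URL, bearer-token header, and JSON body shape are assumptions:

    import httpx

    async def fetch_user_memberships(user_id: str, base_url: str, token: str) -> list:
        """Call the tenant service's membership endpoint and return the decoded JSON."""
        async with httpx.AsyncClient(base_url=base_url) as client:
            response = await client.get(
                f"/tenants/members/user/{user_id}",
                headers={"Authorization": f"Bearer {token}"},  # assumed auth scheme
            )
            response.raise_for_status()
            return response.json()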
@router.get("/tenants/statistics", dependencies=[Depends(require_admin_role_dep)])
@track_endpoint_metrics("tenant_get_statistics")
async def get_tenant_statistics_enhanced(
@@ -521,11 +541,11 @@ async def get_tenant_statistics_enhanced(
    tenant_service: EnhancedTenantService = Depends(get_enhanced_tenant_service)
):
    """Get comprehensive tenant statistics (admin only) with enhanced analytics"""

    try:
        stats = await tenant_service.get_tenant_statistics()
        return stats

    except Exception as e:
        logger.error("Get tenant statistics failed", error=str(e))
        raise HTTPException(