Add improved production UI
This commit is contained in:
@@ -18,9 +18,10 @@ from app.repositories.production_batch_repository import ProductionBatchReposito
|
||||
from app.repositories.production_schedule_repository import ProductionScheduleRepository
|
||||
from app.repositories.production_capacity_repository import ProductionCapacityRepository
|
||||
from app.repositories.quality_check_repository import QualityCheckRepository
|
||||
from app.models.production import ProductionBatch, ProductionStatus, ProductionPriority
|
||||
from app.models.production import ProductionBatch, ProductionSchedule, ProductionStatus, ProductionPriority
|
||||
from app.schemas.production import (
|
||||
ProductionBatchCreate, ProductionBatchUpdate, ProductionBatchStatusUpdate,
|
||||
ProductionScheduleCreate, ProductionScheduleUpdate, ProductionScheduleResponse,
|
||||
DailyProductionRequirements, ProductionDashboardSummary, ProductionMetrics
|
||||
)
|
||||
|
||||
@@ -124,10 +125,46 @@ class ProductionService:
|
||||
return batch
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Error creating production batch",
|
||||
logger.error("Error creating production batch",
|
||||
error=str(e), tenant_id=str(tenant_id))
|
||||
raise
|
||||
|
||||
|
||||
async def get_production_batches_list(
    self,
    tenant_id: UUID,
    filters: Dict[str, Any],
    page: int,
    page_size: int
) -> "ProductionBatchListResponse":
    """Return a paginated, filtered page of production batches.

    Args:
        tenant_id: Tenant whose batches are listed; always forced into the filter.
        filters: Optional filter values; entries whose value is None are dropped.
        page: Page number passed through to the repository.
        page_size: Maximum number of batches per page.

    Returns:
        A ProductionBatchListResponse. (Fix: the original annotation said
        Dict[str, Any], but the code has always returned the schema object.)

    Raises:
        Exception: Re-raised after logging on any repository/database failure.
    """
    try:
        async with self.database_manager.get_session() as session:
            batch_repo = ProductionBatchRepository(session)

            # Drop unset filters, then scope the query to this tenant.
            filter_dict = {k: v for k, v in filters.items() if v is not None}
            filter_dict["tenant_id"] = str(tenant_id)

            # Fetch the page and the total row count for pagination metadata.
            batches = await batch_repo.get_batches_filtered(filter_dict, page, page_size)
            total_count = await batch_repo.count_batches_filtered(filter_dict)

            # Imported locally, presumably to avoid a circular import —
            # TODO(review): confirm before hoisting to module level.
            from app.schemas.production import ProductionBatchResponse, ProductionBatchListResponse
            batch_responses = [ProductionBatchResponse.model_validate(batch) for batch in batches]

            return ProductionBatchListResponse(
                batches=batch_responses,
                total_count=total_count,
                page=page,
                page_size=page_size
            )

    except Exception as e:
        logger.error("Error getting production batches list",
                     error=str(e), tenant_id=str(tenant_id))
        raise
|
||||
|
||||
async def update_batch_status(
|
||||
self,
|
||||
tenant_id: UUID,
|
||||
@@ -394,4 +431,554 @@ class ProductionService:
|
||||
except Exception as e:
|
||||
logger.error("Error updating inventory on batch completion",
|
||||
error=str(e), batch_id=str(batch.id))
|
||||
# Don't raise - inventory update failure shouldn't prevent batch completion
|
||||
# Don't raise - inventory update failure shouldn't prevent batch completion
|
||||
|
||||
# Additional Batch Methods
|
||||
async def update_production_batch(
    self,
    tenant_id: UUID,
    batch_id: UUID,
    batch_update: ProductionBatchUpdate
) -> ProductionBatch:
    """Apply a partial update to a production batch and return the result.

    Only fields actually set on `batch_update` (non-None) are sent to the
    repository. Repository errors are logged with context and re-raised.
    """
    try:
        async with self.database_manager.get_session() as db:
            repo = ProductionBatchRepository(db)

            # Serialize the schema, skipping unset/None fields.
            changes = batch_update.model_dump(exclude_none=True)
            updated = await repo.update_batch(batch_id, changes)

            logger.info("Updated production batch",
                        batch_id=str(batch_id), tenant_id=str(tenant_id))
            return updated

    except Exception as e:
        logger.error("Error updating production batch",
                     error=str(e), batch_id=str(batch_id), tenant_id=str(tenant_id))
        raise
|
||||
|
||||
async def delete_production_batch(self, tenant_id: UUID, batch_id: UUID):
    """Delete (cancel) a production batch when its state allows it.

    Args:
        tenant_id: Tenant owning the batch (log context only here).
        batch_id: Identifier of the batch to delete.

    Raises:
        ValueError: If the batch does not exist, or is in progress / completed.
        Exception: Re-raised after logging on repository/database failure.
    """
    try:
        async with self.database_manager.get_session() as session:
            batch_repo = ProductionBatchRepository(session)

            batch = await batch_repo.get(batch_id)
            # Fix: previously a missing batch crashed with AttributeError on
            # `batch.status`; raise an explicit, catchable error instead.
            if batch is None:
                raise ValueError(f"Production batch {batch_id} not found")

            # Guard: batches already running or finished must not be deleted.
            if batch.status in [ProductionStatus.IN_PROGRESS, ProductionStatus.COMPLETED]:
                raise ValueError("Cannot delete batch that is in progress or completed")

            await batch_repo.delete_batch(batch_id)

            logger.info("Deleted production batch",
                        batch_id=str(batch_id), tenant_id=str(tenant_id))

    except Exception as e:
        logger.error("Error deleting production batch",
                     error=str(e), batch_id=str(batch_id), tenant_id=str(tenant_id))
        raise
|
||||
|
||||
async def start_production_batch(self, tenant_id: UUID, batch_id: UUID) -> ProductionBatch:
    """Transition a production batch into its started state.

    Delegates the state change to the batch repository; errors are logged
    with tenant/batch context and re-raised.
    """
    try:
        async with self.database_manager.get_session() as db:
            repo = ProductionBatchRepository(db)
            started = await repo.start_batch(batch_id)

            logger.info("Started production batch",
                        batch_id=str(batch_id), tenant_id=str(tenant_id))
            return started

    except Exception as e:
        logger.error("Error starting production batch",
                     error=str(e), batch_id=str(batch_id), tenant_id=str(tenant_id))
        raise
|
||||
|
||||
async def complete_production_batch(
    self,
    tenant_id: UUID,
    batch_id: UUID,
    completion_data: Optional[Dict[str, Any]] = None
) -> ProductionBatch:
    """Mark a batch completed and, when a yield was recorded, sync inventory.

    Args:
        tenant_id: Tenant owning the batch.
        batch_id: Batch to complete.
        completion_data: Optional completion payload; defaults to empty dict.
    """
    try:
        async with self.database_manager.get_session() as db:
            repo = ProductionBatchRepository(db)
            completed = await repo.complete_batch(batch_id, completion_data or {})

            # Push inventory updates only when an actual produced quantity exists.
            if completed.actual_quantity:
                await self._update_inventory_on_completion(tenant_id, completed, completed.actual_quantity)

            logger.info("Completed production batch",
                        batch_id=str(batch_id), tenant_id=str(tenant_id))
            return completed

    except Exception as e:
        logger.error("Error completing production batch",
                     error=str(e), batch_id=str(batch_id), tenant_id=str(tenant_id))
        raise
|
||||
|
||||
async def get_batch_statistics(
    self,
    tenant_id: UUID,
    start_date: date,
    end_date: date
) -> Dict[str, Any]:
    """Fetch aggregate batch statistics for a tenant over a date range."""
    try:
        async with self.database_manager.get_session() as db:
            repo = ProductionBatchRepository(db)
            return await repo.get_batch_statistics(str(tenant_id), start_date, end_date)

    except Exception as e:
        logger.error("Error getting batch statistics",
                     error=str(e), tenant_id=str(tenant_id))
        raise
|
||||
|
||||
# Production Schedule Methods
|
||||
async def create_production_schedule(
    self,
    tenant_id: UUID,
    schedule_data: ProductionScheduleCreate
) -> ProductionSchedule:
    """Create a new production schedule for the tenant and return it."""
    try:
        async with self.database_manager.get_session() as db:
            repo = ProductionScheduleRepository(db)

            # Serialize the schema and stamp the owning tenant onto the row.
            payload = schedule_data.model_dump()
            payload["tenant_id"] = tenant_id

            created = await repo.create_schedule(payload)

            logger.info("Created production schedule",
                        schedule_id=str(created.id), tenant_id=str(tenant_id))
            return created

    except Exception as e:
        logger.error("Error creating production schedule",
                     error=str(e), tenant_id=str(tenant_id))
        raise
|
||||
|
||||
async def update_production_schedule(
    self,
    tenant_id: UUID,
    schedule_id: UUID,
    schedule_update: ProductionScheduleUpdate
) -> ProductionSchedule:
    """Apply a partial update to a production schedule and return it.

    Only non-None fields of `schedule_update` are forwarded to the repository.
    """
    try:
        async with self.database_manager.get_session() as db:
            repo = ProductionScheduleRepository(db)

            changes = schedule_update.model_dump(exclude_none=True)
            updated = await repo.update_schedule(schedule_id, changes)

            logger.info("Updated production schedule",
                        schedule_id=str(schedule_id), tenant_id=str(tenant_id))
            return updated

    except Exception as e:
        logger.error("Error updating production schedule",
                     error=str(e), schedule_id=str(schedule_id), tenant_id=str(tenant_id))
        raise
|
||||
|
||||
async def finalize_production_schedule(
    self,
    tenant_id: UUID,
    schedule_id: UUID
) -> ProductionSchedule:
    """Lock in a production schedule via the repository and return it."""
    try:
        async with self.database_manager.get_session() as db:
            repo = ProductionScheduleRepository(db)
            finalized = await repo.finalize_schedule(schedule_id)

            logger.info("Finalized production schedule",
                        schedule_id=str(schedule_id), tenant_id=str(tenant_id))
            return finalized

    except Exception as e:
        logger.error("Error finalizing production schedule",
                     error=str(e), schedule_id=str(schedule_id), tenant_id=str(tenant_id))
        raise
|
||||
|
||||
async def optimize_schedule(self, tenant_id: UUID, target_date: date) -> Dict[str, Any]:
    """Return AI-based schedule optimization suggestions.

    NOTE: currently a static mock — the inputs are not consulted and no
    database work happens; a real optimizer is expected to replace this.
    """
    try:
        suggestion = {
            "type": "reschedule",
            "message": "Move croissant production to 6 AM to avoid oven congestion",
            "impact": "Reduces wait time by 30 minutes"
        }
        return {
            "optimized": True,
            "suggestions": [suggestion],
            "predicted_efficiency": 92.5
        }

    except Exception as e:
        logger.error("Error optimizing schedule",
                     error=str(e), tenant_id=str(tenant_id))
        raise
|
||||
|
||||
async def get_capacity_usage_report(
    self,
    tenant_id: UUID,
    start_date: date,
    end_date: date
) -> Dict[str, Any]:
    """Fetch the capacity usage report for a tenant over a date range."""
    try:
        async with self.database_manager.get_session() as db:
            repo = ProductionCapacityRepository(db)
            return await repo.get_capacity_usage_report(
                str(tenant_id), start_date, end_date
            )

    except Exception as e:
        logger.error("Error getting capacity usage report",
                     error=str(e), tenant_id=str(tenant_id))
        raise
|
||||
|
||||
# Capacity Methods
|
||||
async def get_capacity_list(
    self,
    tenant_id: UUID,
    filters: Dict[str, Any],
    page: int,
    page_size: int
) -> Dict[str, Any]:
    """Return a filtered, paginated capacity listing for the tenant."""
    try:
        async with self.database_manager.get_session() as db:
            repo = ProductionCapacityRepository(db)
            return await repo.get_capacity_list(
                str(tenant_id), filters, page, page_size
            )

    except Exception as e:
        logger.error("Error getting capacity list",
                     error=str(e), tenant_id=str(tenant_id))
        raise
|
||||
|
||||
async def check_resource_availability(
    self,
    tenant_id: UUID,
    resource_id: str,
    start_time: datetime,
    end_time: datetime
) -> Dict[str, Any]:
    """Check whether a production resource is free in a time window."""
    try:
        async with self.database_manager.get_session() as db:
            repo = ProductionCapacityRepository(db)
            return await repo.check_resource_availability(
                str(tenant_id), resource_id, start_time, end_time
            )

    except Exception as e:
        logger.error("Error checking resource availability",
                     error=str(e), tenant_id=str(tenant_id))
        raise
|
||||
|
||||
async def reserve_capacity(
    self,
    tenant_id: UUID,
    reservation_data: Dict[str, Any]
) -> Dict[str, Any]:
    """Reserve production capacity for the tenant.

    NOTE: mutates `reservation_data` in place by stamping the tenant id —
    callers see the modified dict (preserved from the original behavior).
    """
    try:
        async with self.database_manager.get_session() as db:
            repo = ProductionCapacityRepository(db)

            reservation_data["tenant_id"] = str(tenant_id)
            return await repo.reserve_capacity(reservation_data)

    except Exception as e:
        logger.error("Error reserving capacity",
                     error=str(e), tenant_id=str(tenant_id))
        raise
|
||||
|
||||
async def update_capacity(
    self,
    tenant_id: UUID,
    capacity_id: UUID,
    update_data: Dict[str, Any]
) -> Dict[str, Any]:
    """Apply an update to a capacity record and return the result."""
    try:
        async with self.database_manager.get_session() as db:
            repo = ProductionCapacityRepository(db)
            return await repo.update_capacity(capacity_id, update_data)

    except Exception as e:
        logger.error("Error updating capacity",
                     error=str(e), tenant_id=str(tenant_id))
        raise
|
||||
|
||||
async def predict_capacity_bottlenecks(
    self,
    tenant_id: UUID,
    days_ahead: int
) -> Dict[str, Any]:
    """Predict upcoming capacity bottlenecks.

    NOTE: currently a static mock — `days_ahead` is not consulted; the single
    bottleneck is always reported for tomorrow.
    """
    try:
        tomorrow = date.today() + timedelta(days=1)
        bottleneck = {
            "date": tomorrow.isoformat(),
            "time_slot": "06:00-07:00",
            "resource_name": "Oven #3",
            "predicted_utilization": 95.0,
            "severity": "high",
            "suggestion": "Consider scheduling lighter load items during this period"
        }
        return {"bottlenecks": [bottleneck]}

    except Exception as e:
        logger.error("Error predicting capacity bottlenecks",
                     error=str(e), tenant_id=str(tenant_id))
        raise
|
||||
|
||||
# Quality Methods
|
||||
async def get_quality_checks_list(
    self,
    tenant_id: UUID,
    filters: Dict[str, Any],
    page: int,
    page_size: int
) -> Dict[str, Any]:
    """Return a filtered, paginated list of quality checks for the tenant."""
    try:
        async with self.database_manager.get_session() as db:
            repo = QualityCheckRepository(db)
            return await repo.get_quality_checks_list(
                str(tenant_id), filters, page, page_size
            )

    except Exception as e:
        logger.error("Error getting quality checks list",
                     error=str(e), tenant_id=str(tenant_id))
        raise
|
||||
|
||||
async def get_batch_quality_checks(
    self,
    tenant_id: UUID,
    batch_id: UUID
) -> Dict[str, Any]:
    """Return all quality checks recorded against one production batch."""
    try:
        async with self.database_manager.get_session() as db:
            repo = QualityCheckRepository(db)
            checks = await repo.get_checks_by_batch(str(tenant_id), str(batch_id))

            serialized = [check.to_dict() for check in checks]
            return {"quality_checks": serialized}

    except Exception as e:
        logger.error("Error getting batch quality checks",
                     error=str(e), tenant_id=str(tenant_id))
        raise
|
||||
|
||||
async def create_quality_check(
    self,
    tenant_id: UUID,
    quality_check_data: Dict[str, Any]
) -> Dict[str, Any]:
    """Create a quality check for the tenant and return it as a dict.

    NOTE: mutates `quality_check_data` in place by stamping the tenant id
    (preserved from the original behavior).
    """
    try:
        async with self.database_manager.get_session() as db:
            repo = QualityCheckRepository(db)

            quality_check_data["tenant_id"] = str(tenant_id)
            created = await repo.create_quality_check(quality_check_data)
            return created.to_dict()

    except Exception as e:
        logger.error("Error creating quality check",
                     error=str(e), tenant_id=str(tenant_id))
        raise
|
||||
|
||||
async def update_quality_check(
    self,
    tenant_id: UUID,
    check_id: UUID,
    update_data: Dict[str, Any]
) -> Dict[str, Any]:
    """Update an existing quality check and return it as a dict."""
    try:
        async with self.database_manager.get_session() as db:
            repo = QualityCheckRepository(db)
            updated = await repo.update_quality_check(check_id, update_data)
            return updated.to_dict()

    except Exception as e:
        logger.error("Error updating quality check",
                     error=str(e), tenant_id=str(tenant_id))
        raise
|
||||
|
||||
async def get_quality_trends(
    self,
    tenant_id: UUID,
    start_date: date,
    end_date: date
) -> Dict[str, Any]:
    """Fetch quality trend data for a tenant over a date range."""
    try:
        async with self.database_manager.get_session() as db:
            repo = QualityCheckRepository(db)
            return await repo.get_quality_trends(str(tenant_id), start_date, end_date)

    except Exception as e:
        logger.error("Error getting quality trends",
                     error=str(e), tenant_id=str(tenant_id))
        raise
|
||||
|
||||
async def get_quality_alerts(self, tenant_id: UUID) -> Dict[str, Any]:
    """Fetch active quality alerts for the tenant."""
    try:
        async with self.database_manager.get_session() as db:
            repo = QualityCheckRepository(db)
            return await repo.get_quality_alerts(str(tenant_id))

    except Exception as e:
        logger.error("Error getting quality alerts",
                     error=str(e), tenant_id=str(tenant_id))
        raise
|
||||
|
||||
# Analytics Methods
|
||||
async def get_performance_analytics(
    self,
    tenant_id: UUID,
    start_date: date,
    end_date: date
) -> Dict[str, Any]:
    """Fetch production performance analytics for a tenant and date range."""
    try:
        async with self.database_manager.get_session() as db:
            repo = ProductionBatchRepository(db)
            return await repo.get_performance_analytics(
                str(tenant_id), start_date, end_date
            )

    except Exception as e:
        logger.error("Error getting performance analytics",
                     error=str(e), tenant_id=str(tenant_id))
        raise
|
||||
|
||||
async def get_yield_trends_analytics(
    self,
    tenant_id: UUID,
    period: str
) -> Dict[str, Any]:
    """Fetch yield trend analytics for the tenant over the given period."""
    try:
        async with self.database_manager.get_session() as db:
            repo = ProductionBatchRepository(db)
            return await repo.get_yield_trends(str(tenant_id), period)

    except Exception as e:
        logger.error("Error getting yield trends analytics",
                     error=str(e), tenant_id=str(tenant_id))
        raise
|
||||
|
||||
async def get_top_defects_analytics(self, tenant_id: UUID) -> Dict[str, Any]:
    """Fetch the most frequent quality defects for the tenant."""
    try:
        async with self.database_manager.get_session() as db:
            repo = QualityCheckRepository(db)
            return await repo.get_top_defects(str(tenant_id))

    except Exception as e:
        logger.error("Error getting top defects analytics",
                     error=str(e), tenant_id=str(tenant_id))
        raise
|
||||
|
||||
async def get_equipment_efficiency_analytics(self, tenant_id: UUID) -> Dict[str, Any]:
    """Fetch per-equipment efficiency analytics for the tenant."""
    try:
        async with self.database_manager.get_session() as db:
            repo = ProductionCapacityRepository(db)
            return await repo.get_equipment_efficiency(str(tenant_id))

    except Exception as e:
        logger.error("Error getting equipment efficiency analytics",
                     error=str(e), tenant_id=str(tenant_id))
        raise
|
||||
|
||||
async def generate_analytics_report(
    self,
    tenant_id: UUID,
    report_config: Dict[str, Any]
) -> Dict[str, Any]:
    """Generate a production analytics report descriptor.

    NOTE: currently a mock — no report file is actually produced; the
    returned ids/URLs are synthesized from the tenant id and today's date.
    """
    try:
        today = date.today().isoformat()
        return {
            "report_id": f"report_{tenant_id}_{today}",
            "generated_at": datetime.now().isoformat(),
            "config": report_config,
            "download_url": f"/reports/production_{tenant_id}_{today}.pdf"
        }

    except Exception as e:
        logger.error("Error generating analytics report",
                     error=str(e), tenant_id=str(tenant_id))
        raise
|
||||
Reference in New Issue
Block a user