Add improved production UI

This commit is contained in:
Urtzi Alfaro
2025-09-23 12:49:35 +02:00
parent 8d54202e91
commit 4ae8e14e55
35 changed files with 6848 additions and 415 deletions

View File

@@ -369,4 +369,324 @@ class ProductionBatchRepository(ProductionBaseRepository, BatchCountProvider):
tenant_id=tenant_id,
prefix=prefix
)
raise
raise
async def get_batches_with_filters(
    self,
    tenant_id: str,
    status: Optional[ProductionStatus] = None,
    product_id: Optional[UUID] = None,
    order_id: Optional[UUID] = None,
    start_date: Optional[date] = None,
    end_date: Optional[date] = None,
    page: int = 1,
    page_size: int = 50
) -> tuple[List[ProductionBatch], int]:
    """Fetch production batches for a tenant, filtered and paginated.

    Date-only bounds are widened to full-day datetimes and compared
    against planned_start_time. Results are ordered newest-first by
    planned_start_time. Returns (batches, total_count).
    """
    try:
        criteria: Dict[str, Any] = {"tenant_id": tenant_id}
        if status:
            criteria["status"] = status
        if product_id:
            criteria["product_id"] = product_id
        if order_id:
            criteria["order_id"] = order_id
        # Expand date-only filters so the whole day is included.
        if start_date:
            criteria["planned_start_time__gte"] = datetime.combine(start_date, datetime.min.time())
        if end_date:
            criteria["planned_start_time__lte"] = datetime.combine(end_date, datetime.max.time())

        # Total matches first, then the requested page slice.
        matched_total = await self.count(criteria)
        rows = await self.get_multi(
            filters=criteria,
            order_by="planned_start_time",
            order_desc=True,
            limit=page_size,
            offset=(page - 1) * page_size
        )

        logger.info("Retrieved batches with filters",
                    count=len(rows),
                    total_count=matched_total,
                    page=page,
                    page_size=page_size,
                    tenant_id=tenant_id)
        return rows, matched_total
    except Exception as e:
        logger.error("Error fetching batches with filters", error=str(e))
        raise DatabaseError(f"Failed to fetch batches with filters: {str(e)}")
async def update_batch(self, batch_id: UUID, update_data: Dict[str, Any]) -> ProductionBatch:
    """Apply field updates to an existing production batch.

    Stamps updated_at before persisting. Raises ValidationError when the
    batch does not exist, DatabaseError on any other failure.
    """
    try:
        existing = await self.get(batch_id)
        if not existing:
            raise ValidationError(f"Batch {batch_id} not found")

        # Record when this modification happened.
        update_data["updated_at"] = datetime.utcnow()

        updated = await self.update(batch_id, update_data)
        logger.info("Updated production batch",
                    batch_id=str(batch_id),
                    update_fields=list(update_data.keys()))
        return updated
    except ValidationError:
        raise
    except Exception as e:
        logger.error("Error updating production batch", error=str(e))
        raise DatabaseError(f"Failed to update production batch: {str(e)}")
async def delete_batch(self, batch_id: UUID) -> bool:
    """Remove a production batch, refusing to touch active or finished work.

    Raises ValidationError when the batch is missing or its status is
    IN_PROGRESS/COMPLETED; returns True on successful deletion.
    """
    try:
        target = await self.get(batch_id)
        if not target:
            raise ValidationError(f"Batch {batch_id} not found")

        # Running or completed batches are protected from deletion.
        if target.status in [ProductionStatus.IN_PROGRESS, ProductionStatus.COMPLETED]:
            raise ValidationError(f"Cannot delete batch in {target.status.value} status")

        deleted = await self.delete(batch_id)
        logger.info("Deleted production batch",
                    batch_id=str(batch_id),
                    success=deleted)
        return deleted
    except ValidationError:
        raise
    except Exception as e:
        logger.error("Error deleting production batch", error=str(e))
        raise DatabaseError(f"Failed to delete production batch: {str(e)}")
async def start_batch(self, batch_id: UUID) -> ProductionBatch:
    """Transition a PENDING batch to IN_PROGRESS and stamp its start time.

    Raises ValidationError when the batch is missing or not PENDING.
    """
    try:
        target = await self.get(batch_id)
        if not target:
            raise ValidationError(f"Batch {batch_id} not found")

        # Only batches that have not started yet may be started.
        if target.status != ProductionStatus.PENDING:
            raise ValidationError(f"Cannot start batch in {target.status.value} status")

        started = await self.update(batch_id, {
            "status": ProductionStatus.IN_PROGRESS,
            "actual_start_time": datetime.utcnow(),
            "updated_at": datetime.utcnow()
        })
        logger.info("Started production batch",
                    batch_id=str(batch_id),
                    actual_start_time=started.actual_start_time)
        return started
    except ValidationError:
        raise
    except Exception as e:
        logger.error("Error starting production batch", error=str(e))
        raise DatabaseError(f"Failed to start production batch: {str(e)}")
async def complete_batch(
    self,
    batch_id: UUID,
    completion_data: Optional[Dict[str, Any]] = None
) -> ProductionBatch:
    """Mark a production batch as completed.

    Only batches currently IN_PROGRESS or QUALITY_CHECK can be completed.
    Optional completion_data may supply "actual_quantity" (also used to
    derive yield_percentage against planned_quantity), "notes" (stored as
    production_notes) and "quality_score".

    Args:
        batch_id: Identifier of the batch to complete.
        completion_data: Optional extra fields recorded at completion.

    Returns:
        The updated ProductionBatch.

    Raises:
        ValidationError: If the batch is missing or not in a completable state.
        DatabaseError: On any other persistence failure.
    """
    try:
        batch = await self.get(batch_id)
        if not batch:
            raise ValidationError(f"Batch {batch_id} not found")

        if batch.status not in [ProductionStatus.IN_PROGRESS, ProductionStatus.QUALITY_CHECK]:
            raise ValidationError(f"Cannot complete batch in {batch.status.value} status")

        # Take a single timestamp so actual_end_time, completed_at,
        # updated_at and the derived duration all agree exactly (previously
        # three separate utcnow() readings could differ by microseconds).
        now = datetime.utcnow()
        update_data: Dict[str, Any] = {
            "status": ProductionStatus.COMPLETED,
            "actual_end_time": now,
            "completed_at": now,
            "updated_at": now
        }

        if completion_data:
            if "actual_quantity" in completion_data:
                update_data["actual_quantity"] = completion_data["actual_quantity"]
                # Yield = actual / planned; guard against division by zero.
                if batch.planned_quantity > 0:
                    update_data["yield_percentage"] = (
                        completion_data["actual_quantity"] / batch.planned_quantity
                    ) * 100
            if "notes" in completion_data:
                update_data["production_notes"] = completion_data["notes"]
            if "quality_score" in completion_data:
                update_data["quality_score"] = completion_data["quality_score"]

        # Derive wall-clock duration only when a real start was recorded.
        if batch.actual_start_time:
            duration = now - batch.actual_start_time
            update_data["actual_duration_minutes"] = int(duration.total_seconds() / 60)

        batch = await self.update(batch_id, update_data)
        logger.info("Completed production batch",
                    batch_id=str(batch_id),
                    actual_quantity=update_data.get("actual_quantity"),
                    yield_percentage=update_data.get("yield_percentage"))
        return batch
    except ValidationError:
        raise
    except Exception as e:
        logger.error("Error completing production batch", error=str(e))
        raise DatabaseError(f"Failed to complete production batch: {str(e)}")
async def get_batch_statistics(
    self,
    tenant_id: str,
    start_date: Optional[date] = None,
    end_date: Optional[date] = None
) -> Dict[str, Any]:
    """Summarize batch outcomes for a tenant over a date window.

    Defaults to the trailing 30 days when no explicit range is given.
    Returns counts per terminal status plus completion, average-yield
    and on-time percentages (rounded to 2 decimals).
    """
    try:
        # Default window: the last 30 days up to today (UTC).
        window_start = start_date or (datetime.utcnow() - timedelta(days=30)).date()
        window_end = end_date or datetime.utcnow().date()

        batches = await self.get_batches_by_date_range(tenant_id, window_start, window_end)

        total = len(batches)
        completed = sum(1 for b in batches if b.status == ProductionStatus.COMPLETED)
        failed = sum(1 for b in batches if b.status == ProductionStatus.FAILED)
        cancelled = sum(1 for b in batches if b.status == ProductionStatus.CANCELLED)

        completion_rate = (completed / total * 100) if total > 0 else 0

        # Average yield over batches that actually recorded one.
        yields = [b.yield_percentage for b in batches if b.yield_percentage is not None]
        average_yield = sum(yields) / len(yields) if yields else 0

        # On time = completed no later than its planned end.
        on_time = sum(
            1 for b in batches
            if b.status == ProductionStatus.COMPLETED
            and b.actual_end_time
            and b.planned_end_time
            and b.actual_end_time <= b.planned_end_time
        )
        on_time_rate = (on_time / completed * 100) if completed > 0 else 0

        return {
            "total_batches": total,
            "completed_batches": completed,
            "failed_batches": failed,
            "cancelled_batches": cancelled,
            "completion_rate": round(completion_rate, 2),
            "average_yield": round(average_yield, 2),
            "on_time_rate": round(on_time_rate, 2),
            "period_start": window_start.isoformat(),
            "period_end": window_end.isoformat()
        }
    except Exception as e:
        logger.error("Error calculating batch statistics", error=str(e))
        raise DatabaseError(f"Failed to calculate batch statistics: {str(e)}")
async def get_batches_filtered(
    self,
    filters: Dict[str, Any],
    page: int,
    page_size: int
) -> List[ProductionBatch]:
    """Run a raw SQLAlchemy query for batches matching a filter dict.

    Supported keys: tenant_id, status, product_id, order_id, start_date
    (lower bound on planned_start_time), end_date (upper bound on
    planned_end_time). Results are newest-first by created_at and sliced
    to the requested page.
    """
    try:
        # Collect the predicates implied by the filter dict.
        conditions = []
        if "tenant_id" in filters:
            conditions.append(ProductionBatch.tenant_id == filters["tenant_id"])
        if "status" in filters:
            conditions.append(ProductionBatch.status == filters["status"])
        if "product_id" in filters:
            conditions.append(ProductionBatch.product_id == filters["product_id"])
        if "order_id" in filters:
            conditions.append(ProductionBatch.order_id == filters["order_id"])
        if "start_date" in filters:
            conditions.append(ProductionBatch.planned_start_time >= filters["start_date"])
        if "end_date" in filters:
            conditions.append(ProductionBatch.planned_end_time <= filters["end_date"])

        stmt = select(ProductionBatch)
        for cond in conditions:
            stmt = stmt.where(cond)

        # Newest batches first, sliced to the requested page.
        stmt = (
            stmt.order_by(desc(ProductionBatch.created_at))
            .offset((page - 1) * page_size)
            .limit(page_size)
        )

        result = await self.session.execute(stmt)
        return list(result.scalars().all())
    except Exception as e:
        logger.error("Error getting filtered batches", error=str(e))
        raise DatabaseError(f"Failed to get filtered batches: {str(e)}")
async def count_batches_filtered(self, filters: Dict[str, Any]) -> int:
    """Count batches matching a filter dict.

    Applies the same predicates as get_batches_filtered (tenant_id,
    status, product_id, order_id, start_date, end_date) and returns 0
    when no row matches.
    """
    try:
        conditions = []
        if "tenant_id" in filters:
            conditions.append(ProductionBatch.tenant_id == filters["tenant_id"])
        if "status" in filters:
            conditions.append(ProductionBatch.status == filters["status"])
        if "product_id" in filters:
            conditions.append(ProductionBatch.product_id == filters["product_id"])
        if "order_id" in filters:
            conditions.append(ProductionBatch.order_id == filters["order_id"])
        if "start_date" in filters:
            conditions.append(ProductionBatch.planned_start_time >= filters["start_date"])
        if "end_date" in filters:
            conditions.append(ProductionBatch.planned_end_time <= filters["end_date"])

        stmt = select(func.count(ProductionBatch.id))
        for cond in conditions:
            stmt = stmt.where(cond)

        result = await self.session.execute(stmt)
        # scalar() may be None for an empty result set; normalize to 0.
        return result.scalar() or 0
    except Exception as e:
        logger.error("Error counting filtered batches", error=str(e))
        raise DatabaseError(f"Failed to count filtered batches: {str(e)}")

View File

@@ -338,4 +338,72 @@ class ProductionCapacityRepository(ProductionBaseRepository):
raise
except Exception as e:
logger.error("Error setting maintenance mode", error=str(e))
raise DatabaseError(f"Failed to set maintenance mode: {str(e)}")
raise DatabaseError(f"Failed to set maintenance mode: {str(e)}")
async def get_capacity_with_filters(
    self,
    tenant_id: str,
    resource_type: Optional[str] = None,
    date_filter: Optional[date] = None,
    availability: Optional[bool] = None,
    page: int = 1,
    page_size: int = 50
) -> tuple[List[ProductionCapacity], int]:
    """Fetch production capacity rows for a tenant, filtered and paginated.

    Results are ordered by date descending. Returns
    (capacities, total_count).
    """
    try:
        criteria: Dict[str, Any] = {"tenant_id": tenant_id}
        if resource_type:
            criteria["resource_type"] = resource_type
        if date_filter:
            criteria["date"] = date_filter
        # availability=False is a valid filter, so test against None.
        if availability is not None:
            criteria["is_available"] = availability

        matched_total = await self.count(criteria)
        rows = await self.get_multi(
            filters=criteria,
            order_by="date",
            order_desc=True,
            limit=page_size,
            offset=(page - 1) * page_size
        )

        logger.info("Retrieved capacity with filters",
                    count=len(rows),
                    total_count=matched_total,
                    page=page,
                    page_size=page_size,
                    tenant_id=tenant_id)
        return rows, matched_total
    except Exception as e:
        logger.error("Error fetching capacity with filters", error=str(e))
        raise DatabaseError(f"Failed to fetch capacity with filters: {str(e)}")
async def get_capacity_by_date(self, tenant_id: str, target_date: date) -> List[ProductionCapacity]:
    """Return every capacity entry for a tenant on one specific date.

    Entries are ordered by start_time ascending.
    """
    try:
        entries = await self.get_multi(
            filters={"tenant_id": tenant_id, "date": target_date},
            order_by="start_time"
        )
        logger.info("Retrieved capacity by date",
                    count=len(entries),
                    date=target_date.isoformat(),
                    tenant_id=tenant_id)
        return entries
    except Exception as e:
        logger.error("Error fetching capacity by date", error=str(e))
        raise DatabaseError(f"Failed to fetch capacity by date: {str(e)}")

View File

@@ -276,4 +276,110 @@ class ProductionScheduleRepository(ProductionBaseRepository):
except Exception as e:
logger.error("Error calculating schedule performance summary", error=str(e))
raise DatabaseError(f"Failed to calculate schedule performance summary: {str(e)}")
raise DatabaseError(f"Failed to calculate schedule performance summary: {str(e)}")
async def get_schedules_with_filters(
    self,
    tenant_id: str,
    start_date: Optional[date] = None,
    end_date: Optional[date] = None,
    is_finalized: Optional[bool] = None,
    page: int = 1,
    page_size: int = 50
) -> tuple[List[ProductionSchedule], int]:
    """Fetch production schedules for a tenant, filtered and paginated.

    Results are ordered by schedule_date descending. Returns
    (schedules, total_count).
    """
    try:
        criteria: Dict[str, Any] = {"tenant_id": tenant_id}
        if start_date:
            criteria["schedule_date__gte"] = start_date
        if end_date:
            criteria["schedule_date__lte"] = end_date
        # is_finalized=False is a valid filter, so test against None.
        if is_finalized is not None:
            criteria["is_finalized"] = is_finalized

        matched_total = await self.count(criteria)
        rows = await self.get_multi(
            filters=criteria,
            order_by="schedule_date",
            order_desc=True,
            limit=page_size,
            offset=(page - 1) * page_size
        )

        logger.info("Retrieved schedules with filters",
                    count=len(rows),
                    total_count=matched_total,
                    page=page,
                    page_size=page_size,
                    tenant_id=tenant_id)
        return rows, matched_total
    except Exception as e:
        logger.error("Error fetching schedules with filters", error=str(e))
        raise DatabaseError(f"Failed to fetch schedules with filters: {str(e)}")
async def update_schedule(self, schedule_id: UUID, update_data: Dict[str, Any]) -> ProductionSchedule:
    """Apply field updates to an existing production schedule.

    Stamps updated_at before persisting. Raises ValidationError when the
    schedule does not exist, DatabaseError on any other failure.
    """
    try:
        existing = await self.get(schedule_id)
        if not existing:
            raise ValidationError(f"Schedule {schedule_id} not found")

        # Record when this modification happened.
        update_data["updated_at"] = datetime.utcnow()

        updated = await self.update(schedule_id, update_data)
        logger.info("Updated production schedule",
                    schedule_id=str(schedule_id),
                    update_fields=list(update_data.keys()))
        return updated
    except ValidationError:
        raise
    except Exception as e:
        logger.error("Error updating production schedule", error=str(e))
        raise DatabaseError(f"Failed to update production schedule: {str(e)}")
async def delete_schedule(self, schedule_id: UUID) -> bool:
    """Remove a production schedule unless it has been finalized.

    Raises ValidationError when the schedule is missing or finalized;
    returns True on successful deletion.
    """
    try:
        target = await self.get(schedule_id)
        if not target:
            raise ValidationError(f"Schedule {schedule_id} not found")

        # Finalized schedules are locked against deletion.
        if target.is_finalized:
            raise ValidationError("Cannot delete finalized schedule")

        deleted = await self.delete(schedule_id)
        logger.info("Deleted production schedule",
                    schedule_id=str(schedule_id),
                    success=deleted)
        return deleted
    except ValidationError:
        raise
    except Exception as e:
        logger.error("Error deleting production schedule", error=str(e))
        raise DatabaseError(f"Failed to delete production schedule: {str(e)}")
async def get_todays_schedule(self, tenant_id: str) -> Optional[ProductionSchedule]:
    """Convenience lookup for the tenant's schedule dated today (UTC)."""
    try:
        return await self.get_schedule_by_date(tenant_id, datetime.utcnow().date())
    except Exception as e:
        logger.error("Error fetching today's schedule", error=str(e))
        raise DatabaseError(f"Failed to fetch today's schedule: {str(e)}")

View File

@@ -316,4 +316,60 @@ class QualityCheckRepository(ProductionBaseRepository):
except Exception as e:
logger.error("Error calculating quality trends", error=str(e))
raise DatabaseError(f"Failed to calculate quality trends: {str(e)}")
raise DatabaseError(f"Failed to calculate quality trends: {str(e)}")
async def get_quality_checks_with_filters(
    self,
    tenant_id: str,
    batch_id: Optional[UUID] = None,
    product_id: Optional[UUID] = None,
    start_date: Optional[date] = None,
    end_date: Optional[date] = None,
    pass_fail: Optional[bool] = None,
    page: int = 1,
    page_size: int = 50
) -> tuple[List[QualityCheck], int]:
    """Get quality checks with filters and pagination.

    Date-only bounds are widened to full-day datetimes against check_time.
    Results are ordered by check_time descending.

    Args:
        tenant_id: Tenant scope for the query.
        batch_id: Optional batch to restrict checks to.
        product_id: NOT YET SUPPORTED — filtering by product would require
            a join through production batches; the value is currently
            ignored (a warning is logged so callers notice).
        start_date / end_date: Inclusive day bounds on check_time.
        pass_fail: Optional pass/fail outcome filter (False is valid).
        page / page_size: Pagination controls.

    Returns:
        (checks, total_count) for the matching rows.

    Raises:
        DatabaseError: On any query failure.
    """
    try:
        filters: Dict[str, Any] = {"tenant_id": tenant_id}
        if batch_id:
            filters["batch_id"] = batch_id
        if product_id:
            # The original code silently dropped this filter; surface the
            # unsupported parameter instead so callers are not misled into
            # believing the results are product-scoped.
            logger.warning("product_id filter not supported for quality checks; ignoring",
                           product_id=str(product_id),
                           tenant_id=tenant_id)
        # Expand date-only filters so the whole day is included.
        if start_date:
            filters["check_time__gte"] = datetime.combine(start_date, datetime.min.time())
        if end_date:
            filters["check_time__lte"] = datetime.combine(end_date, datetime.max.time())
        if pass_fail is not None:
            filters["pass_fail"] = pass_fail

        # Total matches first, then the requested page slice.
        total_count = await self.count(filters)
        checks = await self.get_multi(
            filters=filters,
            order_by="check_time",
            order_desc=True,
            limit=page_size,
            offset=(page - 1) * page_size
        )

        logger.info("Retrieved quality checks with filters",
                    count=len(checks),
                    total_count=total_count,
                    page=page,
                    page_size=page_size,
                    tenant_id=tenant_id)
        return checks, total_count
    except Exception as e:
        logger.error("Error fetching quality checks with filters", error=str(e))
        raise DatabaseError(f"Failed to fetch quality checks with filters: {str(e)}")