Improve the security of the DB
@@ -206,7 +206,7 @@ class AlertProcessorService:
            raise

    async def store_item(self, item: dict) -> dict:
-       """Store alert or recommendation in database"""
+       """Store alert or recommendation in database and cache in Redis"""
        from app.models.alerts import Alert, AlertSeverity, AlertStatus
        from sqlalchemy import select

@@ -234,7 +234,7 @@ class AlertProcessorService:
        logger.debug("Item stored in database", item_id=item['id'])

        # Convert to dict for return
-       return {
+       alert_dict = {
            'id': str(alert.id),
            'tenant_id': str(alert.tenant_id),
            'item_type': alert.item_type,
@@ -248,6 +248,60 @@ class AlertProcessorService:
            'metadata': alert.alert_metadata,
            'created_at': alert.created_at
        }

+       # Cache active alerts in Redis for SSE initial_items
+       await self._cache_active_alerts(str(alert.tenant_id))
+
+       return alert_dict
+
+   async def _cache_active_alerts(self, tenant_id: str):
+       """Cache all active alerts for a tenant in Redis for quick SSE access"""
+       try:
+           from app.models.alerts import Alert, AlertStatus
+           from sqlalchemy import select
+
+           async with self.db_manager.get_session() as session:
+               # Query all active alerts for this tenant
+               query = select(Alert).where(
+                   Alert.tenant_id == tenant_id,
+                   Alert.status == AlertStatus.ACTIVE
+               ).order_by(Alert.created_at.desc()).limit(50)
+
+               result = await session.execute(query)
+               alerts = result.scalars().all()
+
+               # Convert to JSON-serializable format
+               active_items = []
+               for alert in alerts:
+                   active_items.append({
+                       'id': str(alert.id),
+                       'item_type': alert.item_type,
+                       'type': alert.alert_type,
+                       'severity': alert.severity.value,
+                       'title': alert.title,
+                       'message': alert.message,
+                       'actions': alert.actions or [],
+                       'metadata': alert.alert_metadata or {},
+                       'timestamp': alert.created_at.isoformat() if alert.created_at else datetime.utcnow().isoformat(),
+                       'status': alert.status.value
+                   })
+
+               # Cache in Redis with 1 hour TTL
+               cache_key = f"active_alerts:{tenant_id}"
+               await self.redis.setex(
+                   cache_key,
+                   3600,  # 1 hour TTL
+                   json.dumps(active_items)
+               )
+
+               logger.debug("Cached active alerts in Redis",
+                            tenant_id=tenant_id,
+                            count=len(active_items))
+
+       except Exception as e:
+           logger.error("Failed to cache active alerts",
+                        tenant_id=tenant_id,
+                        error=str(e))
+
    async def stream_to_sse(self, tenant_id: str, item: dict):
        """Publish item to Redis for SSE streaming"""

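Note that the cache is written only on the create path; nothing in this hunk refreshes it when an alert leaves the ACTIVE state. A minimal sketch of a refresh hook, assuming the same `db_manager` and `redis` attributes used above and an `AlertStatus.RESOLVED` member (the method name `resolve_alert` is hypothetical, not part of this commit):

    # Sketch only: rebuild the cached snapshot whenever an alert's status changes,
    # so SSE initial_items stops serving resolved alerts before the TTL expires.
    async def resolve_alert(self, tenant_id: str, alert_id: str):
        from app.models.alerts import Alert, AlertStatus  # AlertStatus.RESOLVED assumed
        from sqlalchemy import update

        async with self.db_manager.get_session() as session:
            await session.execute(
                update(Alert)
                .where(Alert.id == alert_id, Alert.tenant_id == tenant_id)
                .values(status=AlertStatus.RESOLVED)
            )
            await session.commit()

        # Re-cache the remaining active alerts for this tenant
        await self._cache_active_alerts(tenant_id)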
@@ -20,6 +20,7 @@ sys.path.insert(0, str(Path(__file__).parent.parent.parent.parent.parent))
from app.core.database import get_db
from app.models.inventory import Ingredient, Stock
from shared.utils.demo_dates import adjust_date_for_demo, BASE_REFERENCE_DATE
+from shared.messaging.rabbitmq import RabbitMQClient

logger = structlog.get_logger()
router = APIRouter(prefix="/internal/demo", tags=["internal"])
@@ -231,16 +232,42 @@ async def clone_demo_data(
    # Commit all changes
    await db.commit()

-   # Generate inventory alerts
+   # Generate inventory alerts with RabbitMQ publishing
+   rabbitmq_client = None
    try:
        from shared.utils.alert_generator import generate_inventory_alerts
-       alerts_count = await generate_inventory_alerts(db, virtual_uuid, session_created_at)
+
+       # Initialize RabbitMQ client for alert publishing
+       rabbitmq_host = os.getenv("RABBITMQ_HOST", "rabbitmq-service")
+       rabbitmq_user = os.getenv("RABBITMQ_USER", "bakery")
+       rabbitmq_password = os.getenv("RABBITMQ_PASSWORD", "forecast123")
+       rabbitmq_port = os.getenv("RABBITMQ_PORT", "5672")
+       rabbitmq_vhost = os.getenv("RABBITMQ_VHOST", "/")
+       rabbitmq_url = f"amqp://{rabbitmq_user}:{rabbitmq_password}@{rabbitmq_host}:{rabbitmq_port}{rabbitmq_vhost}"
+
+       rabbitmq_client = RabbitMQClient(rabbitmq_url, service_name="inventory")
+       await rabbitmq_client.connect()
+
+       # Generate alerts and publish to RabbitMQ
+       alerts_count = await generate_inventory_alerts(
+           db,
+           virtual_uuid,
+           session_created_at,
+           rabbitmq_client=rabbitmq_client
+       )
        stats["alerts_generated"] = alerts_count
-       await db.commit()  # Commit alerts
+       await db.commit()
        logger.info(f"Generated {alerts_count} inventory alerts", virtual_tenant_id=virtual_tenant_id)
    except Exception as e:
        logger.warning(f"Failed to generate alerts: {str(e)}", exc_info=True)
        stats["alerts_generated"] = 0
+   finally:
+       # Clean up RabbitMQ connection
+       if rabbitmq_client:
+           try:
+               await rabbitmq_client.disconnect()
+           except Exception as cleanup_error:
+               logger.warning(f"Error disconnecting RabbitMQ: {cleanup_error}")

    total_records = sum(stats.values())
    duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
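The same six-line URL block appears verbatim in the inventory, orders, and production demo routers, each carrying the default password in source. A sketch of a shared helper (module location and name are illustrative) that also percent-encodes credentials, so a rotated password containing '@' or '/' cannot corrupt the AMQP URL:

    # Sketch: one place to build the broker URL instead of three copies.
    import os
    from urllib.parse import quote

    def build_rabbitmq_url() -> str:
        host = os.getenv("RABBITMQ_HOST", "rabbitmq-service")
        user = quote(os.getenv("RABBITMQ_USER", "bakery"), safe="")
        # Defaults shown here mirror the diff; in practice the password default
        # would ideally be removed so a missing secret fails loudly.
        password = quote(os.getenv("RABBITMQ_PASSWORD", "forecast123"), safe="")
        port = os.getenv("RABBITMQ_PORT", "5672")
        vhost = os.getenv("RABBITMQ_VHOST", "/")
        return f"amqp://{user}:{password}@{host}:{port}{vhost}"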
@@ -226,27 +226,39 @@ class SSEService:
                         error=str(e))

    async def get_active_items(self, tenant_id: str) -> list:
-       """Fetch active alerts and recommendations from database"""
+       """
+       Fetch active alerts and recommendations from Redis cache.
+
+       NOTE: We use Redis as the source of truth for active alerts to maintain
+       microservices architecture. The alert_processor service caches active alerts
+       in Redis when they are created, and we read from that cache here.
+       This avoids direct database coupling between services.
+       """
        try:
-           # This would integrate with the actual database
-           # For now, return empty list as placeholder
-           # In real implementation, this would query the alerts table
-
-           # Example query:
-           # query = """
-           #     SELECT id, item_type, alert_type, severity, title, message,
-           #            actions, metadata, created_at, status
-           #     FROM alerts
-           #     WHERE tenant_id = $1
-           #     AND status = 'active'
-           #     ORDER BY severity_weight DESC, created_at DESC
-           #     LIMIT 50
-           # """
-
-           return []  # Placeholder
+           if not self.redis:
+               logger.warning("Redis not available, returning empty list", tenant_id=tenant_id)
+               return []
+
+           # Try to get cached active alerts for this tenant from Redis
+           cache_key = f"active_alerts:{tenant_id}"
+           cached_data = await self.redis.get(cache_key)
+
+           if cached_data:
+               active_items = json.loads(cached_data)
+               logger.info("Fetched active alerts from Redis cache",
+                           tenant_id=tenant_id,
+                           count=len(active_items))
+               return active_items
+           else:
+               logger.info("No cached alerts found for tenant",
+                           tenant_id=tenant_id)
+               return []

        except Exception as e:
-           logger.error("Error fetching active items", tenant_id=tenant_id, error=str(e))
+           logger.error("Error fetching active items from Redis",
+                        tenant_id=tenant_id,
+                        error=str(e),
+                        exc_info=True)
            return []

    def get_metrics(self) -> Dict[str, Any]:

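The producer (alert_processor) and this consumer must agree on the key format, TTL, and payload shape, but each currently hard-codes its half. Also worth noting: with a 1 hour TTL and rebuilds only on alert creation, a tenant with no new alerts for an hour gets empty initial_items even while active alerts still exist in the database. A sketch of centralizing the contract in one importable module (path and names hypothetical):

    # Sketch: shared cache contract, importable by both services so the key
    # template and TTL cannot drift apart.
    ACTIVE_ALERTS_KEY_TEMPLATE = "active_alerts:{tenant_id}"
    ACTIVE_ALERTS_TTL_SECONDS = 3600  # must outlive the longest expected gap between alerts

    def active_alerts_key(tenant_id: str) -> str:
        """Build the Redis key for a tenant's active-alert snapshot."""
        return ACTIVE_ALERTS_KEY_TEMPLATE.format(tenant_id=tenant_id)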
@@ -19,6 +19,7 @@ from app.models.procurement import ProcurementPlan, ProcurementRequirement
from app.models.customer import Customer
from shared.utils.demo_dates import adjust_date_for_demo, BASE_REFERENCE_DATE
from shared.utils.alert_generator import generate_order_alerts
+from shared.messaging.rabbitmq import RabbitMQClient

logger = structlog.get_logger()
router = APIRouter(prefix="/internal/demo", tags=["internal"])
@@ -385,14 +386,39 @@ async def clone_demo_data(
    # Commit cloned data first
    await db.commit()

-   # Generate order alerts (urgent, delayed, upcoming deliveries)
+   # Generate order alerts (urgent, delayed, upcoming deliveries) with RabbitMQ publishing
+   rabbitmq_client = None
    try:
-       alerts_count = await generate_order_alerts(db, virtual_uuid, session_time)
+       # Initialize RabbitMQ client for alert publishing
+       rabbitmq_host = os.getenv("RABBITMQ_HOST", "rabbitmq-service")
+       rabbitmq_user = os.getenv("RABBITMQ_USER", "bakery")
+       rabbitmq_password = os.getenv("RABBITMQ_PASSWORD", "forecast123")
+       rabbitmq_port = os.getenv("RABBITMQ_PORT", "5672")
+       rabbitmq_vhost = os.getenv("RABBITMQ_VHOST", "/")
+       rabbitmq_url = f"amqp://{rabbitmq_user}:{rabbitmq_password}@{rabbitmq_host}:{rabbitmq_port}{rabbitmq_vhost}"
+
+       rabbitmq_client = RabbitMQClient(rabbitmq_url, service_name="orders")
+       await rabbitmq_client.connect()
+
+       # Generate alerts and publish to RabbitMQ
+       alerts_count = await generate_order_alerts(
+           db,
+           virtual_uuid,
+           session_time,
+           rabbitmq_client=rabbitmq_client
+       )
        stats["alerts_generated"] += alerts_count
        await db.commit()
        logger.info(f"Generated {alerts_count} order alerts")
    except Exception as alert_error:
        logger.warning(f"Alert generation failed: {alert_error}", exc_info=True)
+   finally:
+       # Clean up RabbitMQ connection
+       if rabbitmq_client:
+           try:
+               await rabbitmq_client.disconnect()
+           except Exception as cleanup_error:
+               logger.warning(f"Error disconnecting RabbitMQ: {cleanup_error}")

    total_records = sum(stats.values())
    duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
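The connect / try / finally / disconnect choreography is likewise repeated in all three demo routers. A sketch of an async context manager wrapper, assuming only the RabbitMQClient interface already used above:

    # Sketch: centralize connection lifecycle so each router's alert block
    # shrinks to a single `async with`.
    from contextlib import asynccontextmanager

    @asynccontextmanager
    async def rabbitmq_connection(url: str, service_name: str):
        client = RabbitMQClient(url, service_name=service_name)
        await client.connect()
        try:
            yield client
        finally:
            try:
                await client.disconnect()
            except Exception as cleanup_error:
                logger.warning(f"Error disconnecting RabbitMQ: {cleanup_error}")

    # Usage in a router:
    #     async with rabbitmq_connection(rabbitmq_url, "orders") as client:
    #         alerts_count = await generate_order_alerts(db, virtual_uuid,
    #                                                    session_time,
    #                                                    rabbitmq_client=client)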
@@ -142,9 +142,9 @@ class ProcurementPlanBase(ProcurementBase):
    plan_period_start: date
    plan_period_end: date
    planning_horizon_days: int = Field(default=14, gt=0)

    plan_type: str = Field(default="regular", pattern="^(regular|emergency|seasonal|urgent)$")
-   priority: str = Field(default="normal", pattern="^(high|normal|low)$")
+   priority: str = Field(default="normal", pattern="^(critical|high|normal|low)$")

    business_model: Optional[str] = Field(None, pattern="^(individual_bakery|central_bakery)$")
    procurement_strategy: str = Field(default="just_in_time", pattern="^(just_in_time|bulk|mixed|bulk_order)$")
@@ -166,7 +166,7 @@ class ProcurementPlanCreate(ProcurementPlanBase):
class ProcurementPlanUpdate(ProcurementBase):
    """Schema for updating procurement plans"""
    status: Optional[str] = Field(None, pattern="^(draft|pending_approval|approved|in_execution|completed|cancelled)$")
-   priority: Optional[str] = Field(None, pattern="^(high|normal|low)$")
+   priority: Optional[str] = Field(None, pattern="^(critical|high|normal|low)$")

    approved_at: Optional[datetime] = None
    approved_by: Optional[uuid.UUID] = None
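With "critical" added, the same literal pattern now lives in two schemas and must be edited in both whenever the set changes. A sketch of a str-Enum alternative that keeps them in sync (PlanPriority is an illustrative name, not part of this commit):

    # Sketch: Pydantic validates enum membership directly, replacing the
    # duplicated regex patterns.
    from enum import Enum

    class PlanPriority(str, Enum):
        CRITICAL = "critical"
        HIGH = "high"
        NORMAL = "normal"
        LOW = "low"

    # In the schemas:
    #     priority: PlanPriority = PlanPriority.NORMAL            # base schema
    #     priority: Optional[PlanPriority] = None                 # update schema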
services/production/app/api/equipment.py (new file, 229 lines)
@@ -0,0 +1,229 @@
# services/production/app/api/equipment.py
"""
Equipment API - CRUD operations on Equipment model
"""

from fastapi import APIRouter, Depends, HTTPException, Path, Query
from typing import Optional
from uuid import UUID
import structlog

from shared.auth.decorators import get_current_user_dep
from shared.auth.access_control import require_user_role
from shared.routing import RouteBuilder
from shared.security import create_audit_logger, AuditSeverity, AuditAction
from app.core.database import get_db
from app.services.production_service import ProductionService
from app.schemas.equipment import (
    EquipmentCreate,
    EquipmentUpdate,
    EquipmentResponse,
    EquipmentListResponse
)
from app.models.production import EquipmentStatus, EquipmentType
from app.core.config import settings

logger = structlog.get_logger()
route_builder = RouteBuilder('production')
router = APIRouter(tags=["production-equipment"])

# Initialize audit logger
audit_logger = create_audit_logger("production-service")


def get_production_service() -> ProductionService:
    """Dependency injection for production service"""
    from app.core.database import database_manager
    return ProductionService(database_manager, settings)


@router.get(
    route_builder.build_base_route("equipment"),
    response_model=EquipmentListResponse
)
async def list_equipment(
    tenant_id: UUID = Path(...),
    status: Optional[EquipmentStatus] = Query(None, description="Filter by status"),
    type: Optional[EquipmentType] = Query(None, description="Filter by equipment type"),
    is_active: Optional[bool] = Query(None, description="Filter by active status"),
    page: int = Query(1, ge=1, description="Page number"),
    page_size: int = Query(50, ge=1, le=100, description="Page size"),
    current_user: dict = Depends(get_current_user_dep),
    production_service: ProductionService = Depends(get_production_service)
):
    """List equipment with filters: status, type, active status"""
    try:
        filters = {
            "status": status,
            "type": type,
            "is_active": is_active
        }

        equipment_list = await production_service.get_equipment_list(tenant_id, filters, page, page_size)

        logger.info("Retrieved equipment list",
                    tenant_id=str(tenant_id), filters=filters)

        return equipment_list

    except Exception as e:
        logger.error("Error listing equipment",
                     error=str(e), tenant_id=str(tenant_id))
        raise HTTPException(status_code=500, detail="Failed to list equipment")


@router.post(
    route_builder.build_base_route("equipment"),
    response_model=EquipmentResponse
)
async def create_equipment(
    equipment_data: EquipmentCreate,
    tenant_id: UUID = Path(...),
    current_user: dict = Depends(get_current_user_dep),
    production_service: ProductionService = Depends(get_production_service)
):
    """Create a new equipment item"""
    try:
        equipment = await production_service.create_equipment(tenant_id, equipment_data)

        logger.info("Created equipment",
                    equipment_id=str(equipment.id), tenant_id=str(tenant_id))

        # Audit log
        await audit_logger.log(
            action=AuditAction.CREATE,
            resource_type="equipment",
            resource_id=str(equipment.id),
            user_id=current_user.get('user_id'),
            tenant_id=str(tenant_id),
            severity=AuditSeverity.INFO,
            details={"equipment_name": equipment.name, "equipment_type": equipment.type.value}
        )

        return EquipmentResponse.model_validate(equipment)

    except ValueError as e:
        logger.warning("Validation error creating equipment",
                       error=str(e), tenant_id=str(tenant_id))
        raise HTTPException(status_code=400, detail=str(e))

    except Exception as e:
        logger.error("Error creating equipment",
                     error=str(e), tenant_id=str(tenant_id))
        raise HTTPException(status_code=500, detail="Failed to create equipment")


@router.get(
    route_builder.build_base_route("equipment/{equipment_id}"),
    response_model=EquipmentResponse
)
async def get_equipment(
    tenant_id: UUID = Path(...),
    equipment_id: UUID = Path(...),
    current_user: dict = Depends(get_current_user_dep),
    production_service: ProductionService = Depends(get_production_service)
):
    """Get a specific equipment item"""
    try:
        equipment = await production_service.get_equipment(tenant_id, equipment_id)

        if not equipment:
            raise HTTPException(status_code=404, detail="Equipment not found")

        logger.info("Retrieved equipment",
                    equipment_id=str(equipment_id), tenant_id=str(tenant_id))

        return EquipmentResponse.model_validate(equipment)

    except HTTPException:
        raise
    except Exception as e:
        logger.error("Error retrieving equipment",
                     error=str(e), equipment_id=str(equipment_id), tenant_id=str(tenant_id))
        raise HTTPException(status_code=500, detail="Failed to retrieve equipment")


@router.put(
    route_builder.build_base_route("equipment/{equipment_id}"),
    response_model=EquipmentResponse
)
async def update_equipment(
    equipment_data: EquipmentUpdate,
    tenant_id: UUID = Path(...),
    equipment_id: UUID = Path(...),
    current_user: dict = Depends(get_current_user_dep),
    production_service: ProductionService = Depends(get_production_service)
):
    """Update an equipment item"""
    try:
        equipment = await production_service.update_equipment(tenant_id, equipment_id, equipment_data)

        if not equipment:
            raise HTTPException(status_code=404, detail="Equipment not found")

        logger.info("Updated equipment",
                    equipment_id=str(equipment_id), tenant_id=str(tenant_id))

        # Audit log
        await audit_logger.log(
            action=AuditAction.UPDATE,
            resource_type="equipment",
            resource_id=str(equipment_id),
            user_id=current_user.get('user_id'),
            tenant_id=str(tenant_id),
            severity=AuditSeverity.INFO,
            details={"updates": equipment_data.model_dump(exclude_unset=True)}
        )

        return EquipmentResponse.model_validate(equipment)

    except HTTPException:
        raise
    except ValueError as e:
        logger.warning("Validation error updating equipment",
                       error=str(e), equipment_id=str(equipment_id), tenant_id=str(tenant_id))
        raise HTTPException(status_code=400, detail=str(e))
    except Exception as e:
        logger.error("Error updating equipment",
                     error=str(e), equipment_id=str(equipment_id), tenant_id=str(tenant_id))
        raise HTTPException(status_code=500, detail="Failed to update equipment")


@router.delete(
    route_builder.build_base_route("equipment/{equipment_id}")
)
async def delete_equipment(
    tenant_id: UUID = Path(...),
    equipment_id: UUID = Path(...),
    current_user: dict = Depends(get_current_user_dep),
    production_service: ProductionService = Depends(get_production_service)
):
    """Delete (soft delete) an equipment item"""
    try:
        success = await production_service.delete_equipment(tenant_id, equipment_id)

        if not success:
            raise HTTPException(status_code=404, detail="Equipment not found")

        logger.info("Deleted equipment",
                    equipment_id=str(equipment_id), tenant_id=str(tenant_id))

        # Audit log
        await audit_logger.log(
            action=AuditAction.DELETE,
            resource_type="equipment",
            resource_id=str(equipment_id),
            user_id=current_user.get('user_id'),
            tenant_id=str(tenant_id),
            severity=AuditSeverity.WARNING,
            details={"action": "soft_delete"}
        )

        return {"message": "Equipment deleted successfully"}

    except HTTPException:
        raise
    except Exception as e:
        logger.error("Error deleting equipment",
                     error=str(e), equipment_id=str(equipment_id), tenant_id=str(tenant_id))
        raise HTTPException(status_code=500, detail="Failed to delete equipment")
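A quick exercise of the new endpoints. The concrete path prefix comes from RouteBuilder('production'), which is not shown in this diff, so the URL below is an assumption; the token and tenant UUID are placeholders:

    # Sketch: calling the list endpoint with filters and pagination.
    import asyncio
    import httpx

    async def main():
        tenant_id = "00000000-0000-0000-0000-000000000001"  # example UUID
        async with httpx.AsyncClient(base_url="http://localhost:8000") as client:
            resp = await client.get(
                f"/api/v1/tenants/{tenant_id}/equipment",  # assumed RouteBuilder layout
                params={"status": "operational", "page": 1, "page_size": 20},
                headers={"Authorization": "Bearer <token>"},
            )
            resp.raise_for_status()
            print(resp.json()["total_count"])

    asyncio.run(main())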
@@ -21,6 +21,7 @@ from app.models.production import (
)
from shared.utils.demo_dates import adjust_date_for_demo, BASE_REFERENCE_DATE
from shared.utils.alert_generator import generate_equipment_alerts
+from shared.messaging.rabbitmq import RabbitMQClient

logger = structlog.get_logger()
router = APIRouter(prefix="/internal/demo", tags=["internal"])
@@ -432,14 +433,39 @@ async def clone_demo_data(
    # Commit cloned data first
    await db.commit()

-   # Generate equipment maintenance and status alerts
+   # Generate equipment maintenance and status alerts with RabbitMQ publishing
+   rabbitmq_client = None
    try:
-       alerts_count = await generate_equipment_alerts(db, virtual_uuid, session_time)
+       # Initialize RabbitMQ client for alert publishing
+       rabbitmq_host = os.getenv("RABBITMQ_HOST", "rabbitmq-service")
+       rabbitmq_user = os.getenv("RABBITMQ_USER", "bakery")
+       rabbitmq_password = os.getenv("RABBITMQ_PASSWORD", "forecast123")
+       rabbitmq_port = os.getenv("RABBITMQ_PORT", "5672")
+       rabbitmq_vhost = os.getenv("RABBITMQ_VHOST", "/")
+       rabbitmq_url = f"amqp://{rabbitmq_user}:{rabbitmq_password}@{rabbitmq_host}:{rabbitmq_port}{rabbitmq_vhost}"
+
+       rabbitmq_client = RabbitMQClient(rabbitmq_url, service_name="production")
+       await rabbitmq_client.connect()
+
+       # Generate alerts and publish to RabbitMQ
+       alerts_count = await generate_equipment_alerts(
+           db,
+           virtual_uuid,
+           session_time,
+           rabbitmq_client=rabbitmq_client
+       )
        stats["alerts_generated"] += alerts_count
        await db.commit()
        logger.info(f"Generated {alerts_count} equipment alerts")
    except Exception as alert_error:
        logger.warning(f"Alert generation failed: {alert_error}", exc_info=True)
+   finally:
+       # Clean up RabbitMQ connection
+       if rabbitmq_client:
+           try:
+               await rabbitmq_client.disconnect()
+           except Exception as cleanup_error:
+               logger.warning(f"Error disconnecting RabbitMQ: {cleanup_error}")

    total_records = sum(stats.values())
    duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
@@ -23,6 +23,7 @@ from app.api import (
    production_dashboard,
    analytics,
    quality_templates,
+   equipment,
    internal_demo
)
@@ -166,6 +167,7 @@ service.setup_custom_middleware()
# Include standardized routers
# NOTE: Register more specific routes before generic parameterized routes
service.add_router(quality_templates.router)  # Register first to avoid route conflicts
+service.add_router(equipment.router)
service.add_router(production_batches.router)
service.add_router(production_schedules.router)
service.add_router(production_operations.router)
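The NOTE above matters because FastAPI (via Starlette) matches routes in registration order. A small illustration of the failure mode being avoided; the paths are illustrative, not the project's actual routes:

    # Sketch: if the parameterized route were registered first, it would
    # swallow the literal path and "equipment" would arrive as a batch_id.
    from fastapi import FastAPI

    app = FastAPI()

    @app.get("/production/equipment")        # literal: must be registered first
    def list_equipment():
        return {"route": "equipment"}

    @app.get("/production/{batch_id}")       # parameterized: matches anything
    def get_batch(batch_id: str):
        return {"route": "batch", "batch_id": batch_id}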
services/production/app/repositories/equipment_repository.py (new file, 152 lines)
@@ -0,0 +1,152 @@
"""
Equipment Repository
"""

from typing import Optional, List, Dict, Any
from sqlalchemy import select, func, and_
from sqlalchemy.ext.asyncio import AsyncSession
from uuid import UUID
import structlog

from app.repositories.base import ProductionBaseRepository
from app.models.production import Equipment, EquipmentStatus, EquipmentType

logger = structlog.get_logger()


class EquipmentRepository(ProductionBaseRepository):
    """Repository for equipment operations"""

    def __init__(self, session: AsyncSession):
        super().__init__(Equipment, session)

    async def get_equipment_filtered(
        self,
        filters: Dict[str, Any],
        page: int = 1,
        page_size: int = 50
    ) -> List[Equipment]:
        """Get equipment list with filters and pagination"""
        try:
            # Build base query
            query = select(Equipment).filter(Equipment.tenant_id == UUID(filters.get("tenant_id")))

            # Apply status filter
            if "status" in filters and filters["status"]:
                query = query.filter(Equipment.status == filters["status"])

            # Apply type filter
            if "type" in filters and filters["type"]:
                query = query.filter(Equipment.type == filters["type"])

            # Apply active filter
            if "is_active" in filters and filters["is_active"] is not None:
                query = query.filter(Equipment.is_active == filters["is_active"])

            # Apply pagination
            query = query.order_by(Equipment.created_at.desc())
            query = query.offset((page - 1) * page_size).limit(page_size)

            result = await self.session.execute(query)
            return list(result.scalars().all())

        except Exception as e:
            logger.error("Error getting filtered equipment", error=str(e), filters=filters)
            raise

    async def count_equipment_filtered(self, filters: Dict[str, Any]) -> int:
        """Count equipment matching filters"""
        try:
            # Build base query
            query = select(func.count(Equipment.id)).filter(
                Equipment.tenant_id == UUID(filters.get("tenant_id"))
            )

            # Apply status filter
            if "status" in filters and filters["status"]:
                query = query.filter(Equipment.status == filters["status"])

            # Apply type filter
            if "type" in filters and filters["type"]:
                query = query.filter(Equipment.type == filters["type"])

            # Apply active filter
            if "is_active" in filters and filters["is_active"] is not None:
                query = query.filter(Equipment.is_active == filters["is_active"])

            result = await self.session.execute(query)
            return result.scalar() or 0

        except Exception as e:
            logger.error("Error counting filtered equipment", error=str(e), filters=filters)
            raise

    async def get_equipment_by_id(self, tenant_id: UUID, equipment_id: UUID) -> Optional[Equipment]:
        """Get equipment by ID and tenant"""
        try:
            query = select(Equipment).filter(
                and_(
                    Equipment.id == equipment_id,
                    Equipment.tenant_id == tenant_id
                )
            )
            result = await self.session.execute(query)
            return result.scalar_one_or_none()

        except Exception as e:
            logger.error("Error getting equipment by ID",
                         error=str(e),
                         equipment_id=str(equipment_id),
                         tenant_id=str(tenant_id))
            raise

    async def create_equipment(self, equipment_data: Dict[str, Any]) -> Equipment:
        """Create new equipment"""
        try:
            equipment = Equipment(**equipment_data)
            self.session.add(equipment)
            await self.session.flush()
            await self.session.refresh(equipment)
            return equipment

        except Exception as e:
            logger.error("Error creating equipment", error=str(e), data=equipment_data)
            raise

    async def update_equipment(
        self,
        equipment_id: UUID,
        updates: Dict[str, Any]
    ) -> Optional[Equipment]:
        """Update equipment"""
        try:
            equipment = await self.get(equipment_id)
            if not equipment:
                return None

            for key, value in updates.items():
                if hasattr(equipment, key) and value is not None:
                    setattr(equipment, key, value)

            await self.session.flush()
            await self.session.refresh(equipment)
            return equipment

        except Exception as e:
            logger.error("Error updating equipment", error=str(e), equipment_id=str(equipment_id))
            raise

    async def delete_equipment(self, equipment_id: UUID) -> bool:
        """Soft delete equipment (set is_active to False)"""
        try:
            equipment = await self.get(equipment_id)
            if not equipment:
                return False

            equipment.is_active = False
            await self.session.flush()
            return True

        except Exception as e:
            logger.error("Error deleting equipment", error=str(e), equipment_id=str(equipment_id))
            raise
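The list path issues two round-trips per page: get_equipment_filtered and then count_equipment_filtered with the same predicates. A sketch of a single-query alternative using a window function, assuming SQLAlchemy 1.4+; a hedged option, not what this commit does:

    # Sketch: fetch one page plus the total match count in a single statement.
    # The count is repeated on every returned row, which is usually cheap for
    # page sizes capped at 100.
    async def page_with_total(self, tenant_id: UUID, page: int, page_size: int):
        query = (
            select(Equipment, func.count().over().label("total"))
            .filter(Equipment.tenant_id == tenant_id)
            .order_by(Equipment.created_at.desc())
            .offset((page - 1) * page_size)
            .limit(page_size)
        )
        rows = (await self.session.execute(query)).all()
        items = [row[0] for row in rows]
        total = rows[0].total if rows else 0
        return items, total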
services/production/app/schemas/equipment.py (new file, 171 lines)
@@ -0,0 +1,171 @@
# services/production/app/schemas/equipment.py
"""
Equipment schemas for Production Service
"""

from pydantic import BaseModel, Field, ConfigDict
from typing import Optional, List
from datetime import datetime
from uuid import UUID

from app.models.production import EquipmentType, EquipmentStatus


class EquipmentCreate(BaseModel):
    """Schema for creating new equipment"""
    name: str = Field(..., min_length=1, max_length=255, description="Equipment name")
    type: EquipmentType = Field(..., description="Equipment type")
    model: Optional[str] = Field(None, max_length=100, description="Equipment model")
    serial_number: Optional[str] = Field(None, max_length=100, description="Serial number")
    location: Optional[str] = Field(None, max_length=255, description="Physical location")
    status: EquipmentStatus = Field(default=EquipmentStatus.OPERATIONAL, description="Equipment status")

    # Installation and maintenance
    install_date: Optional[datetime] = Field(None, description="Installation date")
    last_maintenance_date: Optional[datetime] = Field(None, description="Last maintenance date")
    next_maintenance_date: Optional[datetime] = Field(None, description="Next scheduled maintenance date")
    maintenance_interval_days: Optional[int] = Field(None, ge=1, description="Maintenance interval in days")

    # Performance metrics
    efficiency_percentage: Optional[float] = Field(None, ge=0, le=100, description="Current efficiency percentage")
    uptime_percentage: Optional[float] = Field(None, ge=0, le=100, description="Overall uptime percentage")
    energy_usage_kwh: Optional[float] = Field(None, ge=0, description="Current energy usage in kWh")

    # Specifications
    power_kw: Optional[float] = Field(None, ge=0, description="Power consumption in kilowatts")
    capacity: Optional[float] = Field(None, ge=0, description="Equipment capacity")
    weight_kg: Optional[float] = Field(None, ge=0, description="Weight in kilograms")

    # Temperature monitoring
    current_temperature: Optional[float] = Field(None, description="Current temperature")
    target_temperature: Optional[float] = Field(None, description="Target temperature")

    # Notes
    notes: Optional[str] = Field(None, description="Additional notes")

    model_config = ConfigDict(
        json_schema_extra={
            "example": {
                "name": "Horno Principal #1",
                "type": "oven",
                "model": "Miwe Condo CO 4.1212",
                "serial_number": "MCO-2021-001",
                "location": "Área de Horneado - Zona A",
                "status": "operational",
                "install_date": "2021-03-15T00:00:00Z",
                "maintenance_interval_days": 90,
                "efficiency_percentage": 92.0,
                "uptime_percentage": 98.5,
                "power_kw": 45.0,
                "capacity": 24.0
            }
        }
    )


class EquipmentUpdate(BaseModel):
    """Schema for updating equipment"""
    name: Optional[str] = Field(None, min_length=1, max_length=255)
    type: Optional[EquipmentType] = None
    model: Optional[str] = Field(None, max_length=100)
    serial_number: Optional[str] = Field(None, max_length=100)
    location: Optional[str] = Field(None, max_length=255)
    status: Optional[EquipmentStatus] = None

    # Installation and maintenance
    install_date: Optional[datetime] = None
    last_maintenance_date: Optional[datetime] = None
    next_maintenance_date: Optional[datetime] = None
    maintenance_interval_days: Optional[int] = Field(None, ge=1)

    # Performance metrics
    efficiency_percentage: Optional[float] = Field(None, ge=0, le=100)
    uptime_percentage: Optional[float] = Field(None, ge=0, le=100)
    energy_usage_kwh: Optional[float] = Field(None, ge=0)

    # Specifications
    power_kw: Optional[float] = Field(None, ge=0)
    capacity: Optional[float] = Field(None, ge=0)
    weight_kg: Optional[float] = Field(None, ge=0)

    # Temperature monitoring
    current_temperature: Optional[float] = None
    target_temperature: Optional[float] = None

    # Notes
    notes: Optional[str] = None

    # Status flag
    is_active: Optional[bool] = None

    model_config = ConfigDict(
        json_schema_extra={
            "example": {
                "status": "maintenance",
                "last_maintenance_date": "2024-01-15T00:00:00Z",
                "next_maintenance_date": "2024-04-15T00:00:00Z",
                "efficiency_percentage": 88.0
            }
        }
    )


class EquipmentResponse(BaseModel):
    """Schema for equipment response"""
    id: UUID
    tenant_id: UUID
    name: str
    type: EquipmentType
    model: Optional[str] = None
    serial_number: Optional[str] = None
    location: Optional[str] = None
    status: EquipmentStatus

    # Installation and maintenance
    install_date: Optional[datetime] = None
    last_maintenance_date: Optional[datetime] = None
    next_maintenance_date: Optional[datetime] = None
    maintenance_interval_days: Optional[int] = None

    # Performance metrics
    efficiency_percentage: Optional[float] = None
    uptime_percentage: Optional[float] = None
    energy_usage_kwh: Optional[float] = None

    # Specifications
    power_kw: Optional[float] = None
    capacity: Optional[float] = None
    weight_kg: Optional[float] = None

    # Temperature monitoring
    current_temperature: Optional[float] = None
    target_temperature: Optional[float] = None

    # Status
    is_active: bool
    notes: Optional[str] = None

    # Timestamps
    created_at: datetime
    updated_at: datetime

    model_config = ConfigDict(from_attributes=True)


class EquipmentListResponse(BaseModel):
    """Schema for paginated equipment list response"""
    equipment: List[EquipmentResponse]
    total_count: int
    page: int
    page_size: int

    model_config = ConfigDict(
        json_schema_extra={
            "example": {
                "equipment": [],
                "total_count": 10,
                "page": 1,
                "page_size": 50
            }
        }
    )
@@ -1386,4 +1386,146 @@ class ProductionService:
        except Exception as e:
            logger.error("Error getting batch with transformations",
                         error=str(e), batch_id=str(batch_id), tenant_id=str(tenant_id))
            return {}
+
+    # ================================================================
+    # EQUIPMENT MANAGEMENT METHODS
+    # ================================================================
+
+    async def get_equipment_list(
+        self,
+        tenant_id: UUID,
+        filters: Dict[str, Any],
+        page: int = 1,
+        page_size: int = 50
+    ) -> Dict[str, Any]:
+        """Get list of equipment with filtering and pagination"""
+        try:
+            async with self.database_manager.get_session() as session:
+                from app.repositories.equipment_repository import EquipmentRepository
+                equipment_repo = EquipmentRepository(session)
+
+                # Apply filters
+                filter_dict = {k: v for k, v in filters.items() if v is not None}
+                filter_dict["tenant_id"] = str(tenant_id)
+
+                # Get equipment with pagination
+                equipment_list = await equipment_repo.get_equipment_filtered(filter_dict, page, page_size)
+                total_count = await equipment_repo.count_equipment_filtered(filter_dict)
+
+                # Convert to response format
+                from app.schemas.equipment import EquipmentResponse
+                equipment_responses = [
+                    EquipmentResponse.model_validate(eq) for eq in equipment_list
+                ]
+
+                return {
+                    "equipment": equipment_responses,
+                    "total_count": total_count,
+                    "page": page,
+                    "page_size": page_size
+                }
+
+        except Exception as e:
+            logger.error("Error getting equipment list",
+                         error=str(e), tenant_id=str(tenant_id))
+            raise
+
+    async def get_equipment(self, tenant_id: UUID, equipment_id: UUID):
+        """Get a specific equipment item"""
+        try:
+            async with self.database_manager.get_session() as session:
+                from app.repositories.equipment_repository import EquipmentRepository
+                equipment_repo = EquipmentRepository(session)
+
+                equipment = await equipment_repo.get_equipment_by_id(tenant_id, equipment_id)
+
+                if not equipment:
+                    return None
+
+                logger.info("Retrieved equipment",
+                            equipment_id=str(equipment_id), tenant_id=str(tenant_id))
+
+                return equipment
+
+        except Exception as e:
+            logger.error("Error getting equipment",
+                         error=str(e), equipment_id=str(equipment_id), tenant_id=str(tenant_id))
+            raise
+
+    async def create_equipment(self, tenant_id: UUID, equipment_data):
+        """Create a new equipment item"""
+        try:
+            async with self.database_manager.get_session() as session:
+                from app.repositories.equipment_repository import EquipmentRepository
+                equipment_repo = EquipmentRepository(session)
+
+                # Prepare equipment data
+                equipment_dict = equipment_data.model_dump()
+                equipment_dict["tenant_id"] = tenant_id
+
+                # Create equipment
+                equipment = await equipment_repo.create_equipment(equipment_dict)
+
+                logger.info("Created equipment",
+                            equipment_id=str(equipment.id), tenant_id=str(tenant_id))
+
+                return equipment
+
+        except Exception as e:
+            logger.error("Error creating equipment",
+                         error=str(e), tenant_id=str(tenant_id))
+            raise
+
+    async def update_equipment(self, tenant_id: UUID, equipment_id: UUID, equipment_update):
+        """Update an equipment item"""
+        try:
+            async with self.database_manager.get_session() as session:
+                from app.repositories.equipment_repository import EquipmentRepository
+                equipment_repo = EquipmentRepository(session)
+
+                # First verify equipment belongs to tenant
+                equipment = await equipment_repo.get_equipment_by_id(tenant_id, equipment_id)
+                if not equipment:
+                    return None
+
+                # Update equipment
+                updated_equipment = await equipment_repo.update_equipment(
+                    equipment_id,
+                    equipment_update.model_dump(exclude_none=True)
+                )
+
+                logger.info("Updated equipment",
+                            equipment_id=str(equipment_id), tenant_id=str(tenant_id))
+
+                return updated_equipment
+
+        except Exception as e:
+            logger.error("Error updating equipment",
+                         error=str(e), equipment_id=str(equipment_id), tenant_id=str(tenant_id))
+            raise
+
+    async def delete_equipment(self, tenant_id: UUID, equipment_id: UUID) -> bool:
+        """Delete (soft delete) an equipment item"""
+        try:
+            async with self.database_manager.get_session() as session:
+                from app.repositories.equipment_repository import EquipmentRepository
+                equipment_repo = EquipmentRepository(session)
+
+                # First verify equipment belongs to tenant
+                equipment = await equipment_repo.get_equipment_by_id(tenant_id, equipment_id)
+                if not equipment:
+                    return False
+
+                # Soft delete equipment
+                success = await equipment_repo.delete_equipment(equipment_id)
+
+                logger.info("Deleted equipment",
+                            equipment_id=str(equipment_id), tenant_id=str(tenant_id))
+
+                return success
+
+        except Exception as e:
+            logger.error("Error deleting equipment",
+                         error=str(e), equipment_id=str(equipment_id), tenant_id=str(tenant_id))
+            raise
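One subtlety in update_equipment: it serializes with model_dump(exclude_none=True), and the repository additionally skips None values, so a client can never clear an optional field (for example, setting notes back to null), while the API-layer audit log records model_dump(exclude_unset=True), a slightly different set. A sketch of an unset-aware update, assuming Pydantic v2 semantics:

    # Sketch: exclude_unset distinguishes "field omitted" from "field explicitly
    # set to null", so explicit nulls can be applied instead of silently dropped.
    updates = equipment_update.model_dump(exclude_unset=True)

    for key, value in updates.items():
        if hasattr(equipment, key):
            setattr(equipment, key, value)  # None is applied when sent explicitly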
@@ -119,9 +119,46 @@ class EnhancedBakeryMLTrainer:
            logger.info("Multiple products detected for training",
                        products_count=len(products))

-           # Event 1: Training Started (0%) - update with actual product count
-           # Note: Initial event was already published by API endpoint, this updates with real count
-           await publish_training_started(job_id, tenant_id, len(products))
+           # Event 1: Training Started (0%) - update with actual product count AND time estimates
+           # Calculate accurate time estimates now that we know the actual product count
+           from app.utils.time_estimation import (
+               calculate_initial_estimate,
+               calculate_estimated_completion_time,
+               get_historical_average_estimate
+           )
+
+           # Try to get historical average for more accurate estimates
+           try:
+               historical_avg = await asyncio.get_event_loop().run_in_executor(
+                   None,
+                   get_historical_average_estimate,
+                   db_session,
+                   tenant_id
+               )
+               avg_time_per_product = historical_avg if historical_avg else 60.0
+               logger.info("Using historical average for time estimation",
+                           avg_time_per_product=avg_time_per_product,
+                           has_historical_data=historical_avg is not None)
+           except Exception as e:
+               logger.warning("Could not get historical average, using default",
+                              error=str(e))
+               avg_time_per_product = 60.0
+
+           estimated_duration_minutes = calculate_initial_estimate(
+               total_products=len(products),
+               avg_training_time_per_product=avg_time_per_product
+           )
+           estimated_completion_time = calculate_estimated_completion_time(estimated_duration_minutes)
+
+           # Note: Initial event was already published by API endpoint with estimated product count,
+           # this updates with real count and recalculated time estimates based on actual data
+           await publish_training_started(
+               job_id=job_id,
+               tenant_id=tenant_id,
+               total_products=len(products),
+               estimated_duration_minutes=estimated_duration_minutes,
+               estimated_completion_time=estimated_completion_time.isoformat()
+           )

            # Create initial training log entry
            await repos['training_log'].update_log_progress(
@@ -135,10 +172,25 @@ class EnhancedBakeryMLTrainer:
            )

            # Event 2: Data Analysis (20%)
+           # Recalculate time remaining based on elapsed time (seconds, not a timedelta)
+           elapsed_seconds = (datetime.now(timezone.utc) - repos['training_log']._get_start_time(job_id)).total_seconds() if hasattr(repos['training_log'], '_get_start_time') else 0
+
+           # Estimate remaining time: we've done ~20% of work (data analysis)
+           # Remaining 80% includes training all products
+           products_to_train = len(processed_data)
+           estimated_remaining_seconds = int(products_to_train * avg_time_per_product)
+
+           # Recalculate estimated completion time
+           estimated_completion_time_data_analysis = calculate_estimated_completion_time(
+               estimated_remaining_seconds / 60
+           )
+
            await publish_data_analysis(
                job_id,
                tenant_id,
-               f"Data analysis completed for {len(processed_data)} products"
+               f"Data analysis completed for {len(processed_data)} products",
+               estimated_time_remaining_seconds=estimated_remaining_seconds,
+               estimated_completion_time=estimated_completion_time_data_analysis.isoformat()
            )

            # Train models for each processed product with progress aggregation
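The estimation helpers are imported from app.utils.time_estimation but their bodies are not part of this diff. A sketch of shapes consistent with the call sites above (avg time is in seconds per product, durations are in minutes); these are inferences, not the module's actual implementations:

    # Sketch only: plausible helper signatures inferred from the call sites.
    from datetime import datetime, timedelta, timezone

    def calculate_initial_estimate(total_products: int,
                                   avg_training_time_per_product: float) -> float:
        """Estimated duration in minutes, given seconds per product."""
        return (total_products * avg_training_time_per_product) / 60.0

    def calculate_estimated_completion_time(duration_minutes: float) -> datetime:
        """Wall-clock completion time from a duration in minutes."""
        return datetime.now(timezone.utc) + timedelta(minutes=duration_minutes)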