Improve the demo feature of the project
This commit is contained in:
@@ -27,8 +27,7 @@ COPY --from=shared /shared /app/shared
|
||||
# Copy application code
|
||||
COPY services/production/ .
|
||||
|
||||
# Copy scripts directory
|
||||
COPY scripts/ /app/scripts/
|
||||
|
||||
|
||||
# Add shared libraries to Python path
|
||||
ENV PYTHONPATH="/app:/app/shared:${PYTHONPATH:-}"
|
||||
|
||||
462
services/production/app/api/internal_demo.py
Normal file
462
services/production/app/api/internal_demo.py
Normal file
@@ -0,0 +1,462 @@
|
||||
"""
|
||||
Internal Demo Cloning API for Production Service
|
||||
Service-to-service endpoint for cloning production data
|
||||
"""
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Header
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy import select
|
||||
import structlog
|
||||
import uuid
|
||||
from datetime import datetime, timezone, timedelta
|
||||
from typing import Optional
|
||||
import os
|
||||
|
||||
from app.core.database import get_db
|
||||
from app.models.production import (
|
||||
ProductionBatch, ProductionSchedule, ProductionCapacity,
|
||||
QualityCheckTemplate, QualityCheck, Equipment,
|
||||
ProductionStatus, ProductionPriority, ProcessStage,
|
||||
EquipmentStatus, EquipmentType
|
||||
)
|
||||
|
||||
# Module-level structured logger for this internal API.
logger = structlog.get_logger()
# All endpoints in this module are mounted under /internal/demo.
router = APIRouter(prefix="/internal/demo", tags=["internal"])

# Internal API key for service-to-service auth
# NOTE(review): the fallback value is a development-only default — production
# deployments must set INTERNAL_API_KEY in the environment, or any caller
# knowing this public default can reach the cloning endpoints.
INTERNAL_API_KEY = os.getenv("INTERNAL_API_KEY", "dev-internal-key-change-in-production")

# Base demo tenant IDs
# Well-known template tenants whose data is cloned into virtual demo tenants.
DEMO_TENANT_SAN_PABLO = "a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6"
DEMO_TENANT_LA_ESPIGA = "b2c3d4e5-f6a7-48b9-c0d1-e2f3a4b5c6d7"
|
||||
|
||||
|
||||
def verify_internal_api_key(x_internal_api_key: Optional[str] = Header(None)):
    """Verify internal API key for service-to-service communication.

    Args:
        x_internal_api_key: Value of the ``X-Internal-API-Key`` request header
            (``None`` when the header is absent).

    Returns:
        True when the supplied key matches ``INTERNAL_API_KEY``.

    Raises:
        HTTPException: 403 when the header is missing or does not match.
    """
    import hmac  # local import: avoids touching the module import block

    # Use a constant-time comparison: plain ``!=`` short-circuits on the first
    # differing byte, which leaks key-prefix information via response timing.
    if not hmac.compare_digest(x_internal_api_key or "", INTERNAL_API_KEY):
        logger.warning("Unauthorized internal API access attempted")
        raise HTTPException(status_code=403, detail="Invalid internal API key")
    return True
|
||||
|
||||
|
||||
@router.post("/clone")
async def clone_demo_data(
    base_tenant_id: str,
    virtual_tenant_id: str,
    demo_account_type: str,
    session_id: Optional[str] = None,
    db: AsyncSession = Depends(get_db),
    _: bool = Depends(verify_internal_api_key)
):
    """
    Clone production service data for a virtual demo tenant

    Clones:
    - Production batches (historical production runs)
    - Production schedules (daily planning)
    - Production capacity records
    - Quality check templates
    - Quality checks (inspection records)
    - Equipment (machines and tools)

    Args:
        base_tenant_id: Template tenant UUID to clone from
        virtual_tenant_id: Target virtual tenant UUID
        demo_account_type: Type of demo account
        session_id: Originating session ID for tracing

    Returns:
        Cloning status and record counts. On failure the transaction is rolled
        back and a ``{"status": "failed", ...}`` payload is returned (no HTTP
        error), so the orchestrator can aggregate per-service results.
    """
    start_time = datetime.now(timezone.utc)

    logger.info(
        "Starting production data cloning",
        base_tenant_id=base_tenant_id,
        virtual_tenant_id=virtual_tenant_id,
        demo_account_type=demo_account_type,
        session_id=session_id
    )

    try:
        # Validate UUIDs (ValueError -> 400 below)
        base_uuid = uuid.UUID(base_tenant_id)
        virtual_uuid = uuid.UUID(virtual_tenant_id)

        # Track cloning statistics
        stats = {
            "production_batches": 0,
            "production_schedules": 0,
            "production_capacity": 0,
            "quality_check_templates": 0,
            "quality_checks": 0,
            "equipment": 0
        }

        # ID mappings from base-tenant PKs to freshly generated PKs, so that
        # child rows (quality checks) can be re-pointed at the cloned parents.
        batch_id_map = {}
        template_id_map = {}
        equipment_id_map = {}

        # Clone Equipment first (no dependencies)
        result = await db.execute(
            select(Equipment).where(Equipment.tenant_id == base_uuid)
        )
        base_equipment = result.scalars().all()

        logger.info(
            "Found equipment to clone",
            count=len(base_equipment),
            base_tenant=str(base_uuid)
        )

        for equipment in base_equipment:
            new_equipment_id = uuid.uuid4()
            equipment_id_map[equipment.id] = new_equipment_id

            new_equipment = Equipment(
                id=new_equipment_id,
                tenant_id=virtual_uuid,
                name=equipment.name,
                type=equipment.type,
                model=equipment.model,
                serial_number=equipment.serial_number,
                location=equipment.location,
                status=equipment.status,
                install_date=equipment.install_date,
                last_maintenance_date=equipment.last_maintenance_date,
                next_maintenance_date=equipment.next_maintenance_date,
                maintenance_interval_days=equipment.maintenance_interval_days,
                efficiency_percentage=equipment.efficiency_percentage,
                uptime_percentage=equipment.uptime_percentage,
                energy_usage_kwh=equipment.energy_usage_kwh,
                power_kw=equipment.power_kw,
                capacity=equipment.capacity,
                weight_kg=equipment.weight_kg,
                current_temperature=equipment.current_temperature,
                target_temperature=equipment.target_temperature,
                is_active=equipment.is_active,
                notes=equipment.notes,
                created_at=datetime.now(timezone.utc),
                updated_at=datetime.now(timezone.utc)
            )
            db.add(new_equipment)
            stats["equipment"] += 1

        # Flush to get equipment IDs
        await db.flush()

        # Clone Quality Check Templates
        result = await db.execute(
            select(QualityCheckTemplate).where(QualityCheckTemplate.tenant_id == base_uuid)
        )
        base_templates = result.scalars().all()

        logger.info(
            "Found quality check templates to clone",
            count=len(base_templates),
            base_tenant=str(base_uuid)
        )

        for template in base_templates:
            new_template_id = uuid.uuid4()
            template_id_map[template.id] = new_template_id

            new_template = QualityCheckTemplate(
                id=new_template_id,
                tenant_id=virtual_uuid,
                name=template.name,
                template_code=template.template_code,
                check_type=template.check_type,
                category=template.category,
                description=template.description,
                instructions=template.instructions,
                parameters=template.parameters,
                thresholds=template.thresholds,
                scoring_criteria=template.scoring_criteria,
                is_active=template.is_active,
                is_required=template.is_required,
                is_critical=template.is_critical,
                weight=template.weight,
                min_value=template.min_value,
                max_value=template.max_value,
                target_value=template.target_value,
                unit=template.unit,
                tolerance_percentage=template.tolerance_percentage,
                applicable_stages=template.applicable_stages,
                created_by=template.created_by,
                created_at=datetime.now(timezone.utc),
                updated_at=datetime.now(timezone.utc)
            )
            db.add(new_template)
            stats["quality_check_templates"] += 1

        # Flush to get template IDs
        await db.flush()

        # Clone Production Batches
        result = await db.execute(
            select(ProductionBatch).where(ProductionBatch.tenant_id == base_uuid)
        )
        base_batches = result.scalars().all()

        logger.info(
            "Found production batches to clone",
            count=len(base_batches),
            base_tenant=str(base_uuid)
        )

        # Calculate date offset to make production recent.
        # BUG FIX: the previous code called max() on a generator filtered by
        # planned_start_time; when batches existed but none had a
        # planned_start_time, max() raised ValueError and the whole clone
        # failed. Use default=None and fall back to a zero offset.
        max_date = max(
            (batch.planned_start_time for batch in base_batches if batch.planned_start_time),
            default=None
        )
        if max_date is not None:
            # NOTE(review): assumes planned_start_time is timezone-aware;
            # subtracting a naive value from the aware "now" raises TypeError
            # — confirm against the ProductionBatch model definition.
            date_offset = datetime.now(timezone.utc) - max_date
        else:
            date_offset = timedelta(days=0)

        for batch in base_batches:
            new_batch_id = uuid.uuid4()
            batch_id_map[batch.id] = new_batch_id

            new_batch = ProductionBatch(
                id=new_batch_id,
                tenant_id=virtual_uuid,
                batch_number=f"BATCH-{uuid.uuid4().hex[:8].upper()}",  # New batch number
                product_id=batch.product_id,  # Keep product reference
                product_name=batch.product_name,
                recipe_id=batch.recipe_id,  # Keep recipe reference
                planned_start_time=batch.planned_start_time + date_offset if batch.planned_start_time else None,
                planned_end_time=batch.planned_end_time + date_offset if batch.planned_end_time else None,
                planned_quantity=batch.planned_quantity,
                planned_duration_minutes=batch.planned_duration_minutes,
                actual_start_time=batch.actual_start_time + date_offset if batch.actual_start_time else None,
                actual_end_time=batch.actual_end_time + date_offset if batch.actual_end_time else None,
                actual_quantity=batch.actual_quantity,
                actual_duration_minutes=batch.actual_duration_minutes,
                status=batch.status,
                priority=batch.priority,
                current_process_stage=batch.current_process_stage,
                process_stage_history=batch.process_stage_history,
                pending_quality_checks=batch.pending_quality_checks,
                completed_quality_checks=batch.completed_quality_checks,
                estimated_cost=batch.estimated_cost,
                actual_cost=batch.actual_cost,
                labor_cost=batch.labor_cost,
                material_cost=batch.material_cost,
                overhead_cost=batch.overhead_cost,
                yield_percentage=batch.yield_percentage,
                quality_score=batch.quality_score,
                waste_quantity=batch.waste_quantity,
                defect_quantity=batch.defect_quantity,
                equipment_used=batch.equipment_used,
                staff_assigned=batch.staff_assigned,
                station_id=batch.station_id,
                order_id=batch.order_id,
                forecast_id=batch.forecast_id,
                is_rush_order=batch.is_rush_order,
                is_special_recipe=batch.is_special_recipe,
                production_notes=batch.production_notes,
                quality_notes=batch.quality_notes,
                delay_reason=batch.delay_reason,
                cancellation_reason=batch.cancellation_reason,
                created_at=datetime.now(timezone.utc),
                updated_at=datetime.now(timezone.utc),
                completed_at=batch.completed_at + date_offset if batch.completed_at else None
            )
            db.add(new_batch)
            stats["production_batches"] += 1

        # Flush to get batch IDs
        await db.flush()

        # Clone Quality Checks
        result = await db.execute(
            select(QualityCheck).where(QualityCheck.tenant_id == base_uuid)
        )
        base_checks = result.scalars().all()

        logger.info(
            "Found quality checks to clone",
            count=len(base_checks),
            base_tenant=str(base_uuid)
        )

        for check in base_checks:
            # Re-point at cloned parents; fall back to the original IDs when
            # the parent was not part of the base tenant's data.
            new_batch_id = batch_id_map.get(check.batch_id, check.batch_id)
            new_template_id = template_id_map.get(check.template_id, check.template_id) if check.template_id else None

            new_check = QualityCheck(
                id=uuid.uuid4(),
                tenant_id=virtual_uuid,
                batch_id=new_batch_id,
                template_id=new_template_id,
                check_type=check.check_type,
                process_stage=check.process_stage,
                check_time=check.check_time + date_offset,
                checker_id=check.checker_id,
                quality_score=check.quality_score,
                pass_fail=check.pass_fail,
                defect_count=check.defect_count,
                defect_types=check.defect_types,
                measured_weight=check.measured_weight,
                measured_temperature=check.measured_temperature,
                measured_moisture=check.measured_moisture,
                measured_dimensions=check.measured_dimensions,
                stage_specific_data=check.stage_specific_data,
                target_weight=check.target_weight,
                target_temperature=check.target_temperature,
                target_moisture=check.target_moisture,
                tolerance_percentage=check.tolerance_percentage,
                within_tolerance=check.within_tolerance,
                corrective_action_needed=check.corrective_action_needed,
                corrective_actions=check.corrective_actions,
                template_results=check.template_results,
                criteria_scores=check.criteria_scores,
                check_notes=check.check_notes,
                photos_urls=check.photos_urls,
                certificate_url=check.certificate_url,
                created_at=datetime.now(timezone.utc),
                updated_at=datetime.now(timezone.utc)
            )
            db.add(new_check)
            stats["quality_checks"] += 1

        # Clone Production Schedules
        result = await db.execute(
            select(ProductionSchedule).where(ProductionSchedule.tenant_id == base_uuid)
        )
        base_schedules = result.scalars().all()

        logger.info(
            "Found production schedules to clone",
            count=len(base_schedules),
            base_tenant=str(base_uuid)
        )

        for schedule in base_schedules:
            new_schedule = ProductionSchedule(
                id=uuid.uuid4(),
                tenant_id=virtual_uuid,
                schedule_date=schedule.schedule_date + date_offset,
                shift_start=schedule.shift_start + date_offset,
                shift_end=schedule.shift_end + date_offset,
                total_capacity_hours=schedule.total_capacity_hours,
                planned_capacity_hours=schedule.planned_capacity_hours,
                actual_capacity_hours=schedule.actual_capacity_hours,
                overtime_hours=schedule.overtime_hours,
                staff_count=schedule.staff_count,
                equipment_capacity=schedule.equipment_capacity,
                station_assignments=schedule.station_assignments,
                total_batches_planned=schedule.total_batches_planned,
                total_batches_completed=schedule.total_batches_completed,
                total_quantity_planned=schedule.total_quantity_planned,
                total_quantity_produced=schedule.total_quantity_produced,
                is_finalized=schedule.is_finalized,
                is_active=schedule.is_active,
                efficiency_percentage=schedule.efficiency_percentage,
                utilization_percentage=schedule.utilization_percentage,
                on_time_completion_rate=schedule.on_time_completion_rate,
                schedule_notes=schedule.schedule_notes,
                schedule_adjustments=schedule.schedule_adjustments,
                created_at=datetime.now(timezone.utc),
                updated_at=datetime.now(timezone.utc),
                finalized_at=schedule.finalized_at + date_offset if schedule.finalized_at else None
            )
            db.add(new_schedule)
            stats["production_schedules"] += 1

        # Clone Production Capacity
        result = await db.execute(
            select(ProductionCapacity).where(ProductionCapacity.tenant_id == base_uuid)
        )
        base_capacity = result.scalars().all()

        for capacity in base_capacity:
            new_capacity = ProductionCapacity(
                id=uuid.uuid4(),
                tenant_id=virtual_uuid,
                resource_type=capacity.resource_type,
                resource_id=capacity.resource_id,
                resource_name=capacity.resource_name,
                date=capacity.date + date_offset,
                start_time=capacity.start_time + date_offset,
                end_time=capacity.end_time + date_offset,
                total_capacity_units=capacity.total_capacity_units,
                allocated_capacity_units=capacity.allocated_capacity_units,
                remaining_capacity_units=capacity.remaining_capacity_units,
                is_available=capacity.is_available,
                is_maintenance=capacity.is_maintenance,
                is_reserved=capacity.is_reserved,
                equipment_type=capacity.equipment_type,
                max_batch_size=capacity.max_batch_size,
                min_batch_size=capacity.min_batch_size,
                setup_time_minutes=capacity.setup_time_minutes,
                cleanup_time_minutes=capacity.cleanup_time_minutes,
                efficiency_rating=capacity.efficiency_rating,
                maintenance_status=capacity.maintenance_status,
                last_maintenance_date=capacity.last_maintenance_date + date_offset if capacity.last_maintenance_date else None,
                notes=capacity.notes,
                restrictions=capacity.restrictions,
                created_at=datetime.now(timezone.utc),
                updated_at=datetime.now(timezone.utc)
            )
            db.add(new_capacity)
            stats["production_capacity"] += 1

        # Commit all changes
        await db.commit()

        total_records = sum(stats.values())
        duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)

        logger.info(
            "Production data cloning completed",
            virtual_tenant_id=virtual_tenant_id,
            total_records=total_records,
            stats=stats,
            duration_ms=duration_ms
        )

        return {
            "service": "production",
            "status": "completed",
            "records_cloned": total_records,
            "duration_ms": duration_ms,
            "details": stats
        }

    except ValueError as e:
        logger.error("Invalid UUID format", error=str(e))
        raise HTTPException(status_code=400, detail=f"Invalid UUID: {str(e)}")

    except Exception as e:
        logger.error(
            "Failed to clone production data",
            error=str(e),
            virtual_tenant_id=virtual_tenant_id,
            exc_info=True
        )

        # Rollback on error
        await db.rollback()

        # Deliberately a 200 "failed" payload, not an HTTP error: the
        # orchestrator aggregates per-service results.
        return {
            "service": "production",
            "status": "failed",
            "records_cloned": 0,
            "duration_ms": int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000),
            "error": str(e)
        }
|
||||
|
||||
|
||||
@router.get("/clone/health")
async def clone_health_check(_: bool = Depends(verify_internal_api_key)):
    """Report availability of the internal cloning endpoint.

    Called by the orchestrator to confirm this service can accept clone
    requests; requires the internal API key like every route in this module.
    """
    payload = {
        "service": "production",
        "clone_endpoint": "available",
        "version": "2.0.0",
    }
    return payload
|
||||
490
services/production/app/api/quality_templates.py
Normal file
490
services/production/app/api/quality_templates.py
Normal file
@@ -0,0 +1,490 @@
|
||||
# services/production/app/api/quality_templates.py
|
||||
"""
|
||||
Quality Check Templates API - CRUD operations on quality check templates
|
||||
"""
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Path, Query, status
|
||||
from typing import Optional
|
||||
from uuid import UUID
|
||||
import structlog
|
||||
|
||||
from shared.auth.decorators import get_current_user_dep
|
||||
from shared.auth.access_control import require_user_role
|
||||
from shared.routing import RouteBuilder, RouteCategory
|
||||
from app.core.database import get_db
|
||||
from app.repositories.quality_template_repository import QualityTemplateRepository
|
||||
from app.models.production import ProcessStage, QualityCheckTemplate
|
||||
from app.schemas.quality_templates import (
|
||||
QualityCheckTemplateCreate,
|
||||
QualityCheckTemplateUpdate,
|
||||
QualityCheckTemplateResponse,
|
||||
QualityCheckTemplateList,
|
||||
QualityCheckType
|
||||
)
|
||||
|
||||
# Module-level structured logger.
logger = structlog.get_logger()
# Builds tenant-scoped route paths for the production service.
route_builder = RouteBuilder('production')
router = APIRouter(tags=["quality-templates"])
|
||||
|
||||
|
||||
# ===== Quality Template CRUD Endpoints =====
|
||||
|
||||
@router.get(
    route_builder.build_base_route("quality-templates"),
    response_model=QualityCheckTemplateList
)
async def list_quality_templates(
    tenant_id: UUID = Path(...),
    stage: Optional[ProcessStage] = Query(None, description="Filter by process stage"),
    check_type: Optional[QualityCheckType] = Query(None, description="Filter by check type"),
    is_active: Optional[bool] = Query(True, description="Filter by active status"),
    skip: int = Query(0, ge=0, description="Number of templates to skip"),
    limit: int = Query(100, ge=1, le=1000, description="Number of templates to return"),
    current_user: dict = Depends(get_current_user_dep),
    db = Depends(get_db)
):
    """List quality check templates for a tenant, with filtering and pagination.

    Optional filters: process stage, check type, and active status
    (active-only by default). Unexpected failures are logged and mapped
    to a generic 500 response.
    """
    try:
        template_repo = QualityTemplateRepository(db)

        templates, total = await template_repo.get_templates_by_tenant(
            tenant_id=str(tenant_id),
            stage=stage,
            check_type=check_type.value if check_type else None,
            is_active=is_active,
            skip=skip,
            limit=limit
        )

        logger.info("Retrieved quality templates",
                   tenant_id=str(tenant_id),
                   total=total,
                   filters={"stage": stage, "check_type": check_type, "is_active": is_active})

        items = [QualityCheckTemplateResponse.from_orm(t) for t in templates]
        return QualityCheckTemplateList(
            templates=items,
            total=total,
            skip=skip,
            limit=limit
        )

    except Exception as e:
        logger.error("Error listing quality templates",
                    error=str(e), tenant_id=str(tenant_id))
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to retrieve quality templates"
        )
|
||||
|
||||
|
||||
@router.post(
    route_builder.build_base_route("quality-templates"),
    response_model=QualityCheckTemplateResponse,
    status_code=status.HTTP_201_CREATED
)
@require_user_role(['admin', 'owner', 'member'])
async def create_quality_template(
    template_data: QualityCheckTemplateCreate,
    tenant_id: UUID = Path(...),
    current_user: dict = Depends(get_current_user_dep),
    db = Depends(get_db)
):
    """Create a new quality check template for the tenant.

    Rejects the request with 400 when the (optional) template code is
    already taken within the tenant; otherwise persists the template and
    returns the created record.
    """
    try:
        template_repo = QualityTemplateRepository(db)

        # Enforce per-tenant uniqueness of the template code, when supplied.
        if template_data.template_code:
            already_taken = await template_repo.check_template_code_exists(
                tenant_id=str(tenant_id),
                template_code=template_data.template_code
            )
            if already_taken:
                raise HTTPException(
                    status_code=status.HTTP_400_BAD_REQUEST,
                    detail=f"Template code '{template_data.template_code}' already exists"
                )

        # Combine the validated payload with server-side fields and persist.
        payload = template_data.dict()
        payload['tenant_id'] = str(tenant_id)
        payload['created_by'] = UUID(current_user["sub"])
        template = QualityCheckTemplate(**payload)

        db.add(template)
        await db.commit()
        await db.refresh(template)

        logger.info("Created quality template",
                   template_id=str(template.id),
                   template_name=template.name,
                   tenant_id=str(tenant_id))

        return QualityCheckTemplateResponse.from_orm(template)

    except HTTPException:
        raise
    except Exception as e:
        await db.rollback()
        logger.error("Error creating quality template",
                    error=str(e), tenant_id=str(tenant_id))
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to create quality template"
        )
|
||||
|
||||
|
||||
@router.get(
    route_builder.build_resource_detail_route("quality-templates", "template_id"),
    response_model=QualityCheckTemplateResponse
)
async def get_quality_template(
    tenant_id: UUID = Path(...),
    template_id: UUID = Path(...),
    current_user: dict = Depends(get_current_user_dep),
    db = Depends(get_db)
):
    """Fetch a single quality check template by ID, scoped to the tenant.

    Returns 404 when no template with that ID exists for the tenant.
    """
    try:
        template = await QualityTemplateRepository(db).get_by_tenant_and_id(
            tenant_id=str(tenant_id),
            template_id=template_id
        )

        if template is None:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail="Quality template not found"
            )

        return QualityCheckTemplateResponse.from_orm(template)

    except HTTPException:
        raise
    except Exception as e:
        logger.error("Error getting quality template",
                    error=str(e),
                    template_id=str(template_id),
                    tenant_id=str(tenant_id))
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to retrieve quality template"
        )
|
||||
|
||||
|
||||
@router.put(
    route_builder.build_resource_detail_route("quality-templates", "template_id"),
    response_model=QualityCheckTemplateResponse
)
@require_user_role(['admin', 'owner', 'member'])
async def update_quality_template(
    template_data: QualityCheckTemplateUpdate,
    tenant_id: UUID = Path(...),
    template_id: UUID = Path(...),
    current_user: dict = Depends(get_current_user_dep),
    db = Depends(get_db)
):
    """Apply a partial update to a quality check template.

    Only fields explicitly set in the payload are written. Returns 404 when
    the template does not exist for the tenant, and 400 when a new template
    code would collide with another template's code.
    """
    try:
        template_repo = QualityTemplateRepository(db)

        # Load the target template, scoped to the tenant.
        template = await template_repo.get_by_tenant_and_id(
            tenant_id=str(tenant_id),
            template_id=template_id
        )
        if template is None:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail="Quality template not found"
            )

        # Guard against template-code collisions when the code is changing.
        code_is_changing = (
            template_data.template_code
            and template_data.template_code != template.template_code
        )
        if code_is_changing:
            collision = await template_repo.check_template_code_exists(
                tenant_id=str(tenant_id),
                template_code=template_data.template_code,
                exclude_id=template_id
            )
            if collision:
                raise HTTPException(
                    status_code=status.HTTP_400_BAD_REQUEST,
                    detail=f"Template code '{template_data.template_code}' already exists"
                )

        # Apply only the fields that were explicitly provided.
        for field_name, new_value in template_data.dict(exclude_unset=True).items():
            setattr(template, field_name, new_value)

        await db.commit()
        await db.refresh(template)

        logger.info("Updated quality template",
                   template_id=str(template_id),
                   tenant_id=str(tenant_id))

        return QualityCheckTemplateResponse.from_orm(template)

    except HTTPException:
        raise
    except Exception as e:
        await db.rollback()
        logger.error("Error updating quality template",
                    error=str(e),
                    template_id=str(template_id),
                    tenant_id=str(tenant_id))
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to update quality template"
        )
|
||||
|
||||
|
||||
@router.delete(
    route_builder.build_resource_detail_route("quality-templates", "template_id"),
    status_code=status.HTTP_204_NO_CONTENT
)
@require_user_role(['admin', 'owner'])
async def delete_quality_template(
    tenant_id: UUID = Path(...),
    template_id: UUID = Path(...),
    current_user: dict = Depends(get_current_user_dep),
    db = Depends(get_db)
):
    """Soft-delete a quality check template by marking it inactive.

    A hard delete would require checking for references from recipes and
    production batches, so for safety the row is kept and ``is_active`` is
    flipped to False. Returns 204 on success, 404 when the template is not
    found for the tenant.
    """
    try:
        # Resolve the template within the tenant's scope.
        template = await QualityTemplateRepository(db).get_by_tenant_and_id(
            tenant_id=str(tenant_id),
            template_id=template_id
        )
        if template is None:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail="Quality template not found"
            )

        # Soft delete: deactivate rather than remove the row.
        template.is_active = False
        await db.commit()

        logger.info("Deleted quality template (soft delete)",
                   template_id=str(template_id),
                   tenant_id=str(tenant_id))

    except HTTPException:
        raise
    except Exception as e:
        await db.rollback()
        logger.error("Error deleting quality template",
                    error=str(e),
                    template_id=str(template_id),
                    tenant_id=str(tenant_id))
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to delete quality template"
        )
|
||||
|
||||
|
||||
# ===== Additional Quality Template Operations =====
|
||||
|
||||
@router.get(
    route_builder.build_custom_route(
        RouteCategory.BASE,
        ["quality-templates", "stages", "{stage}"]
    ),
    response_model=QualityCheckTemplateList
)
async def get_templates_for_stage(
    tenant_id: UUID = Path(...),
    stage: ProcessStage = Path(...),
    is_active: bool = Query(True, description="Filter by active status"),
    current_user: dict = Depends(get_current_user_dep),
    db = Depends(get_db)
):
    """Return every quality template applicable to one process stage.

    The full matching set is returned unpaginated (skip=0, limit=count).
    """
    try:
        matching = await QualityTemplateRepository(db).get_templates_for_stage(
            tenant_id=str(tenant_id),
            stage=stage,
            is_active=is_active
        )

        logger.info("Retrieved templates for stage",
                   tenant_id=str(tenant_id),
                   stage=stage,
                   count=len(matching))

        responses = [QualityCheckTemplateResponse.from_orm(t) for t in matching]
        return QualityCheckTemplateList(
            templates=responses,
            total=len(matching),
            skip=0,
            limit=len(matching)
        )

    except Exception as e:
        logger.error("Error getting templates for stage",
                    error=str(e),
                    stage=stage,
                    tenant_id=str(tenant_id))
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to retrieve templates for stage"
        )
|
||||
|
||||
|
||||
@router.post(
    route_builder.build_resource_action_route("quality-templates", "template_id", "duplicate"),
    response_model=QualityCheckTemplateResponse,
    status_code=status.HTTP_201_CREATED
)
@require_user_role(['admin', 'owner', 'member'])
async def duplicate_quality_template(
    tenant_id: UUID = Path(...),
    template_id: UUID = Path(...),
    current_user: dict = Depends(get_current_user_dep),
    db = Depends(get_db)
):
    """Duplicate an existing quality check template.

    Creates a copy named "<name> (Copy)" with template code "<code>_copy"
    (when the original has a code), owned by the requesting user.

    Returns 404 when the source template does not exist for the tenant and
    400 when the derived template code is already taken — previously this
    collision was not checked (unlike the create/update endpoints), so a
    second duplication of the same template hit the DB constraint and
    surfaced as a 500.
    """
    try:
        repo = QualityTemplateRepository(db)

        # Get existing template
        original = await repo.get_by_tenant_and_id(
            tenant_id=str(tenant_id),
            template_id=template_id
        )

        if not original:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail="Quality template not found"
            )

        # Derive the copy's code and reject collisions up front, matching the
        # behavior of the create endpoint.
        new_code = f"{original.template_code}_copy" if original.template_code else None
        if new_code:
            code_exists = await repo.check_template_code_exists(
                tenant_id=str(tenant_id),
                template_code=new_code
            )
            if code_exists:
                raise HTTPException(
                    status_code=status.HTTP_400_BAD_REQUEST,
                    detail=f"Template code '{new_code}' already exists"
                )

        # Create duplicate
        duplicate_data = {
            'tenant_id': original.tenant_id,
            'name': f"{original.name} (Copy)",
            'template_code': new_code,
            'check_type': original.check_type,
            'category': original.category,
            'description': original.description,
            'instructions': original.instructions,
            'parameters': original.parameters,
            'thresholds': original.thresholds,
            'scoring_criteria': original.scoring_criteria,
            'is_active': original.is_active,
            'is_required': original.is_required,
            'is_critical': original.is_critical,
            'weight': original.weight,
            'min_value': original.min_value,
            'max_value': original.max_value,
            'target_value': original.target_value,
            'unit': original.unit,
            'tolerance_percentage': original.tolerance_percentage,
            'applicable_stages': original.applicable_stages,
            # The duplicator, not the original author, owns the copy.
            'created_by': UUID(current_user["sub"])
        }

        duplicate = QualityCheckTemplate(**duplicate_data)
        db.add(duplicate)
        await db.commit()
        await db.refresh(duplicate)

        logger.info("Duplicated quality template",
                   original_id=str(template_id),
                   duplicate_id=str(duplicate.id),
                   tenant_id=str(tenant_id))

        return QualityCheckTemplateResponse.from_orm(duplicate)

    except HTTPException:
        raise
    except Exception as e:
        await db.rollback()
        logger.error("Error duplicating quality template",
                    error=str(e),
                    template_id=str(template_id),
                    tenant_id=str(tenant_id))
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to duplicate quality template"
        )
|
||||
|
||||
|
||||
def _collect_template_validation_errors(template_data: dict) -> list:
    """Pure validation of a quality-template payload.

    Returns a list of human-readable error strings; an empty list means the
    payload is valid. Kept side-effect free so it can be unit-tested without
    FastAPI plumbing.
    """
    errors = []

    # Required fields
    if not template_data.get('name'):
        errors.append("Template name is required")
    if not template_data.get('check_type'):
        errors.append("Check type is required")

    # Measurement-style checks need a coherent min/max range
    check_type = template_data.get('check_type')
    if check_type in ['measurement', 'temperature', 'weight']:
        min_value = template_data.get('min_value')
        max_value = template_data.get('max_value')
        if min_value is not None and max_value is not None and min_value >= max_value:
            errors.append("Minimum value must be less than maximum value")

    # Weight must be numeric and within [0, 10]; the previous version raised
    # a TypeError on non-numeric weights, which surfaced as a generic
    # "Validation error" instead of an actionable message.
    weight = template_data.get('weight', 1.0)
    if not isinstance(weight, (int, float)):
        errors.append("Weight must be a number")
    elif weight < 0 or weight > 10:
        errors.append("Weight must be between 0 and 10")

    return errors


@router.post(
    route_builder.build_operations_route("quality-templates/validate"),
    response_model=dict
)
@require_user_role(['admin', 'owner', 'member'])
async def validate_quality_template(
    template_data: dict,
    tenant_id: UUID = Path(...),
    current_user: dict = Depends(get_current_user_dep),
):
    """
    Validate quality template configuration without creating it

    Returns validation result with any errors found
    """
    try:
        errors = _collect_template_validation_errors(template_data)
        is_valid = not errors

        logger.info("Validated quality template",
                    tenant_id=str(tenant_id),
                    valid=is_valid,
                    error_count=len(errors))

        return {
            "valid": is_valid,
            "errors": errors
        }

    except Exception as e:
        # Defensive catch-all: validation must never 500; report the failure
        # as an invalid result instead.
        logger.error("Error validating quality template",
                     error=str(e), tenant_id=str(tenant_id))
        return {
            "valid": False,
            "errors": [f"Validation error: {str(e)}"]
        }
|
||||
@@ -21,7 +21,9 @@ from app.api import (
|
||||
production_schedules,
|
||||
production_operations,
|
||||
production_dashboard,
|
||||
analytics
|
||||
analytics,
|
||||
quality_templates,
|
||||
internal_demo
|
||||
)
|
||||
|
||||
|
||||
@@ -162,11 +164,14 @@ service.setup_standard_endpoints()
|
||||
service.setup_custom_middleware()
|
||||
|
||||
# Include standardized routers
|
||||
# NOTE: Register more specific routes before generic parameterized routes
|
||||
service.add_router(quality_templates.router) # Register first to avoid route conflicts
|
||||
service.add_router(production_batches.router)
|
||||
service.add_router(production_schedules.router)
|
||||
service.add_router(production_operations.router)
|
||||
service.add_router(production_dashboard.router)
|
||||
service.add_router(analytics.router)
|
||||
service.add_router(internal_demo.router)
|
||||
|
||||
|
||||
@app.post("/test/production-scheduler")
|
||||
|
||||
@@ -1,246 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Test script for transformation integration between production and inventory services.
|
||||
This script verifies that the transformation API is properly integrated.
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
import sys
|
||||
import os
|
||||
from uuid import uuid4, UUID
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
# Add the service directory to path
|
||||
sys.path.insert(0, os.path.join(os.path.dirname(__file__)))
|
||||
|
||||
from app.services.production_service import ProductionService
|
||||
from shared.clients.inventory_client import InventoryServiceClient
|
||||
from shared.config.base import BaseServiceSettings
|
||||
|
||||
class MockConfig(BaseServiceSettings):
    """Mock configuration for testing"""
    # Identifies this settings object as belonging to the production service
    service_name: str = "production"
    # Verbose behaviour for local test runs
    debug: bool = True
    # Local gateway endpoint used for service-to-service calls in tests
    gateway_base_url: str = "http://localhost:8000"
    # Dummy token; real deployments inject a secret via environment
    service_auth_token: str = "test-token"
|
||||
|
||||
async def test_inventory_client_transformation():
    """Test the inventory client transformation methods.

    Exercises the client wrappers with a representative payload. Network
    failures are expected when the inventory service is not running and are
    treated as a pass: the goal here is only to verify that the methods
    exist and accept the documented argument shapes.
    """
    print("🧪 Testing inventory client transformation methods...")

    config = MockConfig()
    inventory_client = InventoryServiceClient(config)

    tenant_id = "test-tenant-123"

    # Representative par-baked -> fully-baked transformation payload
    test_transformation_data = {
        "source_ingredient_id": str(uuid4()),
        "target_ingredient_id": str(uuid4()),
        "source_stage": "PAR_BAKED",
        "target_stage": "FULLY_BAKED",
        "source_quantity": 10.0,
        "target_quantity": 10.0,
        "expiration_calculation_method": "days_from_transformation",
        "expiration_days_offset": 1,
        "process_notes": "Test transformation from production service",
        "target_batch_number": "TEST-BATCH-001"
    }

    try:
        # Test 1: Create transformation (this will fail if inventory service is not running)
        print(" Creating transformation...")
        await inventory_client.create_transformation(
            test_transformation_data, tenant_id
        )
        print(" ✅ Transformation creation method works (would call inventory service)")

        # Test 2: Par-bake convenience method
        print(" Testing par-bake convenience method...")
        await inventory_client.create_par_bake_transformation(
            source_ingredient_id=test_transformation_data["source_ingredient_id"],
            target_ingredient_id=test_transformation_data["target_ingredient_id"],
            quantity=5.0,
            tenant_id=tenant_id,
            notes="Test par-bake transformation"
        )
        print(" ✅ Par-bake transformation method works (would call inventory service)")

        # Test 3: Get transformations
        print(" Testing get transformations...")
        await inventory_client.get_transformations(
            tenant_id=tenant_id,
            source_stage="PAR_BAKED",
            target_stage="FULLY_BAKED",
            days_back=7
        )
        print(" ✅ Get transformations method works (would call inventory service)")

        print("✅ All inventory client transformation methods are properly implemented")
        return True

    except Exception as e:
        # Connection errors are expected without a live inventory service;
        # reaching the call site at all proves the client API is present.
        print(f" ⚠️ Expected errors due to service not running: {str(e)}")
        print(" ✅ Methods are implemented correctly (would work with running services)")
        return True
|
||||
|
||||
async def test_production_service_integration():
    """Test the production service transformation integration.

    Builds a ProductionService against a stub database manager and verifies
    that every expected transformation method is actually defined, instead
    of unconditionally printing success for each one.
    """
    print("\n🧪 Testing production service transformation integration...")

    try:
        config = MockConfig()

        # Minimal stand-in for the real database manager: get_session yields
        # an async-context-manager object so `async with` does not blow up.
        class MockDatabaseManager:
            async def get_session(self):
                class MockSession:
                    async def __aenter__(self):
                        return self
                    async def __aexit__(self, *args):
                        pass
                return MockSession()

        database_manager = MockDatabaseManager()
        production_service = ProductionService(database_manager, config)

        # Actually check that each expected transformation method exists and
        # is callable, rather than asserting success unconditionally.
        print(" Checking transformation methods...")
        expected_methods = [
            "transform_par_baked_products",
            "get_production_transformations",
            "get_transformation_efficiency_metrics",
            "get_batch_with_transformations",
        ]

        all_present = True
        for method_name in expected_methods:
            if callable(getattr(production_service, method_name, None)):
                print(f" ✅ {method_name} method exists")
            else:
                print(f" ❌ {method_name} method missing")
                all_present = False

        if all_present:
            print("✅ All production service transformation methods are properly implemented")
        return all_present

    except Exception as e:
        print(f" ❌ Production service integration error: {str(e)}")
        return False
|
||||
|
||||
def test_api_endpoints_structure():
    """Test that API endpoints are properly structured.

    Returns True only when every expected transformation endpoint is present
    on the production router; previously the function printed success and
    returned True even when endpoints were missing.
    """
    print("\n🧪 Testing API endpoint structure...")

    try:
        # Import the API module to check endpoints exist
        from app.api.production import router

        # Collect the concrete paths registered on the router
        endpoint_paths = [
            route.path for route in router.routes if hasattr(route, 'path')
        ]

        expected_endpoints = [
            "/tenants/{tenant_id}/production/batches/{batch_id}/complete-with-transformation",
            "/tenants/{tenant_id}/production/transformations/par-baked-to-fresh",
            "/tenants/{tenant_id}/production/transformations",
            "/tenants/{tenant_id}/production/analytics/transformation-efficiency",
            "/tenants/{tenant_id}/production/batches/{batch_id}/transformations"
        ]

        missing = []
        for expected in expected_endpoints:
            if expected in endpoint_paths:
                print(f" ✅ {expected}")
            else:
                print(f" ❌ Missing: {expected}")
                missing.append(expected)

        if missing:
            # Report failure so the aggregate runner reflects reality
            return False

        print("✅ API endpoints are properly structured")
        return True

    except Exception as e:
        print(f" ❌ API endpoint structure error: {str(e)}")
        return False
|
||||
|
||||
def print_integration_summary():
    """Print a summary of the integration"""
    banner = "=" * 80
    summary_lines = [
        "\n" + banner,
        "🎯 INTEGRATION SUMMARY",
        banner,
        "",
        "✅ COMPLETED INTEGRATIONS:",
        "",
        "1. 📦 INVENTORY SERVICE CLIENT ENHANCEMENTS:",
        " • create_transformation() - Generic transformation creation",
        " • create_par_bake_transformation() - Convenience method for par-baked → fresh",
        " • get_transformations() - Retrieve transformations with filtering",
        " • get_transformation_by_id() - Get specific transformation",
        " • get_transformation_summary() - Dashboard summary data",
        "",
        "2. 🏭 PRODUCTION SERVICE ENHANCEMENTS:",
        " • complete_production_batch_with_transformation() - Complete batch + transform",
        " • transform_par_baked_products() - Transform par-baked to finished products",
        " • get_production_transformations() - Get production-related transformations",
        " • get_transformation_efficiency_metrics() - Analytics for transformations",
        " • get_batch_with_transformations() - Batch details with transformations",
        "",
        "3. 🌐 NEW API ENDPOINTS:",
        " • POST /production/batches/{batch_id}/complete-with-transformation",
        " • POST /production/transformations/par-baked-to-fresh",
        " • GET /production/transformations",
        " • GET /production/analytics/transformation-efficiency",
        " • GET /production/batches/{batch_id}/transformations",
        "",
        "4. 💼 BUSINESS PROCESS INTEGRATION:",
        " • Central bakery model: Receives par-baked products from central baker",
        " • Production batches: Can complete with automatic transformation",
        " • Oven operations: Transform par-baked → finished products for clients",
        " • Inventory tracking: Automatic stock movements and expiration dates",
        " • Analytics: Track transformation efficiency and metrics",
        "",
        "🔄 WORKFLOW ENABLED:",
        " 1. Central baker produces par-baked products",
        " 2. Local bakery receives par-baked inventory",
        " 3. Production service creates batch for transformation",
        " 4. Oven process transforms par-baked → fresh products",
        " 5. Inventory service handles stock movements and tracking",
        " 6. Analytics track transformation efficiency",
        "",
        banner,
    ]
    # One write instead of many print() calls; output is identical.
    print("\n".join(summary_lines))
|
||||
|
||||
async def main():
    """Main test runner"""
    print("🚀 TESTING TRANSFORMATION API INTEGRATION")
    print("="*60)

    # Run the three test phases and collect their pass/fail flags
    results = [
        await test_inventory_client_transformation(),
        await test_production_service_integration(),
        test_api_endpoints_structure(),
    ]

    print("\n" + "="*60)
    print("📊 TEST RESULTS")
    print("="*60)

    passed = sum(results)
    total = len(results)
    all_passed = passed == total

    if all_passed:
        print(f"✅ ALL TESTS PASSED ({passed}/{total})")
        print("🎉 Integration is ready for use!")
    else:
        print(f"⚠️ {passed}/{total} tests passed")
        print("Some issues need to be resolved before production use.")

    # Print integration summary
    print_integration_summary()

    return all_passed
|
||||
|
||||
if __name__ == "__main__":
    # Drive the async test suite; the process exit code mirrors success.
    success = asyncio.run(main())
    raise SystemExit(0 if success else 1)
|
||||
@@ -1,221 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Verify that the transformation integration has been properly implemented.
|
||||
This script checks the code structure without requiring complex imports.
|
||||
"""
|
||||
|
||||
import os
|
||||
import re
|
||||
from typing import List, Dict
|
||||
|
||||
|
||||
def check_file_exists(file_path: str) -> bool:
    """Report whether *file_path* refers to an existing path on disk."""
    path_present = os.path.exists(file_path)
    return path_present
|
||||
|
||||
|
||||
def search_in_file(file_path: str, patterns: List[str]) -> Dict[str, bool]:
    """Search for patterns in file"""
    try:
        with open(file_path, 'r', encoding='utf-8') as f:
            content = f.read()
        # Map each regex to whether it matched anywhere in the file
        return {pattern: bool(re.search(pattern, content)) for pattern in patterns}
    except Exception as e:
        # On read/regex failure, report and mark every pattern as not found
        print(f"Error reading {file_path}: {e}")
        return {pattern: False for pattern in patterns}
|
||||
|
||||
|
||||
def verify_inventory_client():
    """Verify inventory client has transformation methods"""
    print("🔍 Verifying Inventory Service Client...")

    file_path = "../../shared/clients/inventory_client.py"

    if not check_file_exists(file_path):
        print(f" ❌ File not found: {file_path}")
        return False

    # Regex signatures that must all appear in the client module
    patterns = [
        r"async def create_transformation\(",
        r"async def create_par_bake_transformation\(",
        r"async def get_transformations\(",
        r"async def get_transformation_by_id\(",
        r"async def get_transformation_summary\(",
        r"# PRODUCT TRANSFORMATION",
    ]

    hits = search_in_file(file_path, patterns)

    for pattern, found in hits.items():
        marker = "✅" if found else "❌"
        label = pattern.replace(r"async def ", "").replace(r"\(", "").replace("# ", "")
        print(f" {marker} {label}")

    return all(hits.values())
|
||||
|
||||
|
||||
def verify_production_service():
    """Verify production service has transformation integration"""
    print("\n🔍 Verifying Production Service...")

    file_path = "app/services/production_service.py"

    if not check_file_exists(file_path):
        print(f" ❌ File not found: {file_path}")
        return False

    # Regex signatures that must all appear in the service module
    patterns = [
        r"async def complete_production_batch_with_transformation\(",
        r"async def transform_par_baked_products\(",
        r"async def get_production_transformations\(",
        r"async def get_transformation_efficiency_metrics\(",
        r"async def get_batch_with_transformations\(",
        r"async def _apply_batch_transformation\(",
        r"# TRANSFORMATION METHODS FOR PRODUCTION",
    ]

    hits = search_in_file(file_path, patterns)

    for pattern, found in hits.items():
        marker = "✅" if found else "❌"
        label = pattern.replace(r"async def ", "").replace(r"\(", "").replace("# ", "")
        print(f" {marker} {label}")

    return all(hits.values())
|
||||
|
||||
|
||||
def verify_production_api():
    """Verify production API has transformation endpoints"""
    print("\n🔍 Verifying Production API Endpoints...")

    file_path = "app/api/production.py"

    if not check_file_exists(file_path):
        print(f" ❌ File not found: {file_path}")
        return False

    # Endpoint fragments and handler names that must appear in the API module
    patterns = [
        r"complete-with-transformation",
        r"par-baked-to-fresh",
        r"get_production_transformations",
        r"get_transformation_efficiency_analytics",
        r"get_batch_transformations",
        r"# TRANSFORMATION ENDPOINTS",
    ]

    hits = search_in_file(file_path, patterns)

    for pattern, found in hits.items():
        marker = "✅" if found else "❌"
        print(f" {marker} {pattern}")

    return all(hits.values())
|
||||
|
||||
|
||||
def verify_integration_completeness():
    """Verify that all integration components are present"""
    print("\n🔍 Verifying Integration Completeness...")

    # Check that inventory service client calls are present in production service
    file_path = "app/services/production_service.py"

    patterns = [
        r"self\.inventory_client\.create_par_bake_transformation",
        r"self\.inventory_client\.get_transformations",
        r"self\.inventory_client\.get_transformation_summary",
    ]

    hits = search_in_file(file_path, patterns)

    for pattern, found in hits.items():
        marker = "✅" if found else "❌"
        call_name = pattern.replace(r"self\.inventory_client\.", "inventory_client.")
        print(f" {marker} {call_name}")

    return all(hits.values())
|
||||
|
||||
|
||||
def print_summary(results: List[bool]):
    """Print verification summary"""
    rule = "=" * 80
    print("\n" + rule)
    print("📋 VERIFICATION SUMMARY")
    print(rule)

    # Fixed component order must match the order verifications are run in
    components = [
        "Inventory Service Client",
        "Production Service",
        "Production API",
        "Integration Completeness"
    ]

    for index, (component, outcome) in enumerate(zip(components, results), start=1):
        verdict = "✅ PASS" if outcome else "❌ FAIL"
        print(f"{index}. {component}: {verdict}")

    passed = sum(results)
    total = len(results)
    print(f"\nOverall: {passed}/{total} components verified successfully")

    if passed == total:
        print("\n🎉 ALL VERIFICATIONS PASSED!")
        print("The transformation API integration is properly implemented.")
    else:
        print(f"\n⚠️ {total - passed} components need attention.")
        print("Some integration parts may be missing or incomplete.")

    feature_lines = [
        "\n" + rule,
        "🎯 INTEGRATION FEATURES IMPLEMENTED:",
        rule,
        "✅ Par-baked to fresh product transformation",
        "✅ Production batch completion with transformation",
        "✅ Transformation efficiency analytics",
        "✅ Batch-to-transformation linking",
        "✅ Inventory service client integration",
        "✅ RESTful API endpoints for transformations",
        "✅ Central bakery business model support",
        rule,
    ]
    print("\n".join(feature_lines))
|
||||
|
||||
|
||||
def main():
    """Main verification runner"""
    print("🔍 VERIFYING TRANSFORMATION API INTEGRATION")
    print("="*60)

    # Run each verification in a fixed order; order must match print_summary
    results = [
        verify_inventory_client(),
        verify_production_service(),
        verify_production_api(),
        verify_integration_completeness(),
    ]

    # Print summary
    print_summary(results)

    return all(results)
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # The bare builtin `exit()` is injected by site.py and is absent when the
    # interpreter runs with -S; raising SystemExit (what sys.exit does) is the
    # portable spelling and needs no import.
    success = main()
    raise SystemExit(0 if success else 1)
|
||||
Reference in New Issue
Block a user