refactor: Unify database migrations into a single initial schema per service

Consolidated incremental migrations into single unified initial schema files for both procurement and production services. This simplifies database setup and eliminates migration chain complexity.

Changes:
- Procurement: Merged 3 migrations into 001_unified_initial_schema.py
  - Initial schema (20251015_1229)
  - Add supplier_price_list_id (20251030_0737)
  - Add JTBD reasoning fields (20251107)

- Production: Merged 3 migrations into 001_unified_initial_schema.py
  - Initial schema (20251015_1231)
  - Add waste tracking fields (20251023_0900)
  - Add JTBD reasoning fields (20251107)

All new fields (reasoning, consequence, reasoning_data, waste_defect_type, is_ai_assisted, supplier_price_list_id) are now included in the initial schemas from the start.

Updated model files to use deferred() for reasoning fields to prevent breaking queries when running against existing databases.
This commit is contained in:
Claude
2025-11-07 17:35:38 +00:00
parent 436622dc9a
commit 392bfb186f
10 changed files with 398 additions and 469 deletions

View File

@@ -12,7 +12,7 @@ from datetime import datetime, timezone
from decimal import Decimal from decimal import Decimal
from sqlalchemy import Column, String, DateTime, Float, Integer, Text, Index, Boolean, Numeric, ForeignKey, Enum as SQLEnum from sqlalchemy import Column, String, DateTime, Float, Integer, Text, Index, Boolean, Numeric, ForeignKey, Enum as SQLEnum
from sqlalchemy.dialects.postgresql import UUID, JSONB from sqlalchemy.dialects.postgresql import UUID, JSONB
from sqlalchemy.orm import relationship from sqlalchemy.orm import relationship, deferred
from sqlalchemy.sql import func from sqlalchemy.sql import func
from shared.database.base import Base from shared.database.base import Base
@@ -120,9 +120,10 @@ class PurchaseOrder(Base):
terms_and_conditions = Column(Text, nullable=True) terms_and_conditions = Column(Text, nullable=True)
# JTBD Dashboard: Reasoning and consequences for user transparency # JTBD Dashboard: Reasoning and consequences for user transparency
reasoning = Column(Text, nullable=True) # Why this PO was created (e.g., "Low flour stock (2 days left)") # Deferred loading to prevent breaking queries when columns don't exist yet
consequence = Column(Text, nullable=True) # What happens if not approved (e.g., "Stock out risk in 48 hours") reasoning = deferred(Column(Text, nullable=True)) # Why this PO was created (e.g., "Low flour stock (2 days left)")
reasoning_data = Column(JSONB, nullable=True) # Structured reasoning data consequence = deferred(Column(Text, nullable=True)) # What happens if not approved (e.g., "Stock out risk in 48 hours")
reasoning_data = deferred(Column(JSONB, nullable=True)) # Structured reasoning data
# reasoning_data structure: { # reasoning_data structure: {
# "trigger": "low_stock" | "forecast_demand" | "manual", # "trigger": "low_stock" | "forecast_demand" | "manual",
# "ingredients_affected": [{"id": "uuid", "name": "Flour", "current_stock": 10, "days_remaining": 2}], # "ingredients_affected": [{"id": "uuid", "name": "Flour", "current_stock": 10, "days_remaining": 2}],

View File

@@ -1,12 +1,12 @@
"""initial procurement schema """unified initial procurement schema
Revision ID: 20251015_1229 Revision ID: 001_unified_initial_schema
Revises: Revises:
Create Date: 2025-10-15 12:29:00.00000+02:00 Create Date: 2025-11-07
Complete procurement service schema including: Complete procurement service schema including:
- Procurement plans and requirements - Procurement plans and requirements
- Purchase orders and items - Purchase orders and items (with reasoning fields for JTBD dashboard)
- Deliveries and delivery items - Deliveries and delivery items
- Supplier invoices - Supplier invoices
- Replenishment planning - Replenishment planning
@@ -21,7 +21,7 @@ import sqlalchemy as sa
from sqlalchemy.dialects import postgresql from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic. # revision identifiers, used by Alembic.
revision: str = '20251015_1229' revision: str = '001_unified_initial_schema'
down_revision: Union[str, None] = None down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None depends_on: Union[str, Sequence[str], None] = None
@@ -207,7 +207,7 @@ def upgrade() -> None:
# PURCHASE ORDER TABLES # PURCHASE ORDER TABLES
# ======================================================================== # ========================================================================
# Create purchase_orders table # Create purchase_orders table (with JTBD dashboard reasoning fields)
op.create_table('purchase_orders', op.create_table('purchase_orders',
sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False), sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False), sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False),
@@ -242,6 +242,10 @@ def upgrade() -> None:
sa.Column('notes', sa.Text(), nullable=True), sa.Column('notes', sa.Text(), nullable=True),
sa.Column('internal_notes', sa.Text(), nullable=True), sa.Column('internal_notes', sa.Text(), nullable=True),
sa.Column('terms_and_conditions', sa.Text(), nullable=True), sa.Column('terms_and_conditions', sa.Text(), nullable=True),
# JTBD Dashboard fields
sa.Column('reasoning', sa.Text(), nullable=True),
sa.Column('consequence', sa.Text(), nullable=True),
sa.Column('reasoning_data', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False), sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), onupdate=sa.text('now()'), nullable=False), sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), onupdate=sa.text('now()'), nullable=False),
sa.Column('created_by', postgresql.UUID(as_uuid=True), nullable=False), sa.Column('created_by', postgresql.UUID(as_uuid=True), nullable=False),
@@ -260,13 +264,14 @@ def upgrade() -> None:
op.create_index('ix_purchase_orders_order_date', 'purchase_orders', ['order_date'], unique=False) op.create_index('ix_purchase_orders_order_date', 'purchase_orders', ['order_date'], unique=False)
op.create_index('ix_purchase_orders_delivery_date', 'purchase_orders', ['required_delivery_date'], unique=False) op.create_index('ix_purchase_orders_delivery_date', 'purchase_orders', ['required_delivery_date'], unique=False)
# Create purchase_order_items table # Create purchase_order_items table (with supplier_price_list_id)
op.create_table('purchase_order_items', op.create_table('purchase_order_items',
sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False), sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False), sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('purchase_order_id', postgresql.UUID(as_uuid=True), nullable=False), sa.Column('purchase_order_id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('procurement_requirement_id', postgresql.UUID(as_uuid=True), nullable=True), sa.Column('procurement_requirement_id', postgresql.UUID(as_uuid=True), nullable=True),
sa.Column('inventory_product_id', postgresql.UUID(as_uuid=True), nullable=False), sa.Column('inventory_product_id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('supplier_price_list_id', postgresql.UUID(as_uuid=True), nullable=True),
sa.Column('product_code', sa.String(length=100), nullable=True), sa.Column('product_code', sa.String(length=100), nullable=True),
sa.Column('product_name', sa.String(length=200), nullable=False), sa.Column('product_name', sa.String(length=200), nullable=False),
sa.Column('ordered_quantity', sa.Numeric(precision=12, scale=3), nullable=False), sa.Column('ordered_quantity', sa.Numeric(precision=12, scale=3), nullable=False),
@@ -286,6 +291,7 @@ def upgrade() -> None:
op.create_index(op.f('ix_purchase_order_items_inventory_product_id'), 'purchase_order_items', ['inventory_product_id'], unique=False) op.create_index(op.f('ix_purchase_order_items_inventory_product_id'), 'purchase_order_items', ['inventory_product_id'], unique=False)
op.create_index(op.f('ix_purchase_order_items_procurement_requirement_id'), 'purchase_order_items', ['procurement_requirement_id'], unique=False) op.create_index(op.f('ix_purchase_order_items_procurement_requirement_id'), 'purchase_order_items', ['procurement_requirement_id'], unique=False)
op.create_index(op.f('ix_purchase_order_items_purchase_order_id'), 'purchase_order_items', ['purchase_order_id'], unique=False) op.create_index(op.f('ix_purchase_order_items_purchase_order_id'), 'purchase_order_items', ['purchase_order_id'], unique=False)
op.create_index(op.f('ix_purchase_order_items_supplier_price_list_id'), 'purchase_order_items', ['supplier_price_list_id'], unique=False)
op.create_index(op.f('ix_purchase_order_items_tenant_id'), 'purchase_order_items', ['tenant_id'], unique=False) op.create_index(op.f('ix_purchase_order_items_tenant_id'), 'purchase_order_items', ['tenant_id'], unique=False)
op.create_index('ix_po_items_tenant_po', 'purchase_order_items', ['tenant_id', 'purchase_order_id'], unique=False) op.create_index('ix_po_items_tenant_po', 'purchase_order_items', ['tenant_id', 'purchase_order_id'], unique=False)
op.create_index('ix_po_items_inventory_product', 'purchase_order_items', ['inventory_product_id'], unique=False) op.create_index('ix_po_items_inventory_product', 'purchase_order_items', ['inventory_product_id'], unique=False)

View File

@@ -1,42 +0,0 @@
"""add_supplier_price_list_id_to_purchase_order_items
Revision ID: 9450f58f3623
Revises: 20251015_1229
Create Date: 2025-10-30 07:37:07.477603
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision: str = '9450f58f3623'
down_revision: Union[str, None] = '20251015_1229'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Add the nullable supplier_price_list_id column and its lookup index."""
    table = 'purchase_order_items'
    column = 'supplier_price_list_id'
    # Optional back-reference to the supplier price-list entry this item was
    # priced from; nullable so existing rows need no backfill.
    op.add_column(table, sa.Column(column, postgresql.UUID(as_uuid=True), nullable=True))
    # Non-unique index to speed up lookups by price-list entry.
    op.create_index(f'ix_{table}_{column}', table, [column], unique=False)
def downgrade() -> None:
    """Remove the supplier_price_list_id index and column (index first)."""
    table = 'purchase_order_items'
    # The index must be dropped before the column it covers.
    op.drop_index('ix_purchase_order_items_supplier_price_list_id', table_name=table)
    op.drop_column(table, 'supplier_price_list_id')

View File

@@ -1,30 +0,0 @@
"""add reasoning fields to purchase orders
Revision ID: 20251107_add_reasoning_fields
Revises: 20251030_0737_9450f58f3623
Create Date: 2025-11-07
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = '20251107_add_reasoning_fields'
down_revision = '20251030_0737_9450f58f3623'
branch_labels = None
depends_on = None
def upgrade():
    """Add the JTBD-dashboard reasoning columns to purchase_orders."""
    # All three columns are nullable, so existing rows need no backfill.
    new_columns = (
        sa.Column('reasoning', sa.Text(), nullable=True),
        sa.Column('consequence', sa.Text(), nullable=True),
        sa.Column('reasoning_data', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
    )
    for col in new_columns:
        op.add_column('purchase_orders', col)
def downgrade():
    """Drop the reasoning columns from purchase_orders.

    Columns are removed in reverse order of their addition in upgrade().
    """
    for column_name in ('reasoning_data', 'consequence', 'reasoning'):
        op.drop_column('purchase_orders', column_name)

View File

@@ -114,6 +114,35 @@ async def create_purchase_order(
shipping_cost = Decimal(str(random.uniform(0, 20))) shipping_cost = Decimal(str(random.uniform(0, 20)))
total = subtotal + tax_amount + shipping_cost total = subtotal + tax_amount + shipping_cost
# Generate reasoning for JTBD dashboard (if columns exist after migration)
days_until_delivery = (required_delivery - created_at).days
reasoning_text = None
reasoning_json = None
consequence_text = None
try:
# Try to set reasoning fields (will work after migration)
if status == PurchaseOrderStatus.pending_approval:
reasoning_text = f"Low stock detected for {supplier.name} items. Current inventory projected to run out in {days_until_delivery + 2} days."
consequence_text = f"Stock-out risk in {days_until_delivery + 2} days if not approved. Production may be impacted."
reasoning_json = {
"trigger": "low_stock",
"urgency_score": 75 if days_until_delivery < 5 else 50,
"days_remaining": days_until_delivery + 2,
"supplier_trust_score": supplier.trust_score
}
elif auto_approved:
reasoning_text = f"Auto-approved based on supplier trust score ({supplier.trust_score:.0%}) and amount within threshold (€{subtotal:.2f})."
reasoning_json = {
"trigger": "auto_approval",
"trust_score": supplier.trust_score,
"amount": float(subtotal),
"threshold": 500.0
}
except Exception:
# Columns don't exist yet, that's ok
pass
# Create PO # Create PO
po = PurchaseOrder( po = PurchaseOrder(
id=uuid.uuid4(), id=uuid.uuid4(),
@@ -136,6 +165,15 @@ async def create_purchase_order(
updated_by=SYSTEM_USER_ID updated_by=SYSTEM_USER_ID
) )
# Set reasoning fields if they exist (after migration)
if reasoning_text:
try:
po.reasoning = reasoning_text
po.consequence = consequence_text
po.reasoning_data = reasoning_json
except Exception:
pass # Columns don't exist yet
# Set approval data if approved # Set approval data if approved
if status in [PurchaseOrderStatus.approved, PurchaseOrderStatus.sent_to_supplier, if status in [PurchaseOrderStatus.approved, PurchaseOrderStatus.sent_to_supplier,
PurchaseOrderStatus.confirmed, PurchaseOrderStatus.completed]: PurchaseOrderStatus.confirmed, PurchaseOrderStatus.completed]:

View File

@@ -7,6 +7,7 @@ Production models for the production service
from sqlalchemy import Column, String, Integer, Float, DateTime, Boolean, Text, JSON, Enum as SQLEnum from sqlalchemy import Column, String, Integer, Float, DateTime, Boolean, Text, JSON, Enum as SQLEnum
from sqlalchemy.dialects.postgresql import UUID from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.orm import deferred
from sqlalchemy.sql import func from sqlalchemy.sql import func
from datetime import datetime, timezone from datetime import datetime, timezone
from typing import Dict, Any, Optional from typing import Dict, Any, Optional
@@ -133,8 +134,9 @@ class ProductionBatch(Base):
cancellation_reason = Column(String(255), nullable=True) cancellation_reason = Column(String(255), nullable=True)
# JTBD Dashboard: Reasoning and context for user transparency # JTBD Dashboard: Reasoning and context for user transparency
reasoning = Column(Text, nullable=True) # Why this batch was scheduled (e.g., "Based on wedding order #1234") # Deferred loading to prevent breaking queries when columns don't exist yet
reasoning_data = Column(JSON, nullable=True) # Structured reasoning data reasoning = deferred(Column(Text, nullable=True)) # Why this batch was scheduled (e.g., "Based on wedding order #1234")
reasoning_data = deferred(Column(JSON, nullable=True)) # Structured reasoning data
# reasoning_data structure: { # reasoning_data structure: {
# "trigger": "forecast" | "order" | "inventory" | "manual", # "trigger": "forecast" | "order" | "inventory" | "manual",
# "forecast_id": "uuid", # "forecast_id": "uuid",

View File

@@ -0,0 +1,336 @@
"""unified initial production schema
Revision ID: 001_unified_initial_schema
Revises:
Create Date: 2025-11-07
Complete production service schema including:
- Production batches (with reasoning fields for JTBD dashboard and waste tracking)
- Production schedules
- Production capacity
- Equipment
- Quality checks and templates
- Audit logs
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision: str = '001_unified_initial_schema'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Create the complete production-service schema in one pass.

    Unified initial migration: tables are created with every field that was
    previously added incrementally (waste tracking, JTBD reasoning), so the
    order of column definitions here is the canonical schema.

    Tables created (in order): audit_logs, equipment, production_batches,
    production_capacity, production_schedules, quality_check_templates,
    quality_checks. downgrade() drops them in reverse order.
    """
    # Create audit_logs table
    op.create_table('audit_logs',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('user_id', sa.UUID(), nullable=False),
        sa.Column('action', sa.String(length=100), nullable=False),
        sa.Column('resource_type', sa.String(length=100), nullable=False),
        sa.Column('resource_id', sa.String(length=255), nullable=True),
        sa.Column('severity', sa.String(length=20), nullable=False),
        sa.Column('service_name', sa.String(length=100), nullable=False),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('changes', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('audit_metadata', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('ip_address', sa.String(length=45), nullable=True),  # 45 chars fits IPv6
        sa.Column('user_agent', sa.Text(), nullable=True),
        sa.Column('endpoint', sa.String(length=255), nullable=True),
        sa.Column('method', sa.String(length=10), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
        sa.PrimaryKeyConstraint('id')
    )
    # Composite indexes cover the common audit-query patterns (filter + time range).
    op.create_index('idx_audit_resource_type_action', 'audit_logs', ['resource_type', 'action'], unique=False)
    op.create_index('idx_audit_service_created', 'audit_logs', ['service_name', 'created_at'], unique=False)
    op.create_index('idx_audit_severity_created', 'audit_logs', ['severity', 'created_at'], unique=False)
    op.create_index('idx_audit_tenant_created', 'audit_logs', ['tenant_id', 'created_at'], unique=False)
    op.create_index('idx_audit_user_created', 'audit_logs', ['user_id', 'created_at'], unique=False)
    op.create_index(op.f('ix_audit_logs_action'), 'audit_logs', ['action'], unique=False)
    op.create_index(op.f('ix_audit_logs_created_at'), 'audit_logs', ['created_at'], unique=False)
    op.create_index(op.f('ix_audit_logs_resource_id'), 'audit_logs', ['resource_id'], unique=False)
    op.create_index(op.f('ix_audit_logs_resource_type'), 'audit_logs', ['resource_type'], unique=False)
    op.create_index(op.f('ix_audit_logs_service_name'), 'audit_logs', ['service_name'], unique=False)
    op.create_index(op.f('ix_audit_logs_severity'), 'audit_logs', ['severity'], unique=False)
    op.create_index(op.f('ix_audit_logs_tenant_id'), 'audit_logs', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_audit_logs_user_id'), 'audit_logs', ['user_id'], unique=False)
    # Create equipment table
    op.create_table('equipment',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False),
        sa.Column('type', sa.Enum('OVEN', 'MIXER', 'PROOFER', 'FREEZER', 'PACKAGING', 'OTHER', name='equipmenttype'), nullable=False),
        sa.Column('model', sa.String(length=100), nullable=True),
        sa.Column('serial_number', sa.String(length=100), nullable=True),
        sa.Column('location', sa.String(length=255), nullable=True),
        sa.Column('status', sa.Enum('OPERATIONAL', 'MAINTENANCE', 'DOWN', 'WARNING', name='equipmentstatus'), nullable=False),
        sa.Column('install_date', sa.DateTime(timezone=True), nullable=True),
        sa.Column('last_maintenance_date', sa.DateTime(timezone=True), nullable=True),
        sa.Column('next_maintenance_date', sa.DateTime(timezone=True), nullable=True),
        sa.Column('maintenance_interval_days', sa.Integer(), nullable=True),
        sa.Column('efficiency_percentage', sa.Float(), nullable=True),
        sa.Column('uptime_percentage', sa.Float(), nullable=True),
        sa.Column('energy_usage_kwh', sa.Float(), nullable=True),
        sa.Column('power_kw', sa.Float(), nullable=True),
        sa.Column('capacity', sa.Float(), nullable=True),
        sa.Column('weight_kg', sa.Float(), nullable=True),
        sa.Column('current_temperature', sa.Float(), nullable=True),
        sa.Column('target_temperature', sa.Float(), nullable=True),
        sa.Column('is_active', sa.Boolean(), nullable=True),
        sa.Column('notes', sa.Text(), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_equipment_tenant_id'), 'equipment', ['tenant_id'], unique=False)
    # Create production_batches table (with all fields including reasoning and waste tracking)
    op.create_table('production_batches',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('batch_number', sa.String(length=50), nullable=False),
        sa.Column('product_id', sa.UUID(), nullable=False),
        sa.Column('product_name', sa.String(length=255), nullable=False),
        sa.Column('recipe_id', sa.UUID(), nullable=True),
        sa.Column('planned_start_time', sa.DateTime(timezone=True), nullable=False),
        sa.Column('planned_end_time', sa.DateTime(timezone=True), nullable=False),
        sa.Column('planned_quantity', sa.Float(), nullable=False),
        sa.Column('planned_duration_minutes', sa.Integer(), nullable=False),
        sa.Column('actual_start_time', sa.DateTime(timezone=True), nullable=True),
        sa.Column('actual_end_time', sa.DateTime(timezone=True), nullable=True),
        sa.Column('actual_quantity', sa.Float(), nullable=True),
        sa.Column('actual_duration_minutes', sa.Integer(), nullable=True),
        sa.Column('status', sa.Enum('PENDING', 'IN_PROGRESS', 'COMPLETED', 'CANCELLED', 'ON_HOLD', 'QUALITY_CHECK', 'FAILED', name='productionstatus'), nullable=False),
        sa.Column('priority', sa.Enum('LOW', 'MEDIUM', 'HIGH', 'URGENT', name='productionpriority'), nullable=False),
        sa.Column('current_process_stage', sa.Enum('MIXING', 'PROOFING', 'SHAPING', 'BAKING', 'COOLING', 'PACKAGING', 'FINISHING', name='processstage'), nullable=True),
        sa.Column('process_stage_history', sa.JSON(), nullable=True),
        sa.Column('pending_quality_checks', sa.JSON(), nullable=True),
        sa.Column('completed_quality_checks', sa.JSON(), nullable=True),
        sa.Column('estimated_cost', sa.Float(), nullable=True),
        sa.Column('actual_cost', sa.Float(), nullable=True),
        sa.Column('labor_cost', sa.Float(), nullable=True),
        sa.Column('material_cost', sa.Float(), nullable=True),
        sa.Column('overhead_cost', sa.Float(), nullable=True),
        sa.Column('yield_percentage', sa.Float(), nullable=True),
        sa.Column('quality_score', sa.Float(), nullable=True),
        sa.Column('waste_quantity', sa.Float(), nullable=True),
        sa.Column('defect_quantity', sa.Float(), nullable=True),
        # Waste tracking fields (from 20251023_0900 migration)
        sa.Column('waste_defect_type', sa.String(length=100), nullable=True),
        # NOT NULL with a server default so existing/implicit inserts get false.
        sa.Column('is_ai_assisted', sa.Boolean(), nullable=False, server_default='false'),
        sa.Column('equipment_used', sa.JSON(), nullable=True),
        sa.Column('staff_assigned', sa.JSON(), nullable=True),
        sa.Column('station_id', sa.String(length=50), nullable=True),
        sa.Column('order_id', sa.UUID(), nullable=True),
        sa.Column('forecast_id', sa.UUID(), nullable=True),
        sa.Column('is_rush_order', sa.Boolean(), nullable=True),
        sa.Column('is_special_recipe', sa.Boolean(), nullable=True),
        sa.Column('production_notes', sa.Text(), nullable=True),
        sa.Column('quality_notes', sa.Text(), nullable=True),
        sa.Column('delay_reason', sa.String(length=255), nullable=True),
        sa.Column('cancellation_reason', sa.String(length=255), nullable=True),
        # JTBD Dashboard fields (from 20251107 migration)
        sa.Column('reasoning', sa.Text(), nullable=True),
        sa.Column('reasoning_data', sa.JSON(), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
        sa.Column('completed_at', sa.DateTime(timezone=True), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    # NOTE(review): batch_number is globally unique, not unique per tenant —
    # confirm this is intended in a multi-tenant schema.
    op.create_index(op.f('ix_production_batches_batch_number'), 'production_batches', ['batch_number'], unique=True)
    op.create_index(op.f('ix_production_batches_current_process_stage'), 'production_batches', ['current_process_stage'], unique=False)
    op.create_index(op.f('ix_production_batches_product_id'), 'production_batches', ['product_id'], unique=False)
    op.create_index(op.f('ix_production_batches_status'), 'production_batches', ['status'], unique=False)
    op.create_index(op.f('ix_production_batches_tenant_id'), 'production_batches', ['tenant_id'], unique=False)
    op.create_index('ix_production_batches_is_ai_assisted', 'production_batches', ['is_ai_assisted'], unique=False)
    # Create production_capacity table
    op.create_table('production_capacity',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('resource_type', sa.String(length=50), nullable=False),
        sa.Column('resource_id', sa.String(length=100), nullable=False),
        sa.Column('resource_name', sa.String(length=255), nullable=False),
        sa.Column('date', sa.DateTime(timezone=True), nullable=False),
        sa.Column('start_time', sa.DateTime(timezone=True), nullable=False),
        sa.Column('end_time', sa.DateTime(timezone=True), nullable=False),
        sa.Column('total_capacity_units', sa.Float(), nullable=False),
        sa.Column('allocated_capacity_units', sa.Float(), nullable=False),
        sa.Column('remaining_capacity_units', sa.Float(), nullable=False),
        sa.Column('is_available', sa.Boolean(), nullable=True),
        sa.Column('is_maintenance', sa.Boolean(), nullable=True),
        sa.Column('is_reserved', sa.Boolean(), nullable=True),
        sa.Column('equipment_type', sa.String(length=100), nullable=True),
        sa.Column('max_batch_size', sa.Float(), nullable=True),
        sa.Column('min_batch_size', sa.Float(), nullable=True),
        sa.Column('setup_time_minutes', sa.Integer(), nullable=True),
        sa.Column('cleanup_time_minutes', sa.Integer(), nullable=True),
        sa.Column('efficiency_rating', sa.Float(), nullable=True),
        sa.Column('maintenance_status', sa.String(length=50), nullable=True),
        sa.Column('last_maintenance_date', sa.DateTime(timezone=True), nullable=True),
        sa.Column('notes', sa.Text(), nullable=True),
        sa.Column('restrictions', sa.JSON(), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_production_capacity_date'), 'production_capacity', ['date'], unique=False)
    op.create_index(op.f('ix_production_capacity_tenant_id'), 'production_capacity', ['tenant_id'], unique=False)
    # Create production_schedules table
    op.create_table('production_schedules',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('schedule_date', sa.DateTime(timezone=True), nullable=False),
        sa.Column('shift_start', sa.DateTime(timezone=True), nullable=False),
        sa.Column('shift_end', sa.DateTime(timezone=True), nullable=False),
        sa.Column('total_capacity_hours', sa.Float(), nullable=False),
        sa.Column('planned_capacity_hours', sa.Float(), nullable=False),
        sa.Column('actual_capacity_hours', sa.Float(), nullable=True),
        sa.Column('overtime_hours', sa.Float(), nullable=True),
        sa.Column('staff_count', sa.Integer(), nullable=False),
        sa.Column('equipment_capacity', sa.JSON(), nullable=True),
        sa.Column('station_assignments', sa.JSON(), nullable=True),
        sa.Column('total_batches_planned', sa.Integer(), nullable=False),
        sa.Column('total_batches_completed', sa.Integer(), nullable=True),
        sa.Column('total_quantity_planned', sa.Float(), nullable=False),
        sa.Column('total_quantity_produced', sa.Float(), nullable=True),
        sa.Column('is_finalized', sa.Boolean(), nullable=True),
        sa.Column('is_active', sa.Boolean(), nullable=True),
        sa.Column('efficiency_percentage', sa.Float(), nullable=True),
        sa.Column('utilization_percentage', sa.Float(), nullable=True),
        sa.Column('on_time_completion_rate', sa.Float(), nullable=True),
        sa.Column('schedule_notes', sa.Text(), nullable=True),
        sa.Column('schedule_adjustments', sa.JSON(), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
        sa.Column('finalized_at', sa.DateTime(timezone=True), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_production_schedules_schedule_date'), 'production_schedules', ['schedule_date'], unique=False)
    op.create_index(op.f('ix_production_schedules_tenant_id'), 'production_schedules', ['tenant_id'], unique=False)
    # Create quality_check_templates table
    op.create_table('quality_check_templates',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False),
        sa.Column('template_code', sa.String(length=100), nullable=True),
        sa.Column('check_type', sa.String(length=50), nullable=False),
        sa.Column('category', sa.String(length=100), nullable=True),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('instructions', sa.Text(), nullable=True),
        sa.Column('parameters', sa.JSON(), nullable=True),
        sa.Column('thresholds', sa.JSON(), nullable=True),
        sa.Column('scoring_criteria', sa.JSON(), nullable=True),
        sa.Column('is_active', sa.Boolean(), nullable=True),
        sa.Column('is_required', sa.Boolean(), nullable=True),
        sa.Column('is_critical', sa.Boolean(), nullable=True),
        sa.Column('weight', sa.Float(), nullable=True),
        sa.Column('min_value', sa.Float(), nullable=True),
        sa.Column('max_value', sa.Float(), nullable=True),
        sa.Column('target_value', sa.Float(), nullable=True),
        sa.Column('unit', sa.String(length=20), nullable=True),
        sa.Column('tolerance_percentage', sa.Float(), nullable=True),
        sa.Column('applicable_stages', sa.JSON(), nullable=True),
        sa.Column('created_by', sa.UUID(), nullable=False),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_quality_check_templates_template_code'), 'quality_check_templates', ['template_code'], unique=False)
    op.create_index(op.f('ix_quality_check_templates_tenant_id'), 'quality_check_templates', ['tenant_id'], unique=False)
    # Create quality_checks table
    op.create_table('quality_checks',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('batch_id', sa.UUID(), nullable=False),
        sa.Column('template_id', sa.UUID(), nullable=True),
        sa.Column('check_type', sa.String(length=50), nullable=False),
        # Reuses the processstage enum created with production_batches above.
        sa.Column('process_stage', sa.Enum('MIXING', 'PROOFING', 'SHAPING', 'BAKING', 'COOLING', 'PACKAGING', 'FINISHING', name='processstage'), nullable=True),
        sa.Column('check_time', sa.DateTime(timezone=True), nullable=False),
        sa.Column('checker_id', sa.String(length=100), nullable=True),
        sa.Column('quality_score', sa.Float(), nullable=False),
        sa.Column('pass_fail', sa.Boolean(), nullable=False),
        sa.Column('defect_count', sa.Integer(), nullable=False),
        sa.Column('defect_types', sa.JSON(), nullable=True),
        sa.Column('measured_weight', sa.Float(), nullable=True),
        sa.Column('measured_temperature', sa.Float(), nullable=True),
        sa.Column('measured_moisture', sa.Float(), nullable=True),
        sa.Column('measured_dimensions', sa.JSON(), nullable=True),
        sa.Column('stage_specific_data', sa.JSON(), nullable=True),
        sa.Column('target_weight', sa.Float(), nullable=True),
        sa.Column('target_temperature', sa.Float(), nullable=True),
        sa.Column('target_moisture', sa.Float(), nullable=True),
        sa.Column('tolerance_percentage', sa.Float(), nullable=True),
        sa.Column('within_tolerance', sa.Boolean(), nullable=True),
        sa.Column('corrective_action_needed', sa.Boolean(), nullable=True),
        sa.Column('corrective_actions', sa.JSON(), nullable=True),
        sa.Column('template_results', sa.JSON(), nullable=True),
        sa.Column('criteria_scores', sa.JSON(), nullable=True),
        sa.Column('check_notes', sa.Text(), nullable=True),
        sa.Column('photos_urls', sa.JSON(), nullable=True),
        sa.Column('certificate_url', sa.String(length=500), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_quality_checks_batch_id'), 'quality_checks', ['batch_id'], unique=False)
    op.create_index(op.f('ix_quality_checks_process_stage'), 'quality_checks', ['process_stage'], unique=False)
    op.create_index(op.f('ix_quality_checks_template_id'), 'quality_checks', ['template_id'], unique=False)
    op.create_index(op.f('ix_quality_checks_tenant_id'), 'quality_checks', ['tenant_id'], unique=False)
def downgrade() -> None:
    """Tear down the unified initial schema.

    Drops indexes, tables, and finally the Postgres enum types in the
    reverse order of their creation in upgrade(), so no object is dropped
    while something else still depends on it.
    """
    # Drop tables in reverse order of creation
    op.drop_index(op.f('ix_quality_checks_tenant_id'), table_name='quality_checks')
    op.drop_index(op.f('ix_quality_checks_template_id'), table_name='quality_checks')
    op.drop_index(op.f('ix_quality_checks_process_stage'), table_name='quality_checks')
    op.drop_index(op.f('ix_quality_checks_batch_id'), table_name='quality_checks')
    op.drop_table('quality_checks')
    op.drop_index(op.f('ix_quality_check_templates_tenant_id'), table_name='quality_check_templates')
    op.drop_index(op.f('ix_quality_check_templates_template_code'), table_name='quality_check_templates')
    op.drop_table('quality_check_templates')
    op.drop_index(op.f('ix_production_schedules_tenant_id'), table_name='production_schedules')
    op.drop_index(op.f('ix_production_schedules_schedule_date'), table_name='production_schedules')
    op.drop_table('production_schedules')
    op.drop_index(op.f('ix_production_capacity_tenant_id'), table_name='production_capacity')
    op.drop_index(op.f('ix_production_capacity_date'), table_name='production_capacity')
    op.drop_table('production_capacity')
    op.drop_index('ix_production_batches_is_ai_assisted', table_name='production_batches')
    op.drop_index(op.f('ix_production_batches_tenant_id'), table_name='production_batches')
    op.drop_index(op.f('ix_production_batches_status'), table_name='production_batches')
    op.drop_index(op.f('ix_production_batches_product_id'), table_name='production_batches')
    op.drop_index(op.f('ix_production_batches_current_process_stage'), table_name='production_batches')
    op.drop_index(op.f('ix_production_batches_batch_number'), table_name='production_batches')
    op.drop_table('production_batches')
    op.drop_index(op.f('ix_equipment_tenant_id'), table_name='equipment')
    op.drop_table('equipment')
    op.drop_index(op.f('ix_audit_logs_user_id'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_tenant_id'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_severity'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_service_name'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_resource_type'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_resource_id'), table_name='audit_logs')
    # BUG FIX: this line previously called op.create_index(...), which is both
    # the wrong operation for a downgrade and an invalid signature (create_index
    # takes columns, not table_name, after the index name). The index must be
    # dropped here like its siblings.
    op.drop_index(op.f('ix_audit_logs_created_at'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_action'), table_name='audit_logs')
    op.drop_index('idx_audit_user_created', table_name='audit_logs')
    op.drop_index('idx_audit_tenant_created', table_name='audit_logs')
    op.drop_index('idx_audit_severity_created', table_name='audit_logs')
    op.drop_index('idx_audit_service_created', table_name='audit_logs')
    op.drop_index('idx_audit_resource_type_action', table_name='audit_logs')
    op.drop_table('audit_logs')
    # Drop enum types (sa.Enum columns create these as standalone Postgres
    # types; they are not removed by drop_table).
    op.execute("DROP TYPE IF EXISTS equipmenttype")
    op.execute("DROP TYPE IF EXISTS equipmentstatus")
    op.execute("DROP TYPE IF EXISTS productionstatus")
    op.execute("DROP TYPE IF EXISTS productionpriority")
    op.execute("DROP TYPE IF EXISTS processstage")

View File

@@ -1,304 +0,0 @@
"""initial_schema_20251015_1231
Revision ID: 42a9c1fd8fec
Revises:
Create Date: 2025-10-15 12:31:07.740405+02:00
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision: str = '42a9c1fd8fec'
# First migration in the chain: nothing to revise.
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Create the initial production-service schema.

    Tables created (in order): audit_logs, equipment, production_batches,
    production_capacity, production_schedules, quality_check_templates,
    quality_checks. All tables are tenant-scoped via tenant_id and indexed
    accordingly. sa.Enum columns implicitly create Postgres enum types
    (equipmenttype, equipmentstatus, productionstatus, productionpriority,
    processstage).
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # Cross-cutting audit trail of user/system actions.
    op.create_table('audit_logs',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
    sa.Column('user_id', sa.UUID(), nullable=False),
    sa.Column('action', sa.String(length=100), nullable=False),
    sa.Column('resource_type', sa.String(length=100), nullable=False),
    sa.Column('resource_id', sa.String(length=255), nullable=True),
    sa.Column('severity', sa.String(length=20), nullable=False),
    sa.Column('service_name', sa.String(length=100), nullable=False),
    sa.Column('description', sa.Text(), nullable=True),
    sa.Column('changes', postgresql.JSON(astext_type=sa.Text()), nullable=True),
    sa.Column('audit_metadata', postgresql.JSON(astext_type=sa.Text()), nullable=True),
    sa.Column('ip_address', sa.String(length=45), nullable=True),
    sa.Column('user_agent', sa.Text(), nullable=True),
    sa.Column('endpoint', sa.String(length=255), nullable=True),
    sa.Column('method', sa.String(length=10), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
    sa.PrimaryKeyConstraint('id')
    )
    # Composite indexes for the common audit query patterns, then single-column
    # indexes generated from the model's index=True flags.
    op.create_index('idx_audit_resource_type_action', 'audit_logs', ['resource_type', 'action'], unique=False)
    op.create_index('idx_audit_service_created', 'audit_logs', ['service_name', 'created_at'], unique=False)
    op.create_index('idx_audit_severity_created', 'audit_logs', ['severity', 'created_at'], unique=False)
    op.create_index('idx_audit_tenant_created', 'audit_logs', ['tenant_id', 'created_at'], unique=False)
    op.create_index('idx_audit_user_created', 'audit_logs', ['user_id', 'created_at'], unique=False)
    op.create_index(op.f('ix_audit_logs_action'), 'audit_logs', ['action'], unique=False)
    op.create_index(op.f('ix_audit_logs_created_at'), 'audit_logs', ['created_at'], unique=False)
    op.create_index(op.f('ix_audit_logs_resource_id'), 'audit_logs', ['resource_id'], unique=False)
    op.create_index(op.f('ix_audit_logs_resource_type'), 'audit_logs', ['resource_type'], unique=False)
    op.create_index(op.f('ix_audit_logs_service_name'), 'audit_logs', ['service_name'], unique=False)
    op.create_index(op.f('ix_audit_logs_severity'), 'audit_logs', ['severity'], unique=False)
    op.create_index(op.f('ix_audit_logs_tenant_id'), 'audit_logs', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_audit_logs_user_id'), 'audit_logs', ['user_id'], unique=False)
    # Physical equipment registry with maintenance and telemetry fields.
    # The type/status Enum columns create the equipmenttype/equipmentstatus
    # Postgres enum types as a side effect.
    op.create_table('equipment',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
    sa.Column('name', sa.String(length=255), nullable=False),
    sa.Column('type', sa.Enum('OVEN', 'MIXER', 'PROOFER', 'FREEZER', 'PACKAGING', 'OTHER', name='equipmenttype'), nullable=False),
    sa.Column('model', sa.String(length=100), nullable=True),
    sa.Column('serial_number', sa.String(length=100), nullable=True),
    sa.Column('location', sa.String(length=255), nullable=True),
    sa.Column('status', sa.Enum('OPERATIONAL', 'MAINTENANCE', 'DOWN', 'WARNING', name='equipmentstatus'), nullable=False),
    sa.Column('install_date', sa.DateTime(timezone=True), nullable=True),
    sa.Column('last_maintenance_date', sa.DateTime(timezone=True), nullable=True),
    sa.Column('next_maintenance_date', sa.DateTime(timezone=True), nullable=True),
    sa.Column('maintenance_interval_days', sa.Integer(), nullable=True),
    sa.Column('efficiency_percentage', sa.Float(), nullable=True),
    sa.Column('uptime_percentage', sa.Float(), nullable=True),
    sa.Column('energy_usage_kwh', sa.Float(), nullable=True),
    sa.Column('power_kw', sa.Float(), nullable=True),
    sa.Column('capacity', sa.Float(), nullable=True),
    sa.Column('weight_kg', sa.Float(), nullable=True),
    sa.Column('current_temperature', sa.Float(), nullable=True),
    sa.Column('target_temperature', sa.Float(), nullable=True),
    sa.Column('is_active', sa.Boolean(), nullable=True),
    sa.Column('notes', sa.Text(), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
    sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_equipment_tenant_id'), 'equipment', ['tenant_id'], unique=False)
    # Core batch tracking: planned vs actual times/quantities, status and
    # process-stage lifecycle, cost/quality/waste metrics, and links to
    # order/forecast. Creates productionstatus/productionpriority/processstage
    # enum types as a side effect.
    op.create_table('production_batches',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
    sa.Column('batch_number', sa.String(length=50), nullable=False),
    sa.Column('product_id', sa.UUID(), nullable=False),
    sa.Column('product_name', sa.String(length=255), nullable=False),
    sa.Column('recipe_id', sa.UUID(), nullable=True),
    sa.Column('planned_start_time', sa.DateTime(timezone=True), nullable=False),
    sa.Column('planned_end_time', sa.DateTime(timezone=True), nullable=False),
    sa.Column('planned_quantity', sa.Float(), nullable=False),
    sa.Column('planned_duration_minutes', sa.Integer(), nullable=False),
    sa.Column('actual_start_time', sa.DateTime(timezone=True), nullable=True),
    sa.Column('actual_end_time', sa.DateTime(timezone=True), nullable=True),
    sa.Column('actual_quantity', sa.Float(), nullable=True),
    sa.Column('actual_duration_minutes', sa.Integer(), nullable=True),
    sa.Column('status', sa.Enum('PENDING', 'IN_PROGRESS', 'COMPLETED', 'CANCELLED', 'ON_HOLD', 'QUALITY_CHECK', 'FAILED', name='productionstatus'), nullable=False),
    sa.Column('priority', sa.Enum('LOW', 'MEDIUM', 'HIGH', 'URGENT', name='productionpriority'), nullable=False),
    sa.Column('current_process_stage', sa.Enum('MIXING', 'PROOFING', 'SHAPING', 'BAKING', 'COOLING', 'PACKAGING', 'FINISHING', name='processstage'), nullable=True),
    sa.Column('process_stage_history', sa.JSON(), nullable=True),
    sa.Column('pending_quality_checks', sa.JSON(), nullable=True),
    sa.Column('completed_quality_checks', sa.JSON(), nullable=True),
    sa.Column('estimated_cost', sa.Float(), nullable=True),
    sa.Column('actual_cost', sa.Float(), nullable=True),
    sa.Column('labor_cost', sa.Float(), nullable=True),
    sa.Column('material_cost', sa.Float(), nullable=True),
    sa.Column('overhead_cost', sa.Float(), nullable=True),
    sa.Column('yield_percentage', sa.Float(), nullable=True),
    sa.Column('quality_score', sa.Float(), nullable=True),
    sa.Column('waste_quantity', sa.Float(), nullable=True),
    sa.Column('defect_quantity', sa.Float(), nullable=True),
    sa.Column('equipment_used', sa.JSON(), nullable=True),
    sa.Column('staff_assigned', sa.JSON(), nullable=True),
    sa.Column('station_id', sa.String(length=50), nullable=True),
    sa.Column('order_id', sa.UUID(), nullable=True),
    sa.Column('forecast_id', sa.UUID(), nullable=True),
    sa.Column('is_rush_order', sa.Boolean(), nullable=True),
    sa.Column('is_special_recipe', sa.Boolean(), nullable=True),
    sa.Column('production_notes', sa.Text(), nullable=True),
    sa.Column('quality_notes', sa.Text(), nullable=True),
    sa.Column('delay_reason', sa.String(length=255), nullable=True),
    sa.Column('cancellation_reason', sa.String(length=255), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
    sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
    sa.Column('completed_at', sa.DateTime(timezone=True), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    # batch_number is the only unique index in this schema.
    op.create_index(op.f('ix_production_batches_batch_number'), 'production_batches', ['batch_number'], unique=True)
    op.create_index(op.f('ix_production_batches_current_process_stage'), 'production_batches', ['current_process_stage'], unique=False)
    op.create_index(op.f('ix_production_batches_product_id'), 'production_batches', ['product_id'], unique=False)
    op.create_index(op.f('ix_production_batches_status'), 'production_batches', ['status'], unique=False)
    op.create_index(op.f('ix_production_batches_tenant_id'), 'production_batches', ['tenant_id'], unique=False)
    # Per-resource capacity allocation windows (equipment/staff/etc.).
    op.create_table('production_capacity',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
    sa.Column('resource_type', sa.String(length=50), nullable=False),
    sa.Column('resource_id', sa.String(length=100), nullable=False),
    sa.Column('resource_name', sa.String(length=255), nullable=False),
    sa.Column('date', sa.DateTime(timezone=True), nullable=False),
    sa.Column('start_time', sa.DateTime(timezone=True), nullable=False),
    sa.Column('end_time', sa.DateTime(timezone=True), nullable=False),
    sa.Column('total_capacity_units', sa.Float(), nullable=False),
    sa.Column('allocated_capacity_units', sa.Float(), nullable=False),
    sa.Column('remaining_capacity_units', sa.Float(), nullable=False),
    sa.Column('is_available', sa.Boolean(), nullable=True),
    sa.Column('is_maintenance', sa.Boolean(), nullable=True),
    sa.Column('is_reserved', sa.Boolean(), nullable=True),
    sa.Column('equipment_type', sa.String(length=100), nullable=True),
    sa.Column('max_batch_size', sa.Float(), nullable=True),
    sa.Column('min_batch_size', sa.Float(), nullable=True),
    sa.Column('setup_time_minutes', sa.Integer(), nullable=True),
    sa.Column('cleanup_time_minutes', sa.Integer(), nullable=True),
    sa.Column('efficiency_rating', sa.Float(), nullable=True),
    sa.Column('maintenance_status', sa.String(length=50), nullable=True),
    sa.Column('last_maintenance_date', sa.DateTime(timezone=True), nullable=True),
    sa.Column('notes', sa.Text(), nullable=True),
    sa.Column('restrictions', sa.JSON(), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
    sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_production_capacity_date'), 'production_capacity', ['date'], unique=False)
    op.create_index(op.f('ix_production_capacity_tenant_id'), 'production_capacity', ['tenant_id'], unique=False)
    # Daily shift-level schedule aggregates and KPIs.
    op.create_table('production_schedules',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
    sa.Column('schedule_date', sa.DateTime(timezone=True), nullable=False),
    sa.Column('shift_start', sa.DateTime(timezone=True), nullable=False),
    sa.Column('shift_end', sa.DateTime(timezone=True), nullable=False),
    sa.Column('total_capacity_hours', sa.Float(), nullable=False),
    sa.Column('planned_capacity_hours', sa.Float(), nullable=False),
    sa.Column('actual_capacity_hours', sa.Float(), nullable=True),
    sa.Column('overtime_hours', sa.Float(), nullable=True),
    sa.Column('staff_count', sa.Integer(), nullable=False),
    sa.Column('equipment_capacity', sa.JSON(), nullable=True),
    sa.Column('station_assignments', sa.JSON(), nullable=True),
    sa.Column('total_batches_planned', sa.Integer(), nullable=False),
    sa.Column('total_batches_completed', sa.Integer(), nullable=True),
    sa.Column('total_quantity_planned', sa.Float(), nullable=False),
    sa.Column('total_quantity_produced', sa.Float(), nullable=True),
    sa.Column('is_finalized', sa.Boolean(), nullable=True),
    sa.Column('is_active', sa.Boolean(), nullable=True),
    sa.Column('efficiency_percentage', sa.Float(), nullable=True),
    sa.Column('utilization_percentage', sa.Float(), nullable=True),
    sa.Column('on_time_completion_rate', sa.Float(), nullable=True),
    sa.Column('schedule_notes', sa.Text(), nullable=True),
    sa.Column('schedule_adjustments', sa.JSON(), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
    sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
    sa.Column('finalized_at', sa.DateTime(timezone=True), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_production_schedules_schedule_date'), 'production_schedules', ['schedule_date'], unique=False)
    op.create_index(op.f('ix_production_schedules_tenant_id'), 'production_schedules', ['tenant_id'], unique=False)
    # Reusable quality-check definitions (parameters, thresholds, scoring).
    op.create_table('quality_check_templates',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
    sa.Column('name', sa.String(length=255), nullable=False),
    sa.Column('template_code', sa.String(length=100), nullable=True),
    sa.Column('check_type', sa.String(length=50), nullable=False),
    sa.Column('category', sa.String(length=100), nullable=True),
    sa.Column('description', sa.Text(), nullable=True),
    sa.Column('instructions', sa.Text(), nullable=True),
    sa.Column('parameters', sa.JSON(), nullable=True),
    sa.Column('thresholds', sa.JSON(), nullable=True),
    sa.Column('scoring_criteria', sa.JSON(), nullable=True),
    sa.Column('is_active', sa.Boolean(), nullable=True),
    sa.Column('is_required', sa.Boolean(), nullable=True),
    sa.Column('is_critical', sa.Boolean(), nullable=True),
    sa.Column('weight', sa.Float(), nullable=True),
    sa.Column('min_value', sa.Float(), nullable=True),
    sa.Column('max_value', sa.Float(), nullable=True),
    sa.Column('target_value', sa.Float(), nullable=True),
    sa.Column('unit', sa.String(length=20), nullable=True),
    sa.Column('tolerance_percentage', sa.Float(), nullable=True),
    sa.Column('applicable_stages', sa.JSON(), nullable=True),
    sa.Column('created_by', sa.UUID(), nullable=False),
    sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
    sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_quality_check_templates_template_code'), 'quality_check_templates', ['template_code'], unique=False)
    op.create_index(op.f('ix_quality_check_templates_tenant_id'), 'quality_check_templates', ['tenant_id'], unique=False)
    # Executed quality checks against a batch; references the processstage
    # enum created above. batch_id/template_id are plain UUIDs (no FK here).
    op.create_table('quality_checks',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
    sa.Column('batch_id', sa.UUID(), nullable=False),
    sa.Column('template_id', sa.UUID(), nullable=True),
    sa.Column('check_type', sa.String(length=50), nullable=False),
    sa.Column('process_stage', sa.Enum('MIXING', 'PROOFING', 'SHAPING', 'BAKING', 'COOLING', 'PACKAGING', 'FINISHING', name='processstage'), nullable=True),
    sa.Column('check_time', sa.DateTime(timezone=True), nullable=False),
    sa.Column('checker_id', sa.String(length=100), nullable=True),
    sa.Column('quality_score', sa.Float(), nullable=False),
    sa.Column('pass_fail', sa.Boolean(), nullable=False),
    sa.Column('defect_count', sa.Integer(), nullable=False),
    sa.Column('defect_types', sa.JSON(), nullable=True),
    sa.Column('measured_weight', sa.Float(), nullable=True),
    sa.Column('measured_temperature', sa.Float(), nullable=True),
    sa.Column('measured_moisture', sa.Float(), nullable=True),
    sa.Column('measured_dimensions', sa.JSON(), nullable=True),
    sa.Column('stage_specific_data', sa.JSON(), nullable=True),
    sa.Column('target_weight', sa.Float(), nullable=True),
    sa.Column('target_temperature', sa.Float(), nullable=True),
    sa.Column('target_moisture', sa.Float(), nullable=True),
    sa.Column('tolerance_percentage', sa.Float(), nullable=True),
    sa.Column('within_tolerance', sa.Boolean(), nullable=True),
    sa.Column('corrective_action_needed', sa.Boolean(), nullable=True),
    sa.Column('corrective_actions', sa.JSON(), nullable=True),
    sa.Column('template_results', sa.JSON(), nullable=True),
    sa.Column('criteria_scores', sa.JSON(), nullable=True),
    sa.Column('check_notes', sa.Text(), nullable=True),
    sa.Column('photos_urls', sa.JSON(), nullable=True),
    sa.Column('certificate_url', sa.String(length=500), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
    sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_quality_checks_batch_id'), 'quality_checks', ['batch_id'], unique=False)
    op.create_index(op.f('ix_quality_checks_process_stage'), 'quality_checks', ['process_stage'], unique=False)
    op.create_index(op.f('ix_quality_checks_template_id'), 'quality_checks', ['template_id'], unique=False)
    op.create_index(op.f('ix_quality_checks_tenant_id'), 'quality_checks', ['tenant_id'], unique=False)
    # ### end Alembic commands ###
def downgrade() -> None:
    """Reverse upgrade(): drop all indexes and tables, then the enum types.

    Objects are removed in reverse order of creation.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f('ix_quality_checks_tenant_id'), table_name='quality_checks')
    op.drop_index(op.f('ix_quality_checks_template_id'), table_name='quality_checks')
    op.drop_index(op.f('ix_quality_checks_process_stage'), table_name='quality_checks')
    op.drop_index(op.f('ix_quality_checks_batch_id'), table_name='quality_checks')
    op.drop_table('quality_checks')
    op.drop_index(op.f('ix_quality_check_templates_tenant_id'), table_name='quality_check_templates')
    op.drop_index(op.f('ix_quality_check_templates_template_code'), table_name='quality_check_templates')
    op.drop_table('quality_check_templates')
    op.drop_index(op.f('ix_production_schedules_tenant_id'), table_name='production_schedules')
    op.drop_index(op.f('ix_production_schedules_schedule_date'), table_name='production_schedules')
    op.drop_table('production_schedules')
    op.drop_index(op.f('ix_production_capacity_tenant_id'), table_name='production_capacity')
    op.drop_index(op.f('ix_production_capacity_date'), table_name='production_capacity')
    op.drop_table('production_capacity')
    op.drop_index(op.f('ix_production_batches_tenant_id'), table_name='production_batches')
    op.drop_index(op.f('ix_production_batches_status'), table_name='production_batches')
    op.drop_index(op.f('ix_production_batches_product_id'), table_name='production_batches')
    op.drop_index(op.f('ix_production_batches_current_process_stage'), table_name='production_batches')
    op.drop_index(op.f('ix_production_batches_batch_number'), table_name='production_batches')
    op.drop_table('production_batches')
    op.drop_index(op.f('ix_equipment_tenant_id'), table_name='equipment')
    op.drop_table('equipment')
    op.drop_index(op.f('ix_audit_logs_user_id'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_tenant_id'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_severity'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_service_name'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_resource_type'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_resource_id'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_created_at'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_action'), table_name='audit_logs')
    op.drop_index('idx_audit_user_created', table_name='audit_logs')
    op.drop_index('idx_audit_tenant_created', table_name='audit_logs')
    op.drop_index('idx_audit_severity_created', table_name='audit_logs')
    op.drop_index('idx_audit_service_created', table_name='audit_logs')
    op.drop_index('idx_audit_resource_type_action', table_name='audit_logs')
    op.drop_table('audit_logs')
    # FIX: Alembic autogenerate does not emit drops for the Postgres enum
    # types created by the sa.Enum(...) columns in upgrade(). Without these,
    # a downgrade followed by a fresh upgrade fails with
    # "type ... already exists". Drop them explicitly (dependent tables are
    # already gone at this point).
    op.execute("DROP TYPE IF EXISTS equipmenttype")
    op.execute("DROP TYPE IF EXISTS equipmentstatus")
    op.execute("DROP TYPE IF EXISTS productionstatus")
    op.execute("DROP TYPE IF EXISTS productionpriority")
    op.execute("DROP TYPE IF EXISTS processstage")
    # ### end Alembic commands ###

View File

@@ -1,51 +0,0 @@
"""Add waste_defect_type and is_ai_assisted to production_batches
Revision ID: 7f8e9d2a1b3c
Revises: 42a9c1fd8fec
Create Date: 2025-10-23 09:00:00.000000
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '7f8e9d2a1b3c'
# Chains directly off the initial schema migration.
down_revision = '42a9c1fd8fec'
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Add waste_defect_type and is_ai_assisted columns to production_batches table"""
    # Add waste_defect_type column
    # Optional label categorising the defect behind recorded waste.
    op.add_column(
        'production_batches',
        sa.Column('waste_defect_type', sa.String(length=100), nullable=True)
    )
    # Add is_ai_assisted column with default False
    # server_default='false' backfills existing rows so the NOT NULL
    # constraint can be applied in a single step.
    op.add_column(
        'production_batches',
        sa.Column('is_ai_assisted', sa.Boolean(), nullable=False, server_default='false')
    )
    # Add index on is_ai_assisted for faster queries on AI-assisted batch filtering
    op.create_index(
        'ix_production_batches_is_ai_assisted',
        'production_batches',
        ['is_ai_assisted'],
        unique=False
    )
def downgrade() -> None:
    """Remove waste_defect_type and is_ai_assisted columns from production_batches table"""
    # Drop index first (it depends on the is_ai_assisted column)
    op.drop_index('ix_production_batches_is_ai_assisted', table_name='production_batches')
    # Drop columns
    op.drop_column('production_batches', 'is_ai_assisted')
    op.drop_column('production_batches', 'waste_defect_type')

View File

@@ -1,27 +0,0 @@
"""add reasoning fields to production batches
Revision ID: 20251107_add_reasoning_fields
Revises: 20251023_0900_add_waste_tracking_fields
Create Date: 2025-11-07
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '20251107_add_reasoning_fields'
# BUG FIX: down_revision previously pointed at
# '20251023_0900_add_waste_tracking_fields', which is the *filename* of the
# waste-tracking migration, not its revision id. Alembic chains on revision
# ids, and that migration declares revision = '7f8e9d2a1b3c', so the old
# value broke the migration chain (revision not found).
down_revision = '7f8e9d2a1b3c'
branch_labels = None
depends_on = None
def upgrade():
    """Add JTBD reasoning fields to production_batches."""
    # Add reasoning fields to production_batches table:
    # free-text explanation plus structured reasoning payload.
    op.add_column('production_batches', sa.Column('reasoning', sa.Text(), nullable=True))
    op.add_column('production_batches', sa.Column('reasoning_data', sa.JSON(), nullable=True))
def downgrade():
    """Remove the JTBD reasoning fields added by upgrade()."""
    # Remove reasoning fields from production_batches table
    # (reverse order of addition).
    op.drop_column('production_batches', 'reasoning_data')
    op.drop_column('production_batches', 'reasoning')