Fix DB issue 2s
This commit is contained in:
@@ -1,3 +1,4 @@
|
||||
# Forecasting Dockerfile
|
||||
# Add this stage at the top of each service Dockerfile
|
||||
FROM python:3.11-slim AS shared
|
||||
WORKDIR /shared
|
||||
@@ -32,6 +33,7 @@ COPY scripts/ /app/scripts/
|
||||
# Add shared libraries to Python path
|
||||
ENV PYTHONPATH="/app:/app/shared:${PYTHONPATH:-}"
|
||||
|
||||
|
||||
# Expose port
|
||||
EXPOSE 8000
|
||||
|
||||
@@ -40,4 +42,4 @@ HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
|
||||
CMD curl -f http://localhost:8000/health || exit 1
|
||||
|
||||
# Run application
|
||||
CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"]
|
||||
|
||||
@@ -19,7 +19,7 @@ from shared.service_base import StandardFastAPIService
|
||||
class ForecastingService(StandardFastAPIService):
|
||||
"""Forecasting Service with standardized setup"""
|
||||
|
||||
expected_migration_version = "00001"
|
||||
|
||||
async def on_startup(self, app):
|
||||
"""Custom startup logic including migration verification"""
|
||||
|
||||
160
services/forecasting/migrations/versions/00001_initial_schema.py
Normal file
160
services/forecasting/migrations/versions/00001_initial_schema.py
Normal file
@@ -0,0 +1,160 @@
|
||||
"""Initial schema for forecasting service
|
||||
|
||||
Revision ID: 00001
|
||||
Revises:
|
||||
Create Date: 2025-09-30 18:00:00.00000
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import postgresql
|
||||
|
||||
# revision identifiers, used by Alembic.
# NOTE: `revision` must match the "00001" prefix of this file's name
# (00001_initial_schema.py) and the service's expected_migration_version;
# the original value '0001' was off by one digit, so startup migration
# verification could never see the expected version.
revision = '00001'
down_revision = None  # first migration in the chain
branch_labels = None
depends_on = None
||||
|
||||
|
||||
def upgrade() -> None:
    """Create the initial forecasting schema.

    Tables created (each with a UUID primary key and tenant scoping):
    forecasts, predictions, prediction_batches, model_performance_metrics,
    and prediction_cache, plus the secondary indexes used by the hot
    query paths (tenant/product/date lookups).
    """
    # Create forecasts table: one row per (product, location, date) demand forecast.
    op.create_table('forecasts',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('inventory_product_id', sa.UUID(), nullable=False),
        sa.Column('product_name', sa.String(255), nullable=False),
        sa.Column('location', sa.String(255), nullable=False),
        sa.Column('forecast_date', sa.DateTime(timezone=True), nullable=False),
        # UTC timestamp assigned by the database, not the application.
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.Column('predicted_demand', sa.Float, nullable=False),
        # Confidence interval bounds around predicted_demand.
        sa.Column('confidence_lower', sa.Float, nullable=False),
        sa.Column('confidence_upper', sa.Float, nullable=False),
        sa.Column('confidence_level', sa.Float, nullable=True),
        # Provenance of the prediction: which model/version/algorithm produced it.
        sa.Column('model_id', sa.String(255), nullable=False),
        sa.Column('model_version', sa.String(50), nullable=False),
        sa.Column('algorithm', sa.String(50), nullable=True),
        sa.Column('business_type', sa.String(50), nullable=True),
        # Calendar / external features captured with the forecast.
        sa.Column('day_of_week', sa.Integer, nullable=False),
        sa.Column('is_holiday', sa.Boolean, nullable=True),
        sa.Column('is_weekend', sa.Boolean, nullable=True),
        sa.Column('weather_temperature', sa.Float, nullable=True),
        sa.Column('weather_precipitation', sa.Float, nullable=True),
        sa.Column('weather_description', sa.String(100), nullable=True),
        sa.Column('traffic_volume', sa.Integer, nullable=True),
        sa.Column('processing_time_ms', sa.Integer, nullable=True),
        sa.Column('features_used', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_forecasts_tenant_id'), 'forecasts', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_forecasts_inventory_product_id'), 'forecasts', ['inventory_product_id'], unique=False)
    op.create_index(op.f('ix_forecasts_product_name'), 'forecasts', ['product_name'], unique=False)
    op.create_index(op.f('ix_forecasts_location'), 'forecasts', ['location'], unique=False)
    op.create_index(op.f('ix_forecasts_forecast_date'), 'forecasts', ['forecast_date'], unique=False)

    # Create predictions table: raw single-shot prediction records.
    # NOTE(review): model_id is String(100) here but UUID in the metrics and
    # cache tables below — presumably intentional (external model key) but
    # worth confirming against the model registry.
    op.create_table('predictions',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('model_id', sa.String(100), nullable=False),
        sa.Column('input_data', postgresql.JSON(astext_type=sa.Text()), nullable=False),
        sa.Column('prediction_value', sa.Float(), nullable=False),
        sa.Column('prediction_confidence', sa.Float(), nullable=True),
        sa.Column('prediction_date', sa.DateTime(), nullable=False),
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('now()'), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_predictions_tenant_id'), 'predictions', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_predictions_model_id'), 'predictions', ['model_id'], unique=False)
    op.create_index(op.f('ix_predictions_prediction_date'), 'predictions', ['prediction_date'], unique=False)

    # Create prediction_batches table: progress/status tracking for bulk runs.
    op.create_table('prediction_batches',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('batch_name', sa.String(255), nullable=False),
        sa.Column('requested_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.Column('completed_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('status', sa.String(50), nullable=True),
        # Progress counters updated as the batch advances.
        sa.Column('total_products', sa.Integer, nullable=True),
        sa.Column('completed_products', sa.Integer, nullable=True),
        sa.Column('failed_products', sa.Integer, nullable=True),
        sa.Column('forecast_days', sa.Integer, nullable=True),
        sa.Column('business_type', sa.String(50), nullable=True),
        sa.Column('error_message', sa.Text(), nullable=True),
        sa.Column('processing_time_ms', sa.Integer, nullable=True),
        sa.Column('cancelled_by', sa.String(), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_prediction_batches_tenant_id'), 'prediction_batches', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_prediction_batches_status'), 'prediction_batches', ['status'], unique=False)
    op.create_index(op.f('ix_prediction_batches_requested_at'), 'prediction_batches', ['requested_at'], unique=False)

    # Create model_performance_metrics table: accuracy stats per model/product.
    op.create_table('model_performance_metrics',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('model_id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('inventory_product_id', sa.UUID(), nullable=False),
        # Standard regression error metrics; nullable because not every
        # evaluation computes all of them.
        sa.Column('mae', sa.Float, nullable=True),
        sa.Column('mape', sa.Float, nullable=True),
        sa.Column('rmse', sa.Float, nullable=True),
        sa.Column('accuracy_score', sa.Float, nullable=True),
        sa.Column('evaluation_date', sa.DateTime(timezone=True), nullable=False),
        sa.Column('evaluation_period_start', sa.DateTime(timezone=True), nullable=True),
        sa.Column('evaluation_period_end', sa.DateTime(timezone=True), nullable=True),
        sa.Column('sample_size', sa.Integer, nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_model_performance_metrics_model_id'), 'model_performance_metrics', ['model_id'], unique=False)
    op.create_index(op.f('ix_model_performance_metrics_tenant_id'), 'model_performance_metrics', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_model_performance_metrics_inventory_product_id'), 'model_performance_metrics', ['inventory_product_id'], unique=False)
    op.create_index(op.f('ix_model_performance_metrics_evaluation_date'), 'model_performance_metrics', ['evaluation_date'], unique=False)

    # Create prediction_cache table: memoized forecast results keyed by
    # cache_key (unique) with an explicit expiry.
    op.create_table('prediction_cache',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('cache_key', sa.String(255), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('inventory_product_id', sa.UUID(), nullable=False),
        sa.Column('location', sa.String(255), nullable=False),
        sa.Column('forecast_date', sa.DateTime(timezone=True), nullable=False),
        sa.Column('predicted_demand', sa.Float, nullable=False),
        sa.Column('confidence_lower', sa.Float, nullable=False),
        sa.Column('confidence_upper', sa.Float, nullable=False),
        sa.Column('model_id', sa.UUID(), nullable=False),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.Column('expires_at', sa.DateTime(timezone=True), nullable=False),
        sa.Column('hit_count', sa.Integer, nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('cache_key')
    )
    op.create_index(op.f('ix_prediction_cache_cache_key'), 'prediction_cache', ['cache_key'], unique=False)
    op.create_index(op.f('ix_prediction_cache_tenant_id'), 'prediction_cache', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_prediction_cache_inventory_product_id'), 'prediction_cache', ['inventory_product_id'], unique=False)
    op.create_index(op.f('ix_prediction_cache_forecast_date'), 'prediction_cache', ['forecast_date'], unique=False)
|
||||
|
||||
|
||||
def downgrade() -> None:
    """Tear down the initial forecasting schema.

    Reverses upgrade(): for each table, its secondary indexes are dropped
    first, then the table itself, in the opposite order of creation.
    """
    # (table, index column suffixes) in reverse creation order. Index names
    # follow the ix_<table>_<column> convention used in upgrade(), and the
    # per-table suffix order mirrors the original drop sequence.
    teardown = [
        ('prediction_cache',
         ['forecast_date', 'inventory_product_id', 'tenant_id', 'cache_key']),
        ('model_performance_metrics',
         ['evaluation_date', 'inventory_product_id', 'tenant_id', 'model_id']),
        ('prediction_batches',
         ['requested_at', 'status', 'tenant_id']),
        ('predictions',
         ['prediction_date', 'model_id', 'tenant_id']),
        ('forecasts',
         ['forecast_date', 'location', 'product_name',
          'inventory_product_id', 'tenant_id']),
    ]
    for table, index_columns in teardown:
        for column in index_columns:
            op.drop_index(op.f(f'ix_{table}_{column}'), table_name=table)
        op.drop_table(table)
||||
Reference in New Issue
Block a user