Fix DB issue 2s
35 services/external/Dockerfile (vendored)
@@ -1,4 +1,10 @@
-# services/external/Dockerfile
+# External Dockerfile
+# Add this stage at the top of each service Dockerfile
+FROM python:3.11-slim AS shared
+WORKDIR /shared
+COPY shared/ /shared/
+
+# Then your main service stage
 FROM python:3.11-slim

 WORKDIR /app
@@ -10,32 +16,31 @@ RUN apt-get update && apt-get install -y \
 curl \
 && rm -rf /var/lib/apt/lists/*

-# Copy requirements and install Python dependencies
+# Copy requirements
 COPY services/external/requirements.txt .
+
+# Install Python dependencies
 RUN pip install --no-cache-dir -r requirements.txt

-# Copy shared modules first
-COPY shared/ /app/shared/
+# Copy shared libraries from the shared stage
+COPY --from=shared /shared /app/shared

 # Copy application code
-COPY services/external/app/ /app/app/
-
-# Copy migrations and alembic config
-COPY services/external/migrations/ /app/migrations/
-COPY services/external/alembic.ini /app/alembic.ini
+COPY services/external/ .

 # Copy scripts directory
 COPY scripts/ /app/scripts/

-# Set Python path to include shared modules
-ENV PYTHONPATH=/app
+# Add shared libraries to Python path
+ENV PYTHONPATH="/app:/app/shared:${PYTHONPATH:-}"

+
 # Expose port
 EXPOSE 8000

 # Health check
-HEALTHCHECK --interval=30s --timeout=10s --start-period=60s --retries=3 \
-    CMD python -c "import requests; requests.get('http://localhost:8000/health', timeout=5)" || exit 1
+HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
+    CMD curl -f http://localhost:8000/health || exit 1

-# Run the application
-CMD ["python", "-m", "uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"]
+# Run application
+CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"]
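The new HEALTHCHECK shells out to curl (installed above via apt-get) instead of the requests one-liner, and probes GET /health with a much shorter 5s start period. This assumes the service exposes such a route; a minimal sketch of that kind of endpoint, written here with plain FastAPI rather than the project's StandardFastAPIService wrapper, which is not shown in this diff:

# Hypothetical illustration of the /health endpoint the HEALTHCHECK probes.
from fastapi import FastAPI

app = FastAPI()

@app.get("/health")
async def health() -> dict:
    # Any 2xx response makes `curl -f http://localhost:8000/health` exit 0,
    # so the container is reported healthy.
    return {"status": "ok"}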
2 services/external/app/main.py (vendored)
@@ -17,7 +17,7 @@ from app.api.traffic import router as traffic_router
 class ExternalService(StandardFastAPIService):
     """External Data Service with standardized setup"""

-    expected_migration_version = "001_initial_external"
+    expected_migration_version = "00001"

     async def on_startup(self, app):
         """Custom startup logic including migration verification"""
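The commit keys the service's startup check to the new Alembic revision. A minimal sketch of what such a verification can look like (hypothetical helper; the actual logic lives in StandardFastAPIService and is not part of this diff):

# Hypothetical sketch, not the StandardFastAPIService implementation: read the
# revision Alembic recorded in the database and compare it to the expected value.
from sqlalchemy import create_engine, text

def verify_migration_version(database_url: str, expected: str) -> None:
    engine = create_engine(database_url)
    with engine.connect() as conn:
        current = conn.execute(text("SELECT version_num FROM alembic_version")).scalar()
    if current != expected:
        raise RuntimeError(f"Database at revision {current!r}, expected {expected!r}")

Such a check is an exact string comparison, so expected_migration_version has to match the revision value that the migration below stores in alembic_version.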
236 services/external/migrations/versions/00001_initial_schema.py (vendored, new file)
@@ -0,0 +1,236 @@
"""Initial schema for external service

Revision ID: 000001
Revises:
Create Date: 2025-09-30 18:00:00.00000

"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision = '000001'
down_revision = None
branch_labels = None
depends_on = None


def upgrade() -> None:
    # Create traffic_data table
    op.create_table('traffic_data',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=True),
    sa.Column('location_id', sa.String(100), nullable=False),
    sa.Column('city', sa.String(50), nullable=False),
    sa.Column('date', sa.DateTime(timezone=True), nullable=False),
    sa.Column('traffic_volume', sa.Integer(), nullable=True),
    sa.Column('congestion_level', sa.String(20), nullable=True),
    sa.Column('average_speed', sa.Float(), nullable=True),
    sa.Column('occupation_percentage', sa.Float(), nullable=True),
    sa.Column('load_percentage', sa.Float(), nullable=True),
    sa.Column('pedestrian_count', sa.Integer(), nullable=True),
    sa.Column('measurement_point_id', sa.String(100), nullable=True),
    sa.Column('measurement_point_name', sa.String(500), nullable=True),
    sa.Column('measurement_point_type', sa.String(50), nullable=True),
    sa.Column('latitude', sa.Float(), nullable=True),
    sa.Column('longitude', sa.Float(), nullable=True),
    sa.Column('district', sa.String(100), nullable=True),
    sa.Column('zone', sa.String(100), nullable=True),
    sa.Column('source', sa.String(50), nullable=False),
    sa.Column('data_quality_score', sa.Float(), nullable=True),
    sa.Column('is_synthetic', sa.Boolean(), nullable=True),
    sa.Column('has_pedestrian_inference', sa.Boolean(), nullable=True),
    sa.Column('city_specific_data', postgresql.JSON(astext_type=sa.Text()), nullable=True),
    sa.Column('raw_data', sa.Text(), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
    sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_traffic_data_tenant_id'), 'traffic_data', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_traffic_data_location_id'), 'traffic_data', ['location_id'], unique=False)
    op.create_index(op.f('ix_traffic_data_city'), 'traffic_data', ['city'], unique=False)
    op.create_index(op.f('ix_traffic_data_date'), 'traffic_data', ['date'], unique=False)
    op.create_index('idx_traffic_location_date', 'traffic_data', ['location_id', 'date'], unique=False)
    op.create_index('idx_traffic_city_date', 'traffic_data', ['city', 'date'], unique=False)
    op.create_index('idx_traffic_tenant_date', 'traffic_data', ['tenant_id', 'date'], unique=False)
    op.create_index('idx_traffic_city_location', 'traffic_data', ['city', 'location_id'], unique=False)
    op.create_index('idx_traffic_measurement_point', 'traffic_data', ['city', 'measurement_point_id'], unique=False)
    op.create_index('idx_traffic_district_date', 'traffic_data', ['city', 'district', 'date'], unique=False)
    op.create_index('idx_traffic_training', 'traffic_data', ['tenant_id', 'city', 'date', 'is_synthetic'], unique=False)
    op.create_index('idx_traffic_quality', 'traffic_data', ['city', 'data_quality_score', 'date'], unique=False)

    # Create traffic_measurement_points table
    op.create_table('traffic_measurement_points',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('city', sa.String(50), nullable=False),
    sa.Column('measurement_point_id', sa.String(100), nullable=False),
    sa.Column('name', sa.String(500), nullable=True),
    sa.Column('description', sa.Text(), nullable=True),
    sa.Column('latitude', sa.Float(), nullable=False),
    sa.Column('longitude', sa.Float(), nullable=False),
    sa.Column('district', sa.String(100), nullable=True),
    sa.Column('zone', sa.String(100), nullable=True),
    sa.Column('road_type', sa.String(50), nullable=True),
    sa.Column('measurement_type', sa.String(50), nullable=True),
    sa.Column('point_category', sa.String(50), nullable=True),
    sa.Column('is_active', sa.Boolean(), nullable=True),
    sa.Column('installation_date', sa.DateTime(timezone=True), nullable=True),
    sa.Column('last_data_received', sa.DateTime(timezone=True), nullable=True),
    sa.Column('data_quality_rating', sa.Float(), nullable=True),
    sa.Column('city_specific_metadata', postgresql.JSON(astext_type=sa.Text()), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
    sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index('idx_unique_city_point', 'traffic_measurement_points', ['city', 'measurement_point_id'], unique=True)
    op.create_index(op.f('ix_traffic_measurement_points_city'), 'traffic_measurement_points', ['city'], unique=False)
    op.create_index(op.f('ix_traffic_measurement_points_measurement_point_id'), 'traffic_measurement_points', ['measurement_point_id'], unique=False)
    op.create_index('idx_points_city_location', 'traffic_measurement_points', ['city', 'latitude', 'longitude'], unique=False)
    op.create_index('idx_points_district', 'traffic_measurement_points', ['city', 'district'], unique=False)
    op.create_index('idx_points_road_type', 'traffic_measurement_points', ['city', 'road_type'], unique=False)
    op.create_index('idx_points_active', 'traffic_measurement_points', ['city', 'is_active', 'last_data_received'], unique=False)

    # Create traffic_background_jobs table
    op.create_table('traffic_background_jobs',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('job_type', sa.String(50), nullable=False),
    sa.Column('city', sa.String(50), nullable=False),
    sa.Column('location_pattern', sa.String(200), nullable=True),
    sa.Column('scheduled_at', sa.DateTime(timezone=True), nullable=False),
    sa.Column('started_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('completed_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('status', sa.String(20), nullable=False),
    sa.Column('progress_percentage', sa.Float(), nullable=True),
    sa.Column('records_processed', sa.Integer(), nullable=True),
    sa.Column('records_stored', sa.Integer(), nullable=True),
    sa.Column('data_start_date', sa.DateTime(timezone=True), nullable=True),
    sa.Column('data_end_date', sa.DateTime(timezone=True), nullable=True),
    sa.Column('success_count', sa.Integer(), nullable=True),
    sa.Column('error_count', sa.Integer(), nullable=True),
    sa.Column('error_message', sa.Text(), nullable=True),
    sa.Column('job_metadata', postgresql.JSON(astext_type=sa.Text()), nullable=True),
    sa.Column('tenant_id', sa.UUID(), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
    sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_traffic_background_jobs_city'), 'traffic_background_jobs', ['city'], unique=False)
    op.create_index(op.f('ix_traffic_background_jobs_tenant_id'), 'traffic_background_jobs', ['tenant_id'], unique=False)
    op.create_index('idx_jobs_city_status', 'traffic_background_jobs', ['city', 'status', 'scheduled_at'], unique=False)
    op.create_index('idx_jobs_tenant_status', 'traffic_background_jobs', ['tenant_id', 'status', 'scheduled_at'], unique=False)
    op.create_index('idx_jobs_type_city', 'traffic_background_jobs', ['job_type', 'city', 'scheduled_at'], unique=False)
    op.create_index('idx_jobs_completed', 'traffic_background_jobs', ['status', 'completed_at'], unique=False)

    # Create weather_data table
    op.create_table('weather_data',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('location_id', sa.String(100), nullable=False),
    sa.Column('city', sa.String(50), nullable=False),
    sa.Column('station_name', sa.String(200), nullable=True),
    sa.Column('latitude', sa.Float(), nullable=True),
    sa.Column('longitude', sa.Float(), nullable=True),
    sa.Column('date', sa.DateTime(timezone=True), nullable=False),
    sa.Column('forecast_date', sa.DateTime(timezone=True), nullable=True),
    sa.Column('temperature', sa.Float(), nullable=True),
    sa.Column('temperature_min', sa.Float(), nullable=True),
    sa.Column('temperature_max', sa.Float(), nullable=True),
    sa.Column('feels_like', sa.Float(), nullable=True),
    sa.Column('precipitation', sa.Float(), nullable=True),
    sa.Column('precipitation_probability', sa.Float(), nullable=True),
    sa.Column('humidity', sa.Float(), nullable=True),
    sa.Column('wind_speed', sa.Float(), nullable=True),
    sa.Column('wind_direction', sa.Float(), nullable=True),
    sa.Column('wind_gust', sa.Float(), nullable=True),
    sa.Column('pressure', sa.Float(), nullable=True),
    sa.Column('visibility', sa.Float(), nullable=True),
    sa.Column('uv_index', sa.Float(), nullable=True),
    sa.Column('cloud_cover', sa.Float(), nullable=True),
    sa.Column('condition', sa.String(100), nullable=True),
    sa.Column('description', sa.String(200), nullable=True),
    sa.Column('weather_code', sa.String(20), nullable=True),
    sa.Column('source', sa.String(50), nullable=False),
    sa.Column('data_type', sa.String(20), nullable=False),
    sa.Column('is_forecast', sa.Boolean(), nullable=True),
    sa.Column('data_quality_score', sa.Float(), nullable=True),
    sa.Column('raw_data', postgresql.JSON(astext_type=sa.Text()), nullable=True),
    sa.Column('processed_data', postgresql.JSON(astext_type=sa.Text()), nullable=True),
    sa.Column('tenant_id', sa.UUID(), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
    sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_weather_data_location_id'), 'weather_data', ['location_id'], unique=False)
    op.create_index(op.f('ix_weather_data_city'), 'weather_data', ['city'], unique=False)
    op.create_index(op.f('ix_weather_data_date'), 'weather_data', ['date'], unique=False)
    op.create_index(op.f('ix_weather_data_tenant_id'), 'weather_data', ['tenant_id'], unique=False)
    op.create_index('idx_weather_location_date', 'weather_data', ['location_id', 'date'], unique=False)

    # Create weather_forecasts table
    op.create_table('weather_forecasts',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('location_id', sa.String(100), nullable=False),
    sa.Column('forecast_date', sa.DateTime(timezone=True), nullable=False),
    sa.Column('generated_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=False),
    sa.Column('temperature', sa.Float(), nullable=True),
    sa.Column('precipitation', sa.Float(), nullable=True),
    sa.Column('humidity', sa.Float(), nullable=True),
    sa.Column('wind_speed', sa.Float(), nullable=True),
    sa.Column('description', sa.String(200), nullable=True),
    sa.Column('source', sa.String(50), nullable=False),
    sa.Column('raw_data', sa.Text(), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
    sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_weather_forecasts_location_id'), 'weather_forecasts', ['location_id'], unique=False)
    op.create_index('idx_forecast_location_date', 'weather_forecasts', ['location_id', 'forecast_date'], unique=False)


def downgrade() -> None:
    # Drop weather_forecasts table
    op.drop_index('idx_forecast_location_date', table_name='weather_forecasts')
    op.drop_index(op.f('ix_weather_forecasts_location_id'), table_name='weather_forecasts')
    op.drop_table('weather_forecasts')

    # Drop weather_data table
    op.drop_index('idx_weather_location_date', table_name='weather_data')
    op.drop_index(op.f('ix_weather_data_tenant_id'), table_name='weather_data')
    op.drop_index(op.f('ix_weather_data_date'), table_name='weather_data')
    op.drop_index(op.f('ix_weather_data_city'), table_name='weather_data')
    op.drop_index(op.f('ix_weather_data_location_id'), table_name='weather_data')
    op.drop_table('weather_data')

    # Drop traffic_background_jobs table
    op.drop_index('idx_jobs_completed', table_name='traffic_background_jobs')
    op.drop_index('idx_jobs_type_city', table_name='traffic_background_jobs')
    op.drop_index('idx_jobs_tenant_status', table_name='traffic_background_jobs')
    op.drop_index('idx_jobs_city_status', table_name='traffic_background_jobs')
    op.drop_index(op.f('ix_traffic_background_jobs_tenant_id'), table_name='traffic_background_jobs')
    op.drop_index(op.f('ix_traffic_background_jobs_city'), table_name='traffic_background_jobs')
    op.drop_table('traffic_background_jobs')

    # Drop traffic_measurement_points table
    op.drop_index('idx_points_active', table_name='traffic_measurement_points')
    op.drop_index('idx_points_road_type', table_name='traffic_measurement_points')
    op.drop_index('idx_points_district', table_name='traffic_measurement_points')
    op.drop_index('idx_points_city_location', table_name='traffic_measurement_points')
    op.drop_index('idx_unique_city_point', table_name='traffic_measurement_points')
    op.drop_index(op.f('ix_traffic_measurement_points_measurement_point_id'), table_name='traffic_measurement_points')
    op.drop_index(op.f('ix_traffic_measurement_points_city'), table_name='traffic_measurement_points')
    op.drop_table('traffic_measurement_points')

    # Drop traffic_data table
    op.drop_index('idx_traffic_quality', table_name='traffic_data')
    op.drop_index('idx_traffic_training', table_name='traffic_data')
    op.drop_index('idx_traffic_district_date', table_name='traffic_data')
    op.drop_index('idx_traffic_measurement_point', table_name='traffic_data')
    op.drop_index('idx_traffic_city_location', table_name='traffic_data')
    op.drop_index('idx_traffic_tenant_date', table_name='traffic_data')
    op.drop_index('idx_traffic_city_date', table_name='traffic_data')
    op.drop_index('idx_traffic_location_date', table_name='traffic_data')
    op.drop_index(op.f('ix_traffic_data_date'), table_name='traffic_data')
    op.drop_index(op.f('ix_traffic_data_city'), table_name='traffic_data')
    op.drop_index(op.f('ix_traffic_data_location_id'), table_name='traffic_data')
    op.drop_index(op.f('ix_traffic_data_tenant_id'), table_name='traffic_data')
    op.drop_table('traffic_data')
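Assuming the alembic.ini copied into the image points script_location at the migrations directory above, this revision can be applied with Alembic's standard commands; a minimal programmatic sketch:

# Sketch only: apply the migration above up to head via Alembic's Python API.
from alembic import command
from alembic.config import Config

cfg = Config("/app/alembic.ini")  # path as copied by the Dockerfile
command.upgrade(cfg, "head")      # runs upgrade(), creating the five tables and their indexes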
1 services/external/shared/shared (vendored)
@@ -1 +0,0 @@
-/Users/urtzialfaro/Documents/bakery-ia/shared