Fix imports
@@ -4,7 +4,7 @@ Handles routing, authentication, rate limiting, and cross-cutting concerns
 """

 import asyncio
-import logging
+import structlog
 from fastapi import FastAPI, Request, HTTPException, Depends
 from fastapi.middleware.cors import CORSMiddleware
 from fastapi.responses import JSONResponse
@@ -23,7 +23,7 @@ from shared.monitoring.metrics import MetricsCollector

 # Setup logging
 setup_logging("gateway", settings.LOG_LEVEL)
-logger = logging.getLogger(__name__)
+logger = structlog.get_logger()

 # Create FastAPI app
 app = FastAPI(

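Beyond the import swap, structlog changes how log calls are written: context travels as key-value pairs on the call instead of being interpolated into the message string, which is what makes calls like logger.warning("...", job_id=...) in the training-service hunks below work. A minimal sketch of the call style (the event name and fields here are illustrative, not taken from the gateway code):

    import structlog

    logger = structlog.get_logger()

    # stdlib style being replaced:
    #   logger.info("Handled %s %s in %d ms", method, path, elapsed_ms)
    # structlog style adopted by this commit: structured key-value context
    logger.info("request_handled", method="GET", path="/health", elapsed_ms=12)
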
@@ -12,3 +12,4 @@ email-validator==2.0.0
 aio-pika==9.3.0
 pytz==2023.3
 python-logstash==0.4.8
+structlog==23.2.0

@@ -7,7 +7,7 @@ Authentication API routes - Enhanced with proper metrics access

 from fastapi import APIRouter, Depends, HTTPException, status, Request
 from sqlalchemy.ext.asyncio import AsyncSession
-import logging
+import structlog

 from app.core.database import get_db
 from app.schemas.auth import (
@@ -18,7 +18,7 @@ from app.services.auth_service import AuthService
 from app.core.security import security_manager
 from shared.monitoring.decorators import track_execution_time, count_calls

-logger = logging.getLogger(__name__)
+logger = structlog.get_logger()
 router = APIRouter()

 def get_metrics_collector(request: Request):

@@ -5,7 +5,7 @@ User management API routes
 from fastapi import APIRouter, Depends, HTTPException, status
 from sqlalchemy.ext.asyncio import AsyncSession
 from typing import List
-import logging
+import structlog

 from app.core.database import get_db
 from app.schemas.auth import UserResponse, PasswordChangeRequest
@@ -13,7 +13,7 @@ from app.services.user_service import UserService
 from app.core.auth import get_current_user
 from app.models.users import User

-logger = logging.getLogger(__name__)
+logger = structlog.get_logger()
 router = APIRouter()

 @router.get("/me", response_model=UserResponse)

@@ -8,13 +8,13 @@ from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
 from sqlalchemy.ext.asyncio import AsyncSession
 from sqlalchemy import select
 from jose import JWTError, jwt
-import logging
+import structlog

 from app.core.config import settings
 from app.core.database import get_db
 from app.models.users import User

-logger = logging.getLogger(__name__)
+logger = structlog.get_logger()

 security = HTTPBearer()


@@ -5,14 +5,14 @@
 Database configuration for authentication service
 """

-import logging
+import structlog
 from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession, async_sessionmaker
 from sqlalchemy.pool import NullPool

 from app.core.config import settings
 from shared.database.base import Base

-logger = logging.getLogger(__name__)
+logger = structlog.get_logger()

 # Create async engine
 engine = create_async_engine(

@@ -12,12 +12,12 @@ from datetime import datetime, timedelta
 from typing import Optional, Dict, Any
 import redis.asyncio as redis
 from fastapi import HTTPException, status
-import logging
+import structlog

 from app.core.config import settings
 from shared.auth.jwt_handler import JWTHandler

-logger = logging.getLogger(__name__)
+logger = structlog.get_logger()

 # Initialize JWT handler
 jwt_handler = JWTHandler(settings.JWT_SECRET_KEY, settings.JWT_ALGORITHM)

@@ -2,7 +2,7 @@
 Authentication Service Main Application - Fixed middleware issue
 """

-import logging
+import structlog
 from fastapi import FastAPI, Request
 from fastapi.middleware.cors import CORSMiddleware
 from fastapi.responses import JSONResponse
@@ -17,7 +17,7 @@ from shared.monitoring.metrics import setup_metrics_early

 # Setup logging first
 setup_logging("auth-service", settings.LOG_LEVEL)
-logger = logging.getLogger(__name__)
+logger = structlog.get_logger()

 # Global variables for lifespan access
 metrics_collector = None

@@ -5,7 +5,7 @@
 Authentication service business logic - Complete implementation
 """

-import logging
+import structlog
 from datetime import datetime, timedelta, timezone
 from typing import Optional, Dict, Any
 from sqlalchemy.ext.asyncio import AsyncSession
@@ -18,7 +18,7 @@ from app.schemas.auth import UserRegistration, UserLogin, TokenResponse, UserRes
 from app.core.security import security_manager
 from app.services.messaging import publish_user_registered, publish_user_login

-logger = logging.getLogger(__name__)
+logger = structlog.get_logger()

 class AuthService:
     """Authentication service business logic"""

@@ -4,9 +4,9 @@ Messaging service for auth service
 """
 from shared.messaging.rabbitmq import RabbitMQClient
 from app.core.config import settings
-import logging
+import structlog

-logger = logging.getLogger(__name__)
+logger = structlog.get_logger()

 # Single global instance
 auth_publisher = RabbitMQClient(settings.RABBITMQ_URL, "auth-service")

@@ -6,12 +6,12 @@ from sqlalchemy.ext.asyncio import AsyncSession
 from sqlalchemy import select, update, delete
 from fastapi import HTTPException, status
 from passlib.context import CryptContext
-import logging
+import structlog

 from app.models.users import User
 from app.core.config import settings

-logger = logging.getLogger(__name__)
+logger = structlog.get_logger()

 # Password hashing
 pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")

@@ -16,3 +16,4 @@ prometheus-client==0.17.1
 python-json-logger==2.0.4
 pytz==2023.3
 python-logstash==0.4.8
+structlog==23.2.0

@@ -5,7 +5,7 @@
 Data Service Main Application - Fixed middleware issue
 """

-import logging
+import structlog
 from contextlib import asynccontextmanager
 from fastapi import FastAPI, Request
 from fastapi.middleware.cors import CORSMiddleware
@@ -21,7 +21,7 @@ from shared.monitoring.metrics import setup_metrics_early

 # Setup logging first
 setup_logging("data-service", settings.LOG_LEVEL)
-logger = logging.getLogger(__name__)
+logger = structlog.get_logger()

 # Global variables for lifespan access
 metrics_collector = None

@@ -4,9 +4,9 @@

 from shared.messaging.rabbitmq import RabbitMQClient
 from app.core.config import settings
-import logging
+import structlog

-logger = logging.getLogger(__name__)
+logger = structlog.get_logger()

 # Single global instance
 data_publisher = RabbitMQClient(settings.RABBITMQ_URL, "data-service")

@@ -2,7 +2,7 @@
 Forecasting Service
 """

-import logging
+import structlog
 from fastapi import FastAPI
 from fastapi.middleware.cors import CORSMiddleware

@@ -13,7 +13,7 @@ from shared.monitoring.metrics import MetricsCollector

 # Setup logging
 setup_logging("forecasting-service", "INFO")
-logger = logging.getLogger(__name__)
+logger = structlog.get_logger()

 # Create FastAPI app
 app = FastAPI(

@@ -12,3 +12,4 @@ prometheus-client==0.17.1
 python-json-logger==2.0.4
 pytz==2023.3
 python-logstash==0.4.8
+structlog==23.2.0

@@ -2,7 +2,7 @@
 Notification Service
 """

-import logging
+import structlog
 from fastapi import FastAPI
 from fastapi.middleware.cors import CORSMiddleware

@@ -13,7 +13,7 @@ from shared.monitoring.metrics import MetricsCollector

 # Setup logging
 setup_logging("notification-service", "INFO")
-logger = logging.getLogger(__name__)
+logger = structlog.get_logger()

 # Create FastAPI app
 app = FastAPI(

@@ -12,3 +12,5 @@ prometheus-client==0.17.1
 python-json-logger==2.0.4
 pytz==2023.3
 python-logstash==0.4.8
+structlog==23.2.0
+structlog==23.2.0

@@ -2,7 +2,7 @@
 Tenant Service
 """

-import logging
+import structlog
 from fastapi import FastAPI
 from fastapi.middleware.cors import CORSMiddleware

@@ -13,7 +13,7 @@ from shared.monitoring.metrics import MetricsCollector

 # Setup logging
 setup_logging("tenant-service", "INFO")
-logger = logging.getLogger(__name__)
+logger = structlog.get_logger()

 # Create FastAPI app
 app = FastAPI(

@@ -12,3 +12,4 @@ prometheus-client==0.17.1
 python-json-logger==2.0.4
 pytz==2023.3
 python-logstash==0.4.8
+structlog==23.2.0

@@ -5,14 +5,14 @@ Models API endpoints
 from fastapi import APIRouter, Depends, HTTPException, status
 from sqlalchemy.ext.asyncio import AsyncSession
 from typing import List
-import logging
+import structlog

 from app.core.database import get_db
 from app.core.auth import verify_token
 from app.schemas.training import TrainedModelResponse
 from app.services.training_service import TrainingService

-logger = logging.getLogger(__name__)
+logger = structlog.get_logger()
 router = APIRouter()

 training_service = TrainingService()

@@ -5,14 +5,14 @@ Training API endpoints
 from fastapi import APIRouter, Depends, HTTPException, status, Query
 from sqlalchemy.ext.asyncio import AsyncSession
 from typing import List, Optional
-import logging
+import structlog

 from app.core.database import get_db
 from app.core.auth import verify_token
 from app.schemas.training import TrainingRequest, TrainingJobResponse, TrainedModelResponse
 from app.services.training_service import TrainingService

-logger = logging.getLogger(__name__)
+logger = structlog.get_logger()
 router = APIRouter()

 training_service = TrainingService()

@@ -5,11 +5,11 @@ Authentication utilities for training service
 import httpx
 from fastapi import HTTPException, status, Depends
 from fastapi.security import HTTPBearer
-import logging
+import structlog

 from app.core.config import settings

-logger = logging.getLogger(__name__)
+logger = structlog.get_logger()

 security = HTTPBearer()


@@ -3,20 +3,20 @@ Training Service
 Handles ML model training for bakery demand forecasting
 """

-import logging
+import structlog
 from fastapi import FastAPI, BackgroundTasks
 from fastapi.middleware.cors import CORSMiddleware

 from app.core.config import settings
 from app.core.database import database_manager
 from app.api import training, models
-from app.services.messaging import message_publisher
+from app.services.messaging import setup_messaging, cleanup_messaging
 from shared.monitoring.logging import setup_logging
 from shared.monitoring.metrics import MetricsCollector

 # Setup logging
 setup_logging("training-service", settings.LOG_LEVEL)
-logger = logging.getLogger(__name__)
+logger = structlog.get_logger()

 # Create FastAPI app
 app = FastAPI(
@@ -50,7 +50,7 @@ async def startup_event():
     await database_manager.create_tables()

     # Initialize message publisher
-    await message_publisher.connect()
+    await setup_messaging()

     # Start metrics server
     metrics_collector.start_metrics_server(8080)
@@ -63,7 +63,7 @@ async def shutdown_event():
     logger.info("Shutting down Training Service")

     # Cleanup message publisher
-    await message_publisher.disconnect()
+    await cleanup_messaging()

     logger.info("Training Service shutdown complete")


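For context, setup_messaging() and cleanup_messaging() imported above replace the old message_publisher lifecycle calls; their bodies are not part of this diff, so the following is only a sketch of the assumed shape (connect()/disconnect() mirror the calls removed from the startup/shutdown handlers, and the cleanup log line appears in the messaging.py hunk below):

    import structlog

    from shared.messaging.rabbitmq import RabbitMQClient
    from app.core.config import settings

    logger = structlog.get_logger()
    training_publisher = RabbitMQClient(settings.RABBITMQ_URL, "training-service")

    async def setup_messaging() -> None:
        # Assumed: open the RabbitMQ connection the publisher helpers rely on
        await training_publisher.connect()
        logger.info("Training service messaging initialized")

    async def cleanup_messaging() -> None:
        # Assumed: close the connection during shutdown_event()
        await training_publisher.disconnect()
        logger.info("Training service messaging cleaned up")
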
@@ -3,7 +3,7 @@ ML Training implementation
 """

 import asyncio
-import logging
+import structlog
 from typing import Dict, Any, List
 import pandas as pd
 from datetime import datetime
@@ -15,7 +15,7 @@ from sklearn.metrics import mean_absolute_error, mean_squared_error, r2_score

 from app.core.config import settings

-logger = logging.getLogger(__name__)
+logger = structlog.get_logger()

 class MLTrainer:
     """ML training implementation"""

@@ -1,12 +1,15 @@
+# ================================================================
+# services/training/app/services/messaging.py
+# ================================================================
 """
-Training service messaging - Uses shared RabbitMQ client only
+Messaging service for training service
 """

+import structlog
 from shared.messaging.rabbitmq import RabbitMQClient
 from app.core.config import settings
-import logging

-logger = logging.getLogger(__name__)
+logger = structlog.get_logger()

 # Single global instance
 training_publisher = RabbitMQClient(settings.RABBITMQ_URL, "training-service")
@@ -25,14 +28,22 @@ async def cleanup_messaging():
     logger.info("Training service messaging cleaned up")

 # Convenience functions for training-specific events
-async def publish_training_started(data: dict) -> bool:
+async def publish_training_started(job_data: dict) -> bool:
     """Publish training started event"""
-    return await training_publisher.publish_training_event("started", data)
+    return await training_publisher.publish_training_event("started", job_data)

-async def publish_training_completed(data: dict) -> bool:
+async def publish_training_completed(job_data: dict) -> bool:
     """Publish training completed event"""
-    return await training_publisher.publish_training_event("completed", data)
+    return await training_publisher.publish_training_event("completed", job_data)

-async def publish_training_failed(data: dict) -> bool:
+async def publish_training_failed(job_data: dict) -> bool:
     """Publish training failed event"""
-    return await training_publisher.publish_training_event("failed", data)
+    return await training_publisher.publish_training_event("failed", job_data)
+
+async def publish_model_validated(model_data: dict) -> bool:
+    """Publish model validation event"""
+    return await training_publisher.publish_training_event("model.validated", model_data)
+
+async def publish_model_saved(model_data: dict) -> bool:
+    """Publish model saved event"""
+    return await training_publisher.publish_training_event("model.saved", model_data)

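A usage sketch for the convenience publishers above, mirroring the event_data dicts that training_service.py builds later in this diff (the wrapper function and field values are hypothetical):

    from datetime import datetime, timezone

    import structlog

    from app.services.messaging import publish_training_started

    logger = structlog.get_logger()

    async def announce_training_started(job_id: str, tenant_id: str) -> None:
        # Hypothetical helper: package the payload the same way TrainingService does
        event_data = {
            "job_id": job_id,
            "tenant_id": tenant_id,
            "timestamp": datetime.now(timezone.utc).isoformat(),
        }
        success = await publish_training_started(event_data)
        if not success:
            # Warn-but-continue, matching the error handling in TrainingService
            logger.warning("Failed to publish training started event", job_id=job_id)
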
@@ -1,9 +1,8 @@
 """
 Training service business logic
 """

 import asyncio
-import logging
+import structlog
 from datetime import datetime, timedelta, timezone
 from typing import Dict, Any, List, Optional
 from sqlalchemy.ext.asyncio import AsyncSession
@@ -16,10 +15,9 @@ from app.core.config import settings
 from app.models.training import TrainingJob, TrainedModel, TrainingLog
 from app.schemas.training import TrainingRequest, TrainingJobResponse, TrainedModelResponse
 from app.ml.trainer import MLTrainer
-from app.services.messaging import message_publisher
-from shared.messaging.events import TrainingStartedEvent, TrainingCompletedEvent, TrainingFailedEvent
+from app.services.messaging import publish_training_started, publish_training_completed, publish_training_failed

-logger = logging.getLogger(__name__)
+logger = structlog.get_logger()

 class TrainingService:
     """Training service business logic"""
@@ -46,8 +44,8 @@ class TrainingService:
             progress=0,
             current_step="Queued for training",
             requested_by=user_data.get("user_id"),
-            training_data_from= datetime.now(timezone.utc) - timedelta(days=request.training_days),
-            training_data_to= datetime.now(timezone.utc)
+            training_data_from=datetime.now(timezone.utc) - timedelta(days=request.training_days),
+            training_data_to=datetime.now(timezone.utc)
         )

         db.add(training_job)
@@ -57,24 +55,20 @@ class TrainingService:
         # Start training in background
         asyncio.create_task(self._execute_training(training_job.id, request, db))

-        # Publish training started event
-        await message_publisher.publish_event(
-            "training_events",
-            "training.started",
-            TrainingStartedEvent(
-                event_id=str(uuid.uuid4()),
-                service_name="training-service",
-                timestamp= datetime.now(timezone.utc),
-                data={
-                    "job_id": str(training_job.id),
-                    "tenant_id": tenant_id,
-                    "requested_by": user_data.get("user_id"),
-                    "training_days": request.training_days
-                }
-            ).__dict__
-        )
+        # Publish training started event - SIMPLIFIED
+        event_data = {
+            "job_id": str(training_job.id),
+            "tenant_id": tenant_id,
+            "requested_by": user_data.get("user_id"),
+            "training_days": request.training_days,
+            "timestamp": datetime.now(timezone.utc).isoformat()
+        }

-        logger.info(f"Training job started: {training_job.id} for tenant: {tenant_id}")
+        success = await publish_training_started(event_data)
+        if not success:
+            logger.warning("Failed to publish training started event", job_id=str(training_job.id))
+
+        logger.info("Training job started", job_id=str(training_job.id), tenant_id=tenant_id)

         return TrainingJobResponse(
             id=str(training_job.id),
@@ -206,7 +200,7 @@ class TrainingService:
     async def _execute_training(self, job_id: str, request: TrainingRequest, db: AsyncSession):
         """Execute training job"""

         start_time = datetime.now(timezone.utc)

         try:
             # Update job status
@@ -229,47 +223,39 @@ class TrainingService:
             await self._save_trained_models(job_id, models_result, validation_result, db)

             # Complete job
-            duration = int(( datetime.now(timezone.utc) - start_time).total_seconds())
+            duration = int((datetime.now(timezone.utc) - start_time).total_seconds())
             await self._complete_job(job_id, models_result, validation_result, duration, db)

-            # Publish completion event
-            await message_publisher.publish_event(
-                "training_events",
-                "training.completed",
-                TrainingCompletedEvent(
-                    event_id=str(uuid.uuid4()),
-                    service_name="training-service",
-                    timestamp= datetime.now(timezone.utc),
-                    data={
-                        "job_id": str(job_id),
-                        "models_trained": len(models_result),
-                        "duration_seconds": duration
-                    }
-                ).__dict__
-            )
+            # Publish completion event - SIMPLIFIED
+            event_data = {
+                "job_id": str(job_id),
+                "models_trained": len(models_result),
+                "duration_seconds": duration,
+                "timestamp": datetime.now(timezone.utc).isoformat()
+            }

-            logger.info(f"Training job completed: {job_id}")
+            success = await publish_training_completed(event_data)
+            if not success:
+                logger.warning("Failed to publish training completed event", job_id=str(job_id))
+
+            logger.info("Training job completed", job_id=str(job_id))

         except Exception as e:
-            logger.error(f"Training job failed: {job_id} - {e}")
+            logger.error("Training job failed", job_id=str(job_id), error=str(e))

             # Update job as failed
             await self._update_job_status(job_id, "failed", 0, f"Training failed: {str(e)}", db)

-            # Publish failure event
-            await message_publisher.publish_event(
-                "training_events",
-                "training.failed",
-                TrainingFailedEvent(
-                    event_id=str(uuid.uuid4()),
-                    service_name="training-service",
-                    timestamp= datetime.now(timezone.utc),
-                    data={
-                        "job_id": str(job_id),
-                        "error": str(e)
-                    }
-                ).__dict__
-            )
+            # Publish failure event - SIMPLIFIED
+            event_data = {
+                "job_id": str(job_id),
+                "error": str(e),
+                "timestamp": datetime.now(timezone.utc).isoformat()
+            }
+
+            success = await publish_training_failed(event_data)
+            if not success:
+                logger.warning("Failed to publish training failed event", job_id=str(job_id))

     async def _update_job_status(self, job_id: str, status: str, progress: int, current_step: str, db: AsyncSession):
         """Update training job status"""
@@ -281,7 +267,7 @@ class TrainingService:
                 status=status,
                 progress=progress,
                 current_step=current_step,
-                updated_at= datetime.now(timezone.utc)
+                updated_at=datetime.now(timezone.utc)
             )
         )
         await db.commit()
@@ -312,7 +298,7 @@ class TrainingService:
                 raise Exception(f"Failed to get training data: {response.status_code}")

         except Exception as e:
-            logger.error(f"Error getting training data: {e}")
+            logger.error("Error getting training data", error=str(e))
             raise

     async def _save_trained_models(self, job_id: str, models_result: Dict[str, Any], validation_result: Dict[str, Any], db: AsyncSession):
@@ -374,7 +360,7 @@ class TrainingService:
             status="completed",
             progress=100,
             current_step="Training completed successfully",
-            completed_at= datetime.now(timezone.utc),
+            completed_at=datetime.now(timezone.utc),
             duration_seconds=duration,
             models_trained=models_result,
             metrics=metrics,

@@ -24,3 +24,4 @@ pytz==2023.3
 python-dateutil==2.8.2

 python-logstash==0.4.8
+structlog==23.2.0

@@ -6,7 +6,7 @@ import json
 from typing import Dict, Any, Callable, Optional
 from datetime import datetime, date
 import uuid
-import logstash
+import structlog

 try:
     import aio_pika
@@ -15,7 +15,7 @@ try:
 except ImportError:
     AIO_PIKA_AVAILABLE = False

-logger = logstash.get_logger()
+logger = structlog.get_logger()

 def json_serializer(obj):
     """JSON serializer for objects not serializable by default json code"""

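The shared setup_logging() calls in the service entry points are expected to configure structlog; that code is outside this diff, so the following is only an assumed minimal configuration showing how the key-value log calls above would end up as JSON lines (processor choice and level handling are guesses, not the shared library's actual implementation):

    import logging

    import structlog

    def setup_logging(service_name: str, log_level: str = "INFO") -> None:
        # Assumed sketch of shared.monitoring.logging.setup_logging
        structlog.contextvars.bind_contextvars(service=service_name)
        structlog.configure(
            processors=[
                structlog.contextvars.merge_contextvars,
                structlog.processors.add_log_level,
                structlog.processors.TimeStamper(fmt="iso"),
                structlog.processors.JSONRenderer(),
            ],
            wrapper_class=structlog.make_filtering_bound_logger(
                getattr(logging, log_level.upper(), logging.INFO)
            ),
            cache_logger_on_first_use=True,
        )
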