Fix imports
This commit is contained in:
@@ -4,7 +4,7 @@ Handles routing, authentication, rate limiting, and cross-cutting concerns
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
import structlog
|
||||
from fastapi import FastAPI, Request, HTTPException, Depends
|
||||
from fastapi.middleware.cors import CORSMiddleware
|
||||
from fastapi.responses import JSONResponse
|
||||
@@ -23,7 +23,7 @@ from shared.monitoring.metrics import MetricsCollector
|
||||
|
||||
# Setup logging
|
||||
setup_logging("gateway", settings.LOG_LEVEL)
|
||||
logger = logging.getLogger(__name__)
|
||||
logger = structlog.get_logger()
|
||||
|
||||
# Create FastAPI app
|
||||
app = FastAPI(
|
||||
|
||||
@@ -12,3 +12,4 @@ email-validator==2.0.0
|
||||
aio-pika==9.3.0
|
||||
pytz==2023.3
|
||||
python-logstash==0.4.8
|
||||
structlog==23.2.0
|
||||
@@ -7,7 +7,7 @@ Authentication API routes - Enhanced with proper metrics access
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, status, Request
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
import logging
|
||||
import structlog
|
||||
|
||||
from app.core.database import get_db
|
||||
from app.schemas.auth import (
|
||||
@@ -18,7 +18,7 @@ from app.services.auth_service import AuthService
|
||||
from app.core.security import security_manager
|
||||
from shared.monitoring.decorators import track_execution_time, count_calls
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
logger = structlog.get_logger()
|
||||
router = APIRouter()
|
||||
|
||||
def get_metrics_collector(request: Request):
|
||||
|
||||
@@ -5,7 +5,7 @@ User management API routes
|
||||
from fastapi import APIRouter, Depends, HTTPException, status
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from typing import List
|
||||
import logging
|
||||
import structlog
|
||||
|
||||
from app.core.database import get_db
|
||||
from app.schemas.auth import UserResponse, PasswordChangeRequest
|
||||
@@ -13,7 +13,7 @@ from app.services.user_service import UserService
|
||||
from app.core.auth import get_current_user
|
||||
from app.models.users import User
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
logger = structlog.get_logger()
|
||||
router = APIRouter()
|
||||
|
||||
@router.get("/me", response_model=UserResponse)
|
||||
|
||||
@@ -8,13 +8,13 @@ from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy import select
|
||||
from jose import JWTError, jwt
|
||||
import logging
|
||||
import structlog
|
||||
|
||||
from app.core.config import settings
|
||||
from app.core.database import get_db
|
||||
from app.models.users import User
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
logger = structlog.get_logger()
|
||||
|
||||
security = HTTPBearer()
|
||||
|
||||
|
||||
@@ -5,14 +5,14 @@
|
||||
Database configuration for authentication service
|
||||
"""
|
||||
|
||||
import logging
|
||||
import structlog
|
||||
from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession, async_sessionmaker
|
||||
from sqlalchemy.pool import NullPool
|
||||
|
||||
from app.core.config import settings
|
||||
from shared.database.base import Base
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
logger = structlog.get_logger()
|
||||
|
||||
# Create async engine
|
||||
engine = create_async_engine(
|
||||
|
||||
@@ -12,12 +12,12 @@ from datetime import datetime, timedelta
|
||||
from typing import Optional, Dict, Any
|
||||
import redis.asyncio as redis
|
||||
from fastapi import HTTPException, status
|
||||
import logging
|
||||
import structlog
|
||||
|
||||
from app.core.config import settings
|
||||
from shared.auth.jwt_handler import JWTHandler
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
logger = structlog.get_logger()
|
||||
|
||||
# Initialize JWT handler
|
||||
jwt_handler = JWTHandler(settings.JWT_SECRET_KEY, settings.JWT_ALGORITHM)
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
Authentication Service Main Application - Fixed middleware issue
|
||||
"""
|
||||
|
||||
import logging
|
||||
import structlog
|
||||
from fastapi import FastAPI, Request
|
||||
from fastapi.middleware.cors import CORSMiddleware
|
||||
from fastapi.responses import JSONResponse
|
||||
@@ -17,7 +17,7 @@ from shared.monitoring.metrics import setup_metrics_early
|
||||
|
||||
# Setup logging first
|
||||
setup_logging("auth-service", settings.LOG_LEVEL)
|
||||
logger = logging.getLogger(__name__)
|
||||
logger = structlog.get_logger()
|
||||
|
||||
# Global variables for lifespan access
|
||||
metrics_collector = None
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
Authentication service business logic - Complete implementation
|
||||
"""
|
||||
|
||||
import logging
|
||||
import structlog
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from typing import Optional, Dict, Any
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
@@ -18,7 +18,7 @@ from app.schemas.auth import UserRegistration, UserLogin, TokenResponse, UserRes
|
||||
from app.core.security import security_manager
|
||||
from app.services.messaging import publish_user_registered, publish_user_login
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
logger = structlog.get_logger()
|
||||
|
||||
class AuthService:
|
||||
"""Authentication service business logic"""
|
||||
|
||||
@@ -4,9 +4,9 @@ Messaging service for auth service
|
||||
"""
|
||||
from shared.messaging.rabbitmq import RabbitMQClient
|
||||
from app.core.config import settings
|
||||
import logging
|
||||
import structlog
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
logger = structlog.get_logger()
|
||||
|
||||
# Single global instance
|
||||
auth_publisher = RabbitMQClient(settings.RABBITMQ_URL, "auth-service")
|
||||
|
||||
@@ -6,12 +6,12 @@ from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy import select, update, delete
|
||||
from fastapi import HTTPException, status
|
||||
from passlib.context import CryptContext
|
||||
import logging
|
||||
import structlog
|
||||
|
||||
from app.models.users import User
|
||||
from app.core.config import settings
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
logger = structlog.get_logger()
|
||||
|
||||
# Password hashing
|
||||
pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
|
||||
|
||||
@@ -16,3 +16,4 @@ prometheus-client==0.17.1
|
||||
python-json-logger==2.0.4
|
||||
pytz==2023.3
|
||||
python-logstash==0.4.8
|
||||
structlog==23.2.0
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
Data Service Main Application - Fixed middleware issue
|
||||
"""
|
||||
|
||||
import logging
|
||||
import structlog
|
||||
from contextlib import asynccontextmanager
|
||||
from fastapi import FastAPI, Request
|
||||
from fastapi.middleware.cors import CORSMiddleware
|
||||
@@ -21,7 +21,7 @@ from shared.monitoring.metrics import setup_metrics_early
|
||||
|
||||
# Setup logging first
|
||||
setup_logging("data-service", settings.LOG_LEVEL)
|
||||
logger = logging.getLogger(__name__)
|
||||
logger = structlog.get_logger()
|
||||
|
||||
# Global variables for lifespan access
|
||||
metrics_collector = None
|
||||
|
||||
@@ -4,9 +4,9 @@
|
||||
|
||||
from shared.messaging.rabbitmq import RabbitMQClient
|
||||
from app.core.config import settings
|
||||
import logging
|
||||
import structlog
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
logger = structlog.get_logger()
|
||||
|
||||
# Single global instance
|
||||
data_publisher = RabbitMQClient(settings.RABBITMQ_URL, "data-service")
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
Forecasting Service
|
||||
"""
|
||||
|
||||
import logging
|
||||
import structlog
|
||||
from fastapi import FastAPI
|
||||
from fastapi.middleware.cors import CORSMiddleware
|
||||
|
||||
@@ -13,7 +13,7 @@ from shared.monitoring.metrics import MetricsCollector
|
||||
|
||||
# Setup logging
|
||||
setup_logging("forecasting-service", "INFO")
|
||||
logger = logging.getLogger(__name__)
|
||||
logger = structlog.get_logger()
|
||||
|
||||
# Create FastAPI app
|
||||
app = FastAPI(
|
||||
|
||||
@@ -12,3 +12,4 @@ prometheus-client==0.17.1
|
||||
python-json-logger==2.0.4
|
||||
pytz==2023.3
|
||||
python-logstash==0.4.8
|
||||
structlog==23.2.0
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
Notification Service
|
||||
"""
|
||||
|
||||
import logging
|
||||
import structlog
|
||||
from fastapi import FastAPI
|
||||
from fastapi.middleware.cors import CORSMiddleware
|
||||
|
||||
@@ -13,7 +13,7 @@ from shared.monitoring.metrics import MetricsCollector
|
||||
|
||||
# Setup logging
|
||||
setup_logging("notification-service", "INFO")
|
||||
logger = logging.getLogger(__name__)
|
||||
logger = structlog.get_logger()
|
||||
|
||||
# Create FastAPI app
|
||||
app = FastAPI(
|
||||
|
||||
@@ -12,3 +12,5 @@ prometheus-client==0.17.1
|
||||
python-json-logger==2.0.4
|
||||
pytz==2023.3
|
||||
python-logstash==0.4.8
|
||||
structlog==23.2.0
|
||||
structlog==23.2.0
|
||||
@@ -2,7 +2,7 @@
|
||||
Tenant Service
|
||||
"""
|
||||
|
||||
import logging
|
||||
import structlog
|
||||
from fastapi import FastAPI
|
||||
from fastapi.middleware.cors import CORSMiddleware
|
||||
|
||||
@@ -13,7 +13,7 @@ from shared.monitoring.metrics import MetricsCollector
|
||||
|
||||
# Setup logging
|
||||
setup_logging("tenant-service", "INFO")
|
||||
logger = logging.getLogger(__name__)
|
||||
logger = structlog.get_logger()
|
||||
|
||||
# Create FastAPI app
|
||||
app = FastAPI(
|
||||
|
||||
@@ -12,3 +12,4 @@ prometheus-client==0.17.1
|
||||
python-json-logger==2.0.4
|
||||
pytz==2023.3
|
||||
python-logstash==0.4.8
|
||||
structlog==23.2.0
|
||||
@@ -5,14 +5,14 @@ Models API endpoints
|
||||
from fastapi import APIRouter, Depends, HTTPException, status
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from typing import List
|
||||
import logging
|
||||
import structlog
|
||||
|
||||
from app.core.database import get_db
|
||||
from app.core.auth import verify_token
|
||||
from app.schemas.training import TrainedModelResponse
|
||||
from app.services.training_service import TrainingService
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
logger = structlog.get_logger()
|
||||
router = APIRouter()
|
||||
|
||||
training_service = TrainingService()
|
||||
|
||||
@@ -5,14 +5,14 @@ Training API endpoints
|
||||
from fastapi import APIRouter, Depends, HTTPException, status, Query
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from typing import List, Optional
|
||||
import logging
|
||||
import structlog
|
||||
|
||||
from app.core.database import get_db
|
||||
from app.core.auth import verify_token
|
||||
from app.schemas.training import TrainingRequest, TrainingJobResponse, TrainedModelResponse
|
||||
from app.services.training_service import TrainingService
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
logger = structlog.get_logger()
|
||||
router = APIRouter()
|
||||
|
||||
training_service = TrainingService()
|
||||
|
||||
@@ -5,11 +5,11 @@ Authentication utilities for training service
|
||||
import httpx
|
||||
from fastapi import HTTPException, status, Depends
|
||||
from fastapi.security import HTTPBearer
|
||||
import logging
|
||||
import structlog
|
||||
|
||||
from app.core.config import settings
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
logger = structlog.get_logger()
|
||||
|
||||
security = HTTPBearer()
|
||||
|
||||
|
||||
@@ -3,20 +3,20 @@ Training Service
|
||||
Handles ML model training for bakery demand forecasting
|
||||
"""
|
||||
|
||||
import logging
|
||||
import structlog
|
||||
from fastapi import FastAPI, BackgroundTasks
|
||||
from fastapi.middleware.cors import CORSMiddleware
|
||||
|
||||
from app.core.config import settings
|
||||
from app.core.database import database_manager
|
||||
from app.api import training, models
|
||||
from app.services.messaging import message_publisher
|
||||
from app.services.messaging import setup_messaging, cleanup_messaging
|
||||
from shared.monitoring.logging import setup_logging
|
||||
from shared.monitoring.metrics import MetricsCollector
|
||||
|
||||
# Setup logging
|
||||
setup_logging("training-service", settings.LOG_LEVEL)
|
||||
logger = logging.getLogger(__name__)
|
||||
logger = structlog.get_logger()
|
||||
|
||||
# Create FastAPI app
|
||||
app = FastAPI(
|
||||
@@ -50,7 +50,7 @@ async def startup_event():
|
||||
await database_manager.create_tables()
|
||||
|
||||
# Initialize message publisher
|
||||
await message_publisher.connect()
|
||||
await setup_messaging()
|
||||
|
||||
# Start metrics server
|
||||
metrics_collector.start_metrics_server(8080)
|
||||
@@ -63,7 +63,7 @@ async def shutdown_event():
|
||||
logger.info("Shutting down Training Service")
|
||||
|
||||
# Cleanup message publisher
|
||||
await message_publisher.disconnect()
|
||||
await cleanup_messaging()
|
||||
|
||||
logger.info("Training Service shutdown complete")
|
||||
|
||||
|
||||
@@ -3,7 +3,7 @@ ML Training implementation
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
import structlog
|
||||
from typing import Dict, Any, List
|
||||
import pandas as pd
|
||||
from datetime import datetime
|
||||
@@ -15,7 +15,7 @@ from sklearn.metrics import mean_absolute_error, mean_squared_error, r2_score
|
||||
|
||||
from app.core.config import settings
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
logger = structlog.get_logger()
|
||||
|
||||
class MLTrainer:
|
||||
"""ML training implementation"""
|
||||
|
||||
@@ -1,12 +1,15 @@
|
||||
# ================================================================
|
||||
# services/training/app/services/messaging.py
|
||||
# ================================================================
|
||||
"""
|
||||
Training service messaging - Uses shared RabbitMQ client only
|
||||
Messaging service for training service
|
||||
"""
|
||||
|
||||
import structlog
|
||||
from shared.messaging.rabbitmq import RabbitMQClient
|
||||
from app.core.config import settings
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
logger = structlog.get_logger()
|
||||
|
||||
# Single global instance
|
||||
training_publisher = RabbitMQClient(settings.RABBITMQ_URL, "training-service")
|
||||
@@ -25,14 +28,22 @@ async def cleanup_messaging():
|
||||
logger.info("Training service messaging cleaned up")
|
||||
|
||||
# Convenience functions for training-specific events
|
||||
async def publish_training_started(data: dict) -> bool:
|
||||
async def publish_training_started(job_data: dict) -> bool:
|
||||
"""Publish training started event"""
|
||||
return await training_publisher.publish_training_event("started", data)
|
||||
return await training_publisher.publish_training_event("started", job_data)
|
||||
|
||||
async def publish_training_completed(data: dict) -> bool:
|
||||
async def publish_training_completed(job_data: dict) -> bool:
|
||||
"""Publish training completed event"""
|
||||
return await training_publisher.publish_training_event("completed", data)
|
||||
return await training_publisher.publish_training_event("completed", job_data)
|
||||
|
||||
async def publish_training_failed(data: dict) -> bool:
|
||||
async def publish_training_failed(job_data: dict) -> bool:
|
||||
"""Publish training failed event"""
|
||||
return await training_publisher.publish_training_event("failed", data)
|
||||
return await training_publisher.publish_training_event("failed", job_data)
|
||||
|
||||
async def publish_model_validated(model_data: dict) -> bool:
|
||||
"""Publish model validation event"""
|
||||
return await training_publisher.publish_training_event("model.validated", model_data)
|
||||
|
||||
async def publish_model_saved(model_data: dict) -> bool:
|
||||
"""Publish model saved event"""
|
||||
return await training_publisher.publish_training_event("model.saved", model_data)
|
||||
@@ -1,9 +1,8 @@
|
||||
"""
|
||||
Training service business logic
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
import structlog
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from typing import Dict, Any, List, Optional
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
@@ -16,10 +15,9 @@ from app.core.config import settings
|
||||
from app.models.training import TrainingJob, TrainedModel, TrainingLog
|
||||
from app.schemas.training import TrainingRequest, TrainingJobResponse, TrainedModelResponse
|
||||
from app.ml.trainer import MLTrainer
|
||||
from app.services.messaging import message_publisher
|
||||
from shared.messaging.events import TrainingStartedEvent, TrainingCompletedEvent, TrainingFailedEvent
|
||||
from app.services.messaging import publish_training_started, publish_training_completed, publish_training_failed
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
logger = structlog.get_logger()
|
||||
|
||||
class TrainingService:
|
||||
"""Training service business logic"""
|
||||
@@ -46,8 +44,8 @@ class TrainingService:
|
||||
progress=0,
|
||||
current_step="Queued for training",
|
||||
requested_by=user_data.get("user_id"),
|
||||
training_data_from= datetime.now(timezone.utc) - timedelta(days=request.training_days),
|
||||
training_data_to= datetime.now(timezone.utc)
|
||||
training_data_from=datetime.now(timezone.utc) - timedelta(days=request.training_days),
|
||||
training_data_to=datetime.now(timezone.utc)
|
||||
)
|
||||
|
||||
db.add(training_job)
|
||||
@@ -57,24 +55,20 @@ class TrainingService:
|
||||
# Start training in background
|
||||
asyncio.create_task(self._execute_training(training_job.id, request, db))
|
||||
|
||||
# Publish training started event
|
||||
await message_publisher.publish_event(
|
||||
"training_events",
|
||||
"training.started",
|
||||
TrainingStartedEvent(
|
||||
event_id=str(uuid.uuid4()),
|
||||
service_name="training-service",
|
||||
timestamp= datetime.now(timezone.utc),
|
||||
data={
|
||||
# Publish training started event - SIMPLIFIED
|
||||
event_data = {
|
||||
"job_id": str(training_job.id),
|
||||
"tenant_id": tenant_id,
|
||||
"requested_by": user_data.get("user_id"),
|
||||
"training_days": request.training_days
|
||||
"training_days": request.training_days,
|
||||
"timestamp": datetime.now(timezone.utc).isoformat()
|
||||
}
|
||||
).__dict__
|
||||
)
|
||||
|
||||
logger.info(f"Training job started: {training_job.id} for tenant: {tenant_id}")
|
||||
success = await publish_training_started(event_data)
|
||||
if not success:
|
||||
logger.warning("Failed to publish training started event", job_id=str(training_job.id))
|
||||
|
||||
logger.info("Training job started", job_id=str(training_job.id), tenant_id=tenant_id)
|
||||
|
||||
return TrainingJobResponse(
|
||||
id=str(training_job.id),
|
||||
@@ -229,47 +223,39 @@ class TrainingService:
|
||||
await self._save_trained_models(job_id, models_result, validation_result, db)
|
||||
|
||||
# Complete job
|
||||
duration = int(( datetime.now(timezone.utc) - start_time).total_seconds())
|
||||
duration = int((datetime.now(timezone.utc) - start_time).total_seconds())
|
||||
await self._complete_job(job_id, models_result, validation_result, duration, db)
|
||||
|
||||
# Publish completion event
|
||||
await message_publisher.publish_event(
|
||||
"training_events",
|
||||
"training.completed",
|
||||
TrainingCompletedEvent(
|
||||
event_id=str(uuid.uuid4()),
|
||||
service_name="training-service",
|
||||
timestamp= datetime.now(timezone.utc),
|
||||
data={
|
||||
# Publish completion event - SIMPLIFIED
|
||||
event_data = {
|
||||
"job_id": str(job_id),
|
||||
"models_trained": len(models_result),
|
||||
"duration_seconds": duration
|
||||
"duration_seconds": duration,
|
||||
"timestamp": datetime.now(timezone.utc).isoformat()
|
||||
}
|
||||
).__dict__
|
||||
)
|
||||
|
||||
logger.info(f"Training job completed: {job_id}")
|
||||
success = await publish_training_completed(event_data)
|
||||
if not success:
|
||||
logger.warning("Failed to publish training completed event", job_id=str(job_id))
|
||||
|
||||
logger.info("Training job completed", job_id=str(job_id))
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Training job failed: {job_id} - {e}")
|
||||
logger.error("Training job failed", job_id=str(job_id), error=str(e))
|
||||
|
||||
# Update job as failed
|
||||
await self._update_job_status(job_id, "failed", 0, f"Training failed: {str(e)}", db)
|
||||
|
||||
# Publish failure event
|
||||
await message_publisher.publish_event(
|
||||
"training_events",
|
||||
"training.failed",
|
||||
TrainingFailedEvent(
|
||||
event_id=str(uuid.uuid4()),
|
||||
service_name="training-service",
|
||||
timestamp= datetime.now(timezone.utc),
|
||||
data={
|
||||
# Publish failure event - SIMPLIFIED
|
||||
event_data = {
|
||||
"job_id": str(job_id),
|
||||
"error": str(e)
|
||||
"error": str(e),
|
||||
"timestamp": datetime.now(timezone.utc).isoformat()
|
||||
}
|
||||
).__dict__
|
||||
)
|
||||
|
||||
success = await publish_training_failed(event_data)
|
||||
if not success:
|
||||
logger.warning("Failed to publish training failed event", job_id=str(job_id))
|
||||
|
||||
async def _update_job_status(self, job_id: str, status: str, progress: int, current_step: str, db: AsyncSession):
|
||||
"""Update training job status"""
|
||||
@@ -281,7 +267,7 @@ class TrainingService:
|
||||
status=status,
|
||||
progress=progress,
|
||||
current_step=current_step,
|
||||
updated_at= datetime.now(timezone.utc)
|
||||
updated_at=datetime.now(timezone.utc)
|
||||
)
|
||||
)
|
||||
await db.commit()
|
||||
@@ -312,7 +298,7 @@ class TrainingService:
|
||||
raise Exception(f"Failed to get training data: {response.status_code}")
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting training data: {e}")
|
||||
logger.error("Error getting training data", error=str(e))
|
||||
raise
|
||||
|
||||
async def _save_trained_models(self, job_id: str, models_result: Dict[str, Any], validation_result: Dict[str, Any], db: AsyncSession):
|
||||
@@ -374,7 +360,7 @@ class TrainingService:
|
||||
status="completed",
|
||||
progress=100,
|
||||
current_step="Training completed successfully",
|
||||
completed_at= datetime.now(timezone.utc),
|
||||
completed_at=datetime.now(timezone.utc),
|
||||
duration_seconds=duration,
|
||||
models_trained=models_result,
|
||||
metrics=metrics,
|
||||
|
||||
@@ -24,3 +24,4 @@ pytz==2023.3
|
||||
python-dateutil==2.8.2
|
||||
|
||||
python-logstash==0.4.8
|
||||
structlog==23.2.0
|
||||
@@ -6,7 +6,7 @@ import json
|
||||
from typing import Dict, Any, Callable, Optional
|
||||
from datetime import datetime, date
|
||||
import uuid
|
||||
import logstash
|
||||
import structlog
|
||||
|
||||
try:
|
||||
import aio_pika
|
||||
@@ -15,7 +15,7 @@ try:
|
||||
except ImportError:
|
||||
AIO_PIKA_AVAILABLE = False
|
||||
|
||||
logger = logstash.get_logger()
|
||||
logger = structlog.get_logger()
|
||||
|
||||
def json_serializer(obj):
|
||||
"""JSON serializer for objects not serializable by default json code"""
|
||||
|
||||
Reference in New Issue
Block a user