# ================================================================
# services/procurement/app/main.py
# ================================================================
"""
Procurement Service - FastAPI Application
Procurement planning, purchase order management, and supplier integration
"""
from fastapi import FastAPI, Request
from sqlalchemy import text
from app.core.config import settings
from app.core.database import database_manager
from shared.service_base import StandardFastAPIService
from app.jobs.overdue_po_scheduler import OverduePOScheduler
class ProcurementService(StandardFastAPIService):
    """Procurement Service with standardized setup.

    Builds the procurement FastAPI application on top of the shared
    StandardFastAPIService base: database table health checks, RabbitMQ
    event publishing, a delivery-tracking background service, optional
    Redis caching, and an hourly overdue-purchase-order scheduler.
    """

    # Alembic revision the database must be at; startup aborts on any
    # mismatch so the service never runs against an out-of-date schema.
    expected_migration_version = "001_unified_initial_schema"

    async def verify_migrations(self):
        """Verify database schema matches the latest migrations.

        Reads the revision stored in Alembic's ``alembic_version`` table
        and compares it against ``expected_migration_version``.

        Raises:
            RuntimeError: If the stored revision differs from the expected one.
            Exception: Any database error is logged and re-raised.
        """
        try:
            async with self.database_manager.get_session() as session:
                result = await session.execute(text("SELECT version_num FROM alembic_version"))
                version = result.scalar()
                if version != self.expected_migration_version:
                    self.logger.error(f"Migration version mismatch: expected {self.expected_migration_version}, got {version}")
                    raise RuntimeError(f"Migration version mismatch: expected {self.expected_migration_version}, got {version}")
                self.logger.info(f"Migration verification successful: {version}")
        except Exception as e:
            # Re-raise so the caller fails fast instead of running against
            # an unverified schema.
            self.logger.error(f"Migration verification failed: {e}")
            raise

    def __init__(self):
        # Define expected database tables for health checks
        procurement_expected_tables = [
            'procurement_plans',
            'procurement_requirements',
            'purchase_orders',
            'purchase_order_items',
            'deliveries',
            'delivery_items',
            'supplier_invoices',
            'replenishment_plans',
            'replenishment_plan_items',
            'inventory_projections',
            'supplier_allocations',
            'supplier_selection_history'
        ]
        # Initialize scheduler, delivery tracking, and rabbitmq client.
        # These stay None until startup/messaging setup creates them; the
        # shutdown path checks for None before attempting cleanup.
        self.overdue_po_scheduler = None
        self.delivery_tracking_service = None
        self.rabbitmq_client = None
        self.event_publisher = None
        super().__init__(
            service_name="procurement-service",
            app_name=settings.APP_NAME,
            description=settings.DESCRIPTION,
            version=settings.VERSION,
            api_prefix="",  # Empty because RouteBuilder already includes /api/v1
            database_manager=database_manager,
            expected_tables=procurement_expected_tables,
            enable_messaging=True  # Enable RabbitMQ for event publishing
        )

    async def _setup_messaging(self):
        """Setup messaging for procurement service.

        Connects a RabbitMQ client and wraps it in a UnifiedEventPublisher.
        Failures are logged and re-raised so the caller can decide whether
        startup may continue.
        """
        from shared.messaging import RabbitMQClient, UnifiedEventPublisher
        try:
            self.rabbitmq_client = RabbitMQClient(settings.RABBITMQ_URL, service_name="procurement-service")
            await self.rabbitmq_client.connect()
            # Create unified event publisher
            self.event_publisher = UnifiedEventPublisher(self.rabbitmq_client, "procurement-service")
            self.logger.info("Procurement service messaging setup completed")
        except Exception as e:
            self.logger.error("Failed to setup procurement messaging", error=str(e))
            raise

    async def _cleanup_messaging(self):
        """Cleanup messaging for procurement service.

        Best-effort: disconnect errors are logged, never raised, so the
        rest of shutdown can proceed.
        """
        try:
            if self.rabbitmq_client:
                await self.rabbitmq_client.disconnect()
            self.logger.info("Procurement service messaging cleanup completed")
        except Exception as e:
            self.logger.error("Error during procurement messaging cleanup", error=str(e))

    async def on_startup(self, app: FastAPI):
        """Custom startup logic for procurement service.

        Order matters: base startup first (DB/messaging), then the delivery
        tracking service, then optional Redis caching, and finally the
        overdue-PO scheduler, which requires a live RabbitMQ connection.
        """
        await super().on_startup(app)
        self.logger.info("Procurement Service starting up...")
        # Start delivery tracking service (APScheduler with leader election)
        from app.services.delivery_tracking_service import DeliveryTrackingService
        self.delivery_tracking_service = DeliveryTrackingService(
            event_publisher=self.event_publisher,
            config=settings,
            database_manager=self.database_manager
        )
        await self.delivery_tracking_service.start()
        self.logger.info("Delivery tracking service started")
        # Initialize Redis for caching (optional - service can run without Redis)
        from shared.redis_utils import initialize_redis, get_redis_client
        try:
            redis_url = settings.REDIS_URL  # Use configured Redis URL with TLS and auth
            await initialize_redis(redis_url, db=settings.REDIS_DB, max_connections=settings.REDIS_MAX_CONNECTIONS)
            redis_client = await get_redis_client()
            # Log only the part after '@' so credentials never reach the logs
            self.logger.info("Redis initialized successfully for procurement service",
                             redis_url=redis_url.split("@")[-1], db=settings.REDIS_DB)
        except Exception as e:
            # 'redis_url' may be unbound here if settings.REDIS_URL itself raised
            self.logger.warning("Failed to initialize Redis for caching, service will continue without caching",
                                error=str(e), redis_url=redis_url.split("@")[-1] if 'redis_url' in locals() else "unknown")
            redis_client = None
        # Store in app state for internal API access
        app.state.delivery_tracking_service = self.delivery_tracking_service
        app.state.event_publisher = self.event_publisher
        app.state.redis_client = redis_client
        # Start overdue PO scheduler
        if self.rabbitmq_client and self.rabbitmq_client.connected:
            self.overdue_po_scheduler = OverduePOScheduler(
                rabbitmq_client=self.rabbitmq_client,
                check_interval_seconds=3600  # Check every hour
            )
            await self.overdue_po_scheduler.start()
            self.logger.info("Overdue PO scheduler started")
        else:
            self.logger.warning("RabbitMQ not available, overdue PO scheduler not started")

    async def on_shutdown(self, app: FastAPI):
        """Custom shutdown logic for procurement service.

        Tears down Redis, the delivery tracking service, and the overdue-PO
        scheduler before delegating to the base-class shutdown.
        """
        self.logger.info("Procurement Service shutting down...")
        # Close Redis connections (if initialized)
        try:
            from shared.redis_utils import close_redis
            await close_redis()
            self.logger.info("Redis connections closed")
        except Exception as e:
            # Redis is optional, so cleanup failure is only debug-logged
            self.logger.debug("Redis cleanup failed or Redis was not initialized", error=str(e))
        # Stop delivery tracking service
        if self.delivery_tracking_service:
            await self.delivery_tracking_service.stop()
            self.logger.info("Delivery tracking service stopped")
        # Stop overdue PO scheduler
        if self.overdue_po_scheduler:
            await self.overdue_po_scheduler.stop()
            self.logger.info("Overdue PO scheduler stopped")
        await super().on_shutdown(app)

    def get_service_features(self):
        """Return the list of procurement-specific feature identifiers."""
        return [
            "procurement_planning",
            "purchase_order_management",
            "delivery_tracking",
            "invoice_management",
            "supplier_integration",
            "local_production_support",
            "recipe_explosion"
        ]
# ----------------------------------------------------------------
# Module-level wiring: build the service, the ASGI app, and routes.
# Registration order matters for route matching — do not reorder.
# ----------------------------------------------------------------

# Create service instance
service = ProcurementService()
# Create FastAPI app with standardized setup
app = service.create_app()
# Setup standard endpoints (health, readiness, metrics)
service.setup_standard_endpoints()

# Include routers (each RouteBuilder router carries its own full path prefix)
from app.api.procurement_plans import router as procurement_plans_router
from app.api.purchase_orders import router as purchase_orders_router
from app.api import internal_transfer  # Internal Transfer Routes
from app.api import replenishment  # Enhanced Replenishment Planning Routes
from app.api import analytics  # Procurement Analytics Routes
from app.api import internal_delivery  # Internal Delivery Tracking Routes
from app.api import ml_insights  # ML insights endpoint
from app.api import internal_demo  # Internal demo data cloning
from app.api.expected_deliveries import router as expected_deliveries_router  # Expected Deliveries Routes
from app.api.internal_delivery_tracking import router as internal_delivery_tracking_router  # NEW: Internal trigger endpoint

service.add_router(procurement_plans_router)
service.add_router(purchase_orders_router)
service.add_router(internal_transfer.router, tags=["internal-transfer"])  # Internal transfer routes
service.add_router(replenishment.router, tags=["replenishment"])  # RouteBuilder already includes full path
service.add_router(analytics.router, tags=["analytics"])  # RouteBuilder already includes full path
service.add_router(internal_demo.router, tags=["internal-demo"])  # Internal demo data cloning
service.add_router(internal_delivery.router, tags=["internal-delivery"])  # Internal delivery tracking
service.add_router(internal_delivery_tracking_router, tags=["internal-delivery-tracking"])  # NEW: Delivery alert trigger
service.add_router(ml_insights.router)  # ML insights endpoint
service.add_router(ml_insights.internal_router)  # Internal ML insights endpoint
service.add_router(expected_deliveries_router, tags=["expected-deliveries"])  # Expected deliveries endpoint
@app.middleware("http")
async def logging_middleware(request: Request, call_next):
    """Log each HTTP request with method, URL, status code, and latency.

    Args:
        request: The incoming HTTP request.
        call_next: Continuation that invokes the downstream handler.

    Returns:
        The downstream handler's response, unmodified.
    """
    import time
    # Use perf_counter (monotonic) rather than time.time(): wall-clock time
    # can jump backwards on NTP adjustments and yield negative durations.
    start_time = time.perf_counter()
    response = await call_next(request)
    process_time = time.perf_counter() - start_time
    service.logger.info("HTTP request processed",
                        method=request.method,
                        url=str(request.url),
                        status_code=response.status_code,
                        process_time=round(process_time, 4))
    return response
if __name__ == "__main__":
    # Local development entry point; production deployments are expected to
    # launch the app via an external ASGI server instead.
    import uvicorn

    uvicorn.run("main:app", host="0.0.0.0", port=8000, reload=settings.DEBUG)