Initial commit - production deployment
services/pos/app/main.py (new file, 218 lines)
@@ -0,0 +1,218 @@
"""
POS Integration Service
Handles integration with external POS systems (Square, Toast, Lightspeed)
"""

import time
from fastapi import FastAPI, Request
from sqlalchemy import text
from app.core.config import settings
from app.api.configurations import router as configurations_router
from app.api.transactions import router as transactions_router
from app.api.pos_operations import router as pos_operations_router
from app.api.analytics import router as analytics_router
from app.api.audit import router as audit_router
# from app.api.internal_demo import router as internal_demo_router  # REMOVED: Replaced by script-based seed data loading
from app.core.database import database_manager
from shared.service_base import StandardFastAPIService


class POSService(StandardFastAPIService):
    """POS Integration Service with standardized setup"""
    expected_migration_version = "e9976ec9fe9e"

    def __init__(self):
        # Initialize scheduler reference
        self.pos_scheduler = None

        # Define expected database tables for health checks
        pos_expected_tables = [
            'pos_configurations', 'pos_transactions', 'pos_transaction_items',
            'pos_webhook_logs', 'pos_sync_logs'
        ]
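        # NOTE: the standardized health check presumably verifies these tables exist;
        # the shared base class itself is not part of this commit.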

        # Define custom metrics for POS service
        pos_custom_metrics = {
            "pos_webhooks_received_total": {
                "type": "counter",
                "description": "Total POS webhooks received",
                "labels": ["provider", "event_type"]
            },
            "pos_sync_jobs_total": {
                "type": "counter",
                "description": "Total POS sync jobs",
                "labels": ["provider", "status"]
            },
            "pos_transactions_synced_total": {
                "type": "counter",
                "description": "Total transactions synced",
                "labels": ["provider"]
            },
            "pos_webhook_processing_duration_seconds": {
                "type": "histogram",
                "description": "Time spent processing webhooks"
            },
            "pos_sync_duration_seconds": {
                "type": "histogram",
                "description": "Time spent syncing data"
            }
        }
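
        # NOTE: these definitions are presumably registered by StandardFastAPIService
        # as Prometheus counters/histograms with the label names listed above.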

        super().__init__(
            service_name="pos-service",
            app_name="POS Integration Service",
            description="Handles integration with external POS systems",
            version="1.0.0",
            cors_origins=settings.CORS_ORIGINS,
            api_prefix="",  # Empty because RouteBuilder already includes /api/v1
            database_manager=database_manager,
            expected_tables=pos_expected_tables,
            custom_metrics=pos_custom_metrics
        )

    async def verify_migrations(self):
        """Verify database schema matches the latest migrations."""
        try:
            async with self.database_manager.get_session() as session:
                result = await session.execute(text("SELECT version_num FROM alembic_version"))
                version = result.scalar()
                if version != self.expected_migration_version:
                    self.logger.error(f"Migration version mismatch: expected {self.expected_migration_version}, got {version}")
                    raise RuntimeError(f"Migration version mismatch: expected {self.expected_migration_version}, got {version}")
                self.logger.info(f"Migration verification successful: {version}")
        except Exception as e:
            self.logger.error(f"Migration verification failed: {e}")
            raise

    async def on_startup(self, app: FastAPI):
        """Custom startup logic for POS service"""
        # Verify migrations first
        await self.verify_migrations()

        # Call parent startup
        await super().on_startup(app)

        # Start background scheduler for POS-to-Sales sync with leader election
        try:
            from app.scheduler import POSScheduler
            self.pos_scheduler = POSScheduler(
                redis_url=settings.REDIS_URL,  # Pass Redis URL for leader election
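                # SYNC_INTERVAL_SECONDS is converted to whole minutes; values under
                # 60 seconds fall back to a 5-minute sync interval.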
                sync_interval_minutes=settings.SYNC_INTERVAL_SECONDS // 60 if settings.SYNC_INTERVAL_SECONDS >= 60 else 5
            )
            await self.pos_scheduler.start()
            self.logger.info("POS scheduler started successfully with leader election")

            # Store scheduler in app state for status checks
            app.state.pos_scheduler = self.pos_scheduler
        except Exception as e:
            self.logger.error(f"Failed to start POS scheduler: {e}", exc_info=True)
            # Don't fail startup if scheduler fails

        # Custom startup completed
        self.logger.info("POS Integration Service started successfully")

    async def on_shutdown(self, app: FastAPI):
        """Custom shutdown logic for POS service"""
        # Shutdown POS scheduler
        try:
            if self.pos_scheduler:
                await self.pos_scheduler.stop()
                self.logger.info("POS scheduler stopped successfully")
        except Exception as e:
            self.logger.error(f"Failed to stop POS scheduler: {e}", exc_info=True)

        # Database cleanup is handled by the base class
        pass

    def get_service_features(self):
        """Return POS-specific features"""
        return [
            "pos_integration",
            "square_support",
            "toast_support",
            "lightspeed_support",
            "webhook_handling",
            "transaction_sync",
            "real_time_updates"
        ]
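        # NOTE: presumably surfaced by the base class's standard service-info endpoint;
        # not referenced elsewhere in this module.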

    def setup_custom_middleware(self):
        """Setup custom middleware for POS service"""
        # Middleware for request logging and timing
        @self.app.middleware("http")
        async def log_requests(request: Request, call_next):
            start_time = time.time()

            # Log request
            self.logger.info(
                "Incoming request",
                method=request.method,
                url=str(request.url),
                client_ip=request.client.host if request.client else None
            )

            response = await call_next(request)

            # Log response
            process_time = time.time() - start_time
            self.logger.info(
                "Request completed",
                method=request.method,
                url=str(request.url),
                status_code=response.status_code,
                process_time=f"{process_time:.4f}s"
            )
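
            # Expose server-side processing time (in seconds) to callers via a response header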
            response.headers["X-Process-Time"] = str(process_time)
            return response

    def setup_custom_endpoints(self):
        """Setup custom endpoints for POS service"""
        @self.app.get("/")
        async def root():
            """Root endpoint"""
            return {
                "service": "POS Integration Service",
                "version": "1.0.0",
                "status": "running",
                "supported_pos_systems": ["square", "toast", "lightspeed"]
            }


# Create service instance
service = POSService()

# Create FastAPI app with standardized setup
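# Interactive docs (/docs, /redoc) are disabled when ENVIRONMENT is "production"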
app = service.create_app(
    docs_url="/docs" if settings.ENVIRONMENT != "production" else None,
    redoc_url="/redoc" if settings.ENVIRONMENT != "production" else None
)

# Setup standard endpoints
service.setup_standard_endpoints()

# Setup custom middleware
service.setup_custom_middleware()

# Setup custom endpoints
service.setup_custom_endpoints()

# Include routers
# IMPORTANT: Register audit router FIRST to avoid route matching conflicts
service.add_router(audit_router, tags=["audit-logs"])
service.add_router(configurations_router, tags=["pos-configurations"])
service.add_router(transactions_router, tags=["pos-transactions"])
service.add_router(pos_operations_router, tags=["pos-operations"])
service.add_router(analytics_router, tags=["pos-analytics"])
# service.add_router(internal_demo_router, tags=["internal-demo"])  # REMOVED: Replaced by script-based seed data loading


if __name__ == "__main__":
    import uvicorn
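    # reload=True enables auto-reload for local development; a production deployment
    # presumably starts this app through its own entrypoint rather than this block.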
    uvicorn.run(
        "main:app",
        host="0.0.0.0",
        port=8000,
        reload=True
    )