Add DEMO feature to the project
42  services/demo_session/Dockerfile  Normal file
@@ -0,0 +1,42 @@
# Multi-stage build for Demo Session Service
FROM python:3.11-slim as builder

WORKDIR /app

# Install build dependencies
RUN apt-get update && apt-get install -y --no-install-recommends \
    gcc \
    g++ \
    && rm -rf /var/lib/apt/lists/*

# Copy requirements and install
COPY services/demo_session/requirements.txt .
RUN pip install --no-cache-dir --user -r requirements.txt

# Final stage
FROM python:3.11-slim

WORKDIR /app

# Copy Python dependencies from builder
COPY --from=builder /root/.local /root/.local

# Copy shared libraries
COPY shared/ /app/shared/

# Copy service code
COPY services/demo_session/ /app/

# Copy scripts
COPY scripts/ /app/scripts/

# Make sure scripts are in path
ENV PATH=/root/.local/bin:$PATH
ENV PYTHONPATH=/app:$PYTHONPATH

# Health check
HEALTHCHECK --interval=30s --timeout=10s --start-period=40s --retries=3 \
    CMD python -c "import httpx; httpx.get('http://localhost:8000/health')"

# Run the application
CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"]
40  services/demo_session/alembic.ini  Normal file
@@ -0,0 +1,40 @@
[alembic]
script_location = migrations
prepend_sys_path = .
sqlalchemy.url = postgresql+asyncpg://postgres:postgres@localhost:5432/demo_session_db

[post_write_hooks]

[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
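Note that `script_location` points at the `migrations/` package below, and the hardcoded `sqlalchemy.url` is only a local fallback: `migrations/env.py` (later in this commit) overrides it from `DEMO_SESSION_DATABASE_URL`. A sketch of driving the migrations from Python rather than the alembic CLI, assuming the working directory is services/demo_session:

# Sketch: apply this service's migrations programmatically.
# Assumes cwd is services/demo_session so "alembic.ini" resolves, and that
# DEMO_SESSION_DATABASE_URL is exported (env.py rewrites sqlalchemy.url from it).
from alembic import command
from alembic.config import Config

cfg = Config("alembic.ini")
command.upgrade(cfg, "head")  # apply all pending migrations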
3  services/demo_session/app/__init__.py  Normal file
@@ -0,0 +1,3 @@
"""Demo Session Service"""

__version__ = "1.0.0"
5  services/demo_session/app/api/__init__.py  Normal file
@@ -0,0 +1,5 @@
"""Demo Session API"""

from .routes import router

__all__ = ["router"]
254  services/demo_session/app/api/routes.py  Normal file
@@ -0,0 +1,254 @@
"""
Demo Session API Routes
"""

from fastapi import APIRouter, Depends, HTTPException, Request
from sqlalchemy.ext.asyncio import AsyncSession
from typing import List
import structlog

from app.api.schemas import (
    DemoSessionCreate,
    DemoSessionResponse,
    DemoSessionExtend,
    DemoSessionDestroy,
    DemoSessionStats,
    DemoAccountInfo
)
from app.services import DemoSessionManager, DemoDataCloner, DemoCleanupService
from app.core import get_db, get_redis, settings, RedisClient

logger = structlog.get_logger()

router = APIRouter(prefix="/api/demo", tags=["demo"])


@router.get("/accounts", response_model=List[DemoAccountInfo])
async def get_demo_accounts():
    """
    Get public demo account information
    Returns credentials for prospects to use
    """
    accounts = []

    for account_type, config in settings.DEMO_ACCOUNTS.items():
        accounts.append({
            "account_type": account_type,
            "name": config["name"],
            "email": config["email"],
            "password": "DemoSanPablo2024!" if "sanpablo" in config["email"] else "DemoLaEspiga2024!",
            "description": (
                "Panadería individual que produce todo localmente"
                if account_type == "individual_bakery"
                else "Punto de venta con obrador central"
            ),
            "features": (
                ["Gestión de Producción", "Recetas", "Inventario", "Previsión de Demanda", "Ventas"]
                if account_type == "individual_bakery"
                else ["Gestión de Proveedores", "Inventario", "Ventas", "Pedidos", "Previsión"]
            ),
            "business_model": (
                "Producción Local" if account_type == "individual_bakery" else "Obrador Central + Punto de Venta"
            )
        })

    return accounts


@router.post("/session/create", response_model=DemoSessionResponse)
async def create_demo_session(
    request: DemoSessionCreate,
    http_request: Request,
    db: AsyncSession = Depends(get_db),
    redis: RedisClient = Depends(get_redis)
):
    """
    Create a new isolated demo session
    """
    logger.info("Creating demo session", demo_account_type=request.demo_account_type)

    try:
        # Get client info
        ip_address = request.ip_address or http_request.client.host
        user_agent = request.user_agent or http_request.headers.get("user-agent", "")

        # Create session
        session_manager = DemoSessionManager(db, redis)
        session = await session_manager.create_session(
            demo_account_type=request.demo_account_type,
            user_id=request.user_id,
            ip_address=ip_address,
            user_agent=user_agent
        )

        # Clone demo data using Kubernetes Job (better architecture)
        from app.services.k8s_job_cloner import K8sJobCloner

        job_cloner = K8sJobCloner()

        # Trigger async cloning job (don't wait for completion)
        import asyncio
        asyncio.create_task(
            job_cloner.clone_tenant_data(
                session.session_id,
                "",  # base_tenant_id not used in job approach
                str(session.virtual_tenant_id),
                request.demo_account_type
            )
        )

        # Mark as data cloning started
        await session_manager.mark_data_cloned(session.session_id)
        await session_manager.mark_redis_populated(session.session_id)

        # Generate session token (simple JWT-like format)
        import jwt
        from datetime import datetime, timezone

        session_token = jwt.encode(
            {
                "session_id": session.session_id,
                "virtual_tenant_id": str(session.virtual_tenant_id),
                "demo_account_type": request.demo_account_type,
                "exp": session.expires_at.timestamp()
            },
            "demo-secret-key",  # In production, use proper secret
            algorithm="HS256"
        )

        return {
            "session_id": session.session_id,
            "virtual_tenant_id": str(session.virtual_tenant_id),
            "demo_account_type": session.demo_account_type,
            "status": session.status.value,
            "created_at": session.created_at,
            "expires_at": session.expires_at,
            # the model column is session_metadata ("metadata" is reserved by SQLAlchemy)
            "demo_config": session.session_metadata.get("demo_config", {}),
            "session_token": session_token
        }

    except Exception as e:
        logger.error("Failed to create demo session", error=str(e))
        raise HTTPException(status_code=500, detail=f"Failed to create demo session: {str(e)}")


@router.post("/session/extend", response_model=DemoSessionResponse)
async def extend_demo_session(
    request: DemoSessionExtend,
    db: AsyncSession = Depends(get_db),
    redis: RedisClient = Depends(get_redis)
):
    """
    Extend demo session expiration
    """
    try:
        session_manager = DemoSessionManager(db, redis)
        session = await session_manager.extend_session(request.session_id)

        # Generate new token
        import jwt
        session_token = jwt.encode(
            {
                "session_id": session.session_id,
                "virtual_tenant_id": str(session.virtual_tenant_id),
                "demo_account_type": session.demo_account_type,
                "exp": session.expires_at.timestamp()
            },
            "demo-secret-key",
            algorithm="HS256"
        )

        return {
            "session_id": session.session_id,
            "virtual_tenant_id": str(session.virtual_tenant_id),
            "demo_account_type": session.demo_account_type,
            "status": session.status.value,
            "created_at": session.created_at,
            "expires_at": session.expires_at,
            "demo_config": session.session_metadata.get("demo_config", {}),
            "session_token": session_token
        }

    except ValueError as e:
        raise HTTPException(status_code=400, detail=str(e))
    except Exception as e:
        logger.error("Failed to extend session", error=str(e))
        raise HTTPException(status_code=500, detail=str(e))


@router.post("/session/destroy")
async def destroy_demo_session(
    request: DemoSessionDestroy,
    db: AsyncSession = Depends(get_db),
    redis: RedisClient = Depends(get_redis)
):
    """
    Destroy demo session and cleanup resources
    """
    try:
        session_manager = DemoSessionManager(db, redis)
        await session_manager.destroy_session(request.session_id)

        return {"message": "Session destroyed successfully", "session_id": request.session_id}

    except Exception as e:
        logger.error("Failed to destroy session", error=str(e))
        raise HTTPException(status_code=500, detail=str(e))


@router.get("/session/{session_id}")
async def get_session_info(
    session_id: str,
    db: AsyncSession = Depends(get_db),
    redis: RedisClient = Depends(get_redis)
):
    """
    Get demo session information
    """
    session_manager = DemoSessionManager(db, redis)
    session = await session_manager.get_session(session_id)

    if not session:
        raise HTTPException(status_code=404, detail="Session not found")

    return session.to_dict()


@router.get("/stats", response_model=DemoSessionStats)
async def get_demo_stats(
    db: AsyncSession = Depends(get_db),
    redis: RedisClient = Depends(get_redis)
):
    """
    Get demo session statistics
    """
    session_manager = DemoSessionManager(db, redis)
    stats = await session_manager.get_session_stats()
    return stats


@router.post("/cleanup/run")
async def run_cleanup(
    db: AsyncSession = Depends(get_db),
    redis: RedisClient = Depends(get_redis)
):
    """
    Manually trigger session cleanup
    Internal endpoint for CronJob
    """
    cleanup_service = DemoCleanupService(db, redis)
    stats = await cleanup_service.cleanup_expired_sessions()
    return stats


@router.get("/health")
async def health_check(redis: RedisClient = Depends(get_redis)):
    """
    Health check endpoint
    """
    redis_ok = await redis.ping()

    return {
        "status": "healthy" if redis_ok else "degraded",
        "redis": "connected" if redis_ok else "disconnected"
    }
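Taken together, these routes implement a create / inspect / extend / destroy lifecycle. A hedged client-side sketch of that flow, assuming the service is reachable at http://localhost:8000:

# Sketch: exercise the demo-session lifecycle end to end with httpx.
# The base URL is an assumption for local testing.
import asyncio
import httpx

async def demo_lifecycle():
    async with httpx.AsyncClient(base_url="http://localhost:8000") as client:
        # 1. Create an isolated session for the individual-bakery demo account
        created = (await client.post(
            "/api/demo/session/create",
            json={"demo_account_type": "individual_bakery"},
        )).json()
        session_id = created["session_id"]

        # 2. Read it back
        info = (await client.get(f"/api/demo/session/{session_id}")).json()
        print(info["status"], info["expires_at"])

        # 3. Extend it (allowed up to DEMO_SESSION_MAX_EXTENSIONS times)
        await client.post("/api/demo/session/extend", json={"session_id": session_id})

        # 4. Tear it down, which also purges the session's Redis keys
        await client.post("/api/demo/session/destroy", json={"session_id": session_id})

asyncio.run(demo_lifecycle())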
76  services/demo_session/app/api/schemas.py  Normal file
@@ -0,0 +1,76 @@
"""
API Schemas for Demo Session Service
"""

from pydantic import BaseModel, Field
from typing import Optional, Dict, Any
from datetime import datetime


class DemoSessionCreate(BaseModel):
    """Create demo session request"""
    demo_account_type: str = Field(..., description="individual_bakery or central_baker")
    user_id: Optional[str] = Field(None, description="Optional authenticated user ID")
    ip_address: Optional[str] = None
    user_agent: Optional[str] = None


class DemoSessionResponse(BaseModel):
    """Demo session response"""
    session_id: str
    virtual_tenant_id: str
    demo_account_type: str
    status: str
    created_at: datetime
    expires_at: datetime
    demo_config: Dict[str, Any]
    session_token: str

    class Config:
        from_attributes = True


class DemoSessionExtend(BaseModel):
    """Extend session request"""
    session_id: str


class DemoSessionDestroy(BaseModel):
    """Destroy session request"""
    session_id: str


class DemoSessionStats(BaseModel):
    """Demo session statistics"""
    total_sessions: int
    active_sessions: int
    expired_sessions: int
    destroyed_sessions: int
    avg_duration_minutes: float
    total_requests: int


class DemoAccountInfo(BaseModel):
    """Public demo account information"""
    account_type: str
    name: str
    email: str
    password: str
    description: str
    features: list[str]
    business_model: str


class CloneDataRequest(BaseModel):
    """Request to clone tenant data"""
    base_tenant_id: str
    virtual_tenant_id: str
    session_id: str


class CloneDataResponse(BaseModel):
    """Response from data cloning"""
    session_id: str
    services_cloned: list[str]
    total_records: int
    redis_keys: int
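These models validate inbound JSON at the route boundary. A small sketch of that round trip (pydantic v2 API, matching the from_attributes config above):

# Sketch: the schemas double as validators for incoming payloads.
from app.api.schemas import DemoSessionCreate

payload = {"demo_account_type": "individual_bakery"}
req = DemoSessionCreate.model_validate(payload)  # raises ValidationError on bad input
print(req.demo_account_type)             # "individual_bakery"
print(req.model_dump(exclude_none=True)) # {'demo_account_type': 'individual_bakery'}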
7  services/demo_session/app/core/__init__.py  Normal file
@@ -0,0 +1,7 @@
"""Demo Session Service Core"""

from .config import settings
from .database import DatabaseManager, get_db
from .redis_client import RedisClient, get_redis

__all__ = ["settings", "DatabaseManager", "get_db", "RedisClient", "get_redis"]
66  services/demo_session/app/core/config.py  Normal file
@@ -0,0 +1,66 @@
"""
Demo Session Service Configuration
"""

import os
from pydantic_settings import BaseSettings
from typing import Optional


class Settings(BaseSettings):
    """Demo Session Service Settings"""

    # Service info
    SERVICE_NAME: str = "demo-session"
    VERSION: str = "1.0.0"
    DEBUG: bool = os.getenv("DEBUG", "false").lower() == "true"

    # Database
    DATABASE_URL: str = os.getenv(
        "DEMO_SESSION_DATABASE_URL",
        "postgresql+asyncpg://postgres:postgres@localhost:5432/demo_session_db"
    )

    # Redis
    REDIS_URL: str = os.getenv("REDIS_URL", "redis://localhost:6379/0")
    REDIS_KEY_PREFIX: str = "demo:session"
    REDIS_SESSION_TTL: int = 1800  # 30 minutes

    # Demo session configuration
    DEMO_SESSION_DURATION_MINUTES: int = 30
    DEMO_SESSION_MAX_EXTENSIONS: int = 3
    DEMO_SESSION_CLEANUP_INTERVAL_MINUTES: int = 60

    # Demo account credentials (public)
    DEMO_ACCOUNTS: dict = {
        "individual_bakery": {
            "email": "demo.individual@panaderiasanpablo.com",
            "name": "Panadería San Pablo - Demo",
            "subdomain": "demo-sanpablo"
        },
        "central_baker": {
            "email": "demo.central@panaderialaespiga.com",
            "name": "Panadería La Espiga - Demo",
            "subdomain": "demo-laespiga"
        }
    }

    # Service URLs
    AUTH_SERVICE_URL: str = os.getenv("AUTH_SERVICE_URL", "http://auth-service:8000")
    TENANT_SERVICE_URL: str = os.getenv("TENANT_SERVICE_URL", "http://tenant-service:8000")
    INVENTORY_SERVICE_URL: str = os.getenv("INVENTORY_SERVICE_URL", "http://inventory-service:8000")
    RECIPES_SERVICE_URL: str = os.getenv("RECIPES_SERVICE_URL", "http://recipes-service:8000")
    SALES_SERVICE_URL: str = os.getenv("SALES_SERVICE_URL", "http://sales-service:8000")
    ORDERS_SERVICE_URL: str = os.getenv("ORDERS_SERVICE_URL", "http://orders-service:8000")
    PRODUCTION_SERVICE_URL: str = os.getenv("PRODUCTION_SERVICE_URL", "http://production-service:8000")
    SUPPLIERS_SERVICE_URL: str = os.getenv("SUPPLIERS_SERVICE_URL", "http://suppliers-service:8000")

    # Logging
    LOG_LEVEL: str = os.getenv("LOG_LEVEL", "INFO")

    class Config:
        env_file = ".env"
        case_sensitive = True


settings = Settings()
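One subtlety worth noting: the defaults above are evaluated with os.getenv() when the class body executes, so environment overrides must be in place before app.core.config is imported. A sketch:

# Sketch: set overrides before the settings module is imported, because the
# defaults are computed at class-definition time. URLs here are illustrative.
import os

os.environ["DEMO_SESSION_DATABASE_URL"] = "postgresql+asyncpg://demo:demo@db:5432/demo_session_db"
os.environ["REDIS_URL"] = "redis://redis:6379/0"

from app.core.config import settings  # noqa: E402  (import after env setup is intentional)
assert settings.DATABASE_URL.endswith("/demo_session_db")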
61  services/demo_session/app/core/database.py  Normal file
@@ -0,0 +1,61 @@
"""
Database connection management for Demo Session Service
"""

from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession, async_sessionmaker
from sqlalchemy.pool import NullPool
import structlog

from .config import settings

logger = structlog.get_logger()


class DatabaseManager:
    """Database connection manager"""

    def __init__(self, database_url: str = None):
        self.database_url = database_url or settings.DATABASE_URL
        self.engine = None
        self.session_factory = None

    def initialize(self):
        """Initialize database engine and session factory"""
        self.engine = create_async_engine(
            self.database_url,
            echo=settings.DEBUG,
            poolclass=NullPool,
            pool_pre_ping=True
        )

        self.session_factory = async_sessionmaker(
            self.engine,
            class_=AsyncSession,
            expire_on_commit=False,
            autocommit=False,
            autoflush=False
        )

        logger.info("Database manager initialized", database_url=self.database_url.split("@")[-1])

    async def close(self):
        """Close database connections"""
        if self.engine:
            await self.engine.dispose()
            logger.info("Database connections closed")

    async def get_session(self) -> AsyncSession:
        """Get database session"""
        if not self.session_factory:
            self.initialize()
        async with self.session_factory() as session:
            yield session


db_manager = DatabaseManager()


async def get_db() -> AsyncSession:
    """Dependency for FastAPI"""
    async for session in db_manager.get_session():
        yield session
164  services/demo_session/app/core/redis_client.py  Normal file
@@ -0,0 +1,164 @@
"""
Redis client for demo session data caching
"""

import redis.asyncio as redis
from typing import Optional, Any
import json
import structlog
from datetime import timedelta

from .config import settings

logger = structlog.get_logger()


class RedisClient:
    """Redis client for session data"""

    def __init__(self, redis_url: str = None):
        self.redis_url = redis_url or settings.REDIS_URL
        self.client: Optional[redis.Redis] = None
        self.key_prefix = settings.REDIS_KEY_PREFIX

    async def connect(self):
        """Connect to Redis"""
        if not self.client:
            self.client = await redis.from_url(
                self.redis_url,
                encoding="utf-8",
                decode_responses=True
            )
            logger.info("Redis client connected", redis_url=self.redis_url.split("@")[-1])

    async def close(self):
        """Close Redis connection"""
        if self.client:
            await self.client.close()
            logger.info("Redis connection closed")

    async def ping(self) -> bool:
        """Check Redis connection"""
        try:
            if not self.client:
                await self.connect()
            return await self.client.ping()
        except Exception as e:
            logger.error("Redis ping failed", error=str(e))
            return False

    def _make_key(self, *parts: str) -> str:
        """Create Redis key with prefix"""
        return f"{self.key_prefix}:{':'.join(parts)}"

    async def set_session_data(self, session_id: str, key: str, data: Any, ttl: int = None):
        """Store session data in Redis"""
        if not self.client:
            await self.connect()

        redis_key = self._make_key(session_id, key)
        serialized = json.dumps(data) if not isinstance(data, str) else data

        if ttl:
            await self.client.setex(redis_key, ttl, serialized)
        else:
            await self.client.set(redis_key, serialized)

        logger.debug("Session data stored", session_id=session_id, key=key)

    async def get_session_data(self, session_id: str, key: str) -> Optional[Any]:
        """Retrieve session data from Redis"""
        if not self.client:
            await self.connect()

        redis_key = self._make_key(session_id, key)
        data = await self.client.get(redis_key)

        if data:
            try:
                return json.loads(data)
            except json.JSONDecodeError:
                return data

        return None

    async def delete_session_data(self, session_id: str, key: str = None):
        """Delete session data"""
        if not self.client:
            await self.connect()

        if key:
            redis_key = self._make_key(session_id, key)
            await self.client.delete(redis_key)
        else:
            pattern = self._make_key(session_id, "*")
            keys = await self.client.keys(pattern)
            if keys:
                await self.client.delete(*keys)

        logger.debug("Session data deleted", session_id=session_id, key=key)

    async def extend_session_ttl(self, session_id: str, ttl: int):
        """Extend TTL for all session keys"""
        if not self.client:
            await self.connect()

        pattern = self._make_key(session_id, "*")
        keys = await self.client.keys(pattern)

        for key in keys:
            await self.client.expire(key, ttl)

        logger.debug("Session TTL extended", session_id=session_id, ttl=ttl)

    async def set_hash(self, session_id: str, hash_key: str, field: str, value: Any):
        """Store hash field in Redis"""
        if not self.client:
            await self.connect()

        redis_key = self._make_key(session_id, hash_key)
        serialized = json.dumps(value) if not isinstance(value, str) else value
        await self.client.hset(redis_key, field, serialized)

    async def get_hash(self, session_id: str, hash_key: str, field: str) -> Optional[Any]:
        """Get hash field from Redis"""
        if not self.client:
            await self.connect()

        redis_key = self._make_key(session_id, hash_key)
        data = await self.client.hget(redis_key, field)

        if data:
            try:
                return json.loads(data)
            except json.JSONDecodeError:
                return data

        return None

    async def get_all_hash(self, session_id: str, hash_key: str) -> dict:
        """Get all hash fields"""
        if not self.client:
            await self.connect()

        redis_key = self._make_key(session_id, hash_key)
        data = await self.client.hgetall(redis_key)

        result = {}
        for field, value in data.items():
            try:
                result[field] = json.loads(value)
            except json.JSONDecodeError:
                result[field] = value

        return result


redis_client = RedisClient()


async def get_redis() -> RedisClient:
    """Dependency for FastAPI"""
    if not redis_client.client:
        await redis_client.connect()
    return redis_client
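A short usage sketch of the client above; values round-trip through JSON and keys are namespaced as demo:session:&lt;session_id&gt;:&lt;key&gt;. The localhost URL is an assumption for local testing:

# Sketch: store and read back session-scoped JSON under the demo:session:* prefix.
import asyncio
from app.core.redis_client import RedisClient

async def main():
    client = RedisClient("redis://localhost:6379/0")  # URL assumed for local use
    await client.connect()

    # Value is JSON-serialized transparently; key becomes demo:session:demo_abc123:cart
    await client.set_session_data("demo_abc123", "cart", {"items": 3}, ttl=1800)
    print(await client.get_session_data("demo_abc123", "cart"))  # {'items': 3}

    # Dropping the key argument deletes every key belonging to the session
    await client.delete_session_data("demo_abc123")
    await client.close()

asyncio.run(main())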
111  services/demo_session/app/main.py  Normal file
@@ -0,0 +1,111 @@
"""
Demo Session Service - Main Application
Manages isolated demo sessions with ephemeral data
"""

from fastapi import FastAPI, Request
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import JSONResponse
import structlog
from contextlib import asynccontextmanager

from app.core import settings, DatabaseManager, RedisClient
from app.api import router

logger = structlog.get_logger()

# Initialize database and redis
db_manager = DatabaseManager()
redis_client = RedisClient()


@asynccontextmanager
async def lifespan(app: FastAPI):
    """Application lifespan handler"""
    logger.info("Starting Demo Session Service", version=settings.VERSION)

    # Initialize database
    db_manager.initialize()

    # Connect to Redis
    await redis_client.connect()

    logger.info("Demo Session Service started successfully")

    yield

    # Cleanup on shutdown
    await db_manager.close()
    await redis_client.close()

    logger.info("Demo Session Service stopped")


app = FastAPI(
    title="Demo Session Service",
    description="Manages isolated demo sessions for prospect users",
    version=settings.VERSION,
    lifespan=lifespan
)

# CORS middleware
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)


@app.exception_handler(Exception)
async def global_exception_handler(request: Request, exc: Exception):
    """Global exception handler"""
    logger.error(
        "Unhandled exception",
        path=request.url.path,
        method=request.method,
        error=str(exc)
    )
    return JSONResponse(
        status_code=500,
        content={"detail": "Internal server error"}
    )


# Include routers
app.include_router(router)


@app.get("/")
async def root():
    """Root endpoint"""
    return {
        "service": "demo-session",
        "version": settings.VERSION,
        "status": "running"
    }


@app.get("/health")
async def health():
    """Health check endpoint"""
    redis_ok = await redis_client.ping()

    return {
        "status": "healthy" if redis_ok else "degraded",
        "service": "demo-session",
        "version": settings.VERSION,
        "redis": "connected" if redis_ok else "disconnected"
    }


if __name__ == "__main__":
    import uvicorn
    uvicorn.run(
        "app.main:app",
        host="0.0.0.0",
        port=8000,
        reload=settings.DEBUG,
        log_level=settings.LOG_LEVEL.lower()
    )
5  services/demo_session/app/models/__init__.py  Normal file
@@ -0,0 +1,5 @@
"""Demo Session Service Models"""

from .demo_session import DemoSession, DemoSessionStatus

__all__ = ["DemoSession", "DemoSessionStatus"]
71  services/demo_session/app/models/demo_session.py  Normal file
@@ -0,0 +1,71 @@
"""
Demo Session Models
Tracks ephemeral demo sessions for prospect users
"""

from sqlalchemy import Column, String, Boolean, DateTime, Integer, Enum as SQLEnum
from sqlalchemy.dialects.postgresql import UUID, JSONB
from datetime import datetime, timezone
import uuid
import enum

from shared.database.base import Base


class DemoSessionStatus(enum.Enum):
    """Demo session status"""
    ACTIVE = "active"
    EXPIRED = "expired"
    DESTROYED = "destroyed"


class DemoSession(Base):
    """Demo Session tracking model"""
    __tablename__ = "demo_sessions"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    session_id = Column(String(100), unique=True, nullable=False, index=True)

    # Session ownership
    user_id = Column(UUID(as_uuid=True), nullable=True)
    ip_address = Column(String(45), nullable=True)
    user_agent = Column(String(500), nullable=True)

    # Demo tenant linking
    base_demo_tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)
    virtual_tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)
    demo_account_type = Column(String(50), nullable=False)  # 'individual_bakery', 'central_baker'

    # Session lifecycle
    status = Column(SQLEnum(DemoSessionStatus, values_callable=lambda obj: [e.value for e in obj]), default=DemoSessionStatus.ACTIVE, index=True)
    created_at = Column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc), index=True)
    expires_at = Column(DateTime(timezone=True), nullable=False, index=True)
    last_activity_at = Column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc))
    destroyed_at = Column(DateTime(timezone=True), nullable=True)

    # Session metrics
    request_count = Column(Integer, default=0)
    data_cloned = Column(Boolean, default=False)
    redis_populated = Column(Boolean, default=False)

    # Session metadata (the attribute name "metadata" is reserved by SQLAlchemy's
    # declarative base, hence session_metadata)
    session_metadata = Column(JSONB, default=dict)

    def __repr__(self):
        return f"<DemoSession(session_id={self.session_id}, status={self.status.value})>"

    def to_dict(self):
        """Convert to dictionary"""
        return {
            "id": str(self.id),
            "session_id": self.session_id,
            "virtual_tenant_id": str(self.virtual_tenant_id),
            "base_demo_tenant_id": str(self.base_demo_tenant_id),
            "demo_account_type": self.demo_account_type,
            "status": self.status.value,
            "created_at": self.created_at.isoformat() if self.created_at else None,
            "expires_at": self.expires_at.isoformat() if self.expires_at else None,
            "last_activity_at": self.last_activity_at.isoformat() if self.last_activity_at else None,
            "request_count": self.request_count,
            "metadata": self.session_metadata
        }
7  services/demo_session/app/services/__init__.py  Normal file
@@ -0,0 +1,7 @@
"""Demo Session Services"""

from .session_manager import DemoSessionManager
from .data_cloner import DemoDataCloner
from .cleanup_service import DemoCleanupService

__all__ = ["DemoSessionManager", "DemoDataCloner", "DemoCleanupService"]
147  services/demo_session/app/services/cleanup_service.py  Normal file
@@ -0,0 +1,147 @@
"""
Demo Cleanup Service
Handles automatic cleanup of expired sessions
"""

from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, update
from datetime import datetime, timezone
from typing import List
import structlog

from app.models import DemoSession, DemoSessionStatus
from app.services.data_cloner import DemoDataCloner
from app.core import RedisClient

logger = structlog.get_logger()


class DemoCleanupService:
    """Handles cleanup of expired demo sessions"""

    def __init__(self, db: AsyncSession, redis: RedisClient):
        self.db = db
        self.redis = redis
        self.data_cloner = DemoDataCloner(db, redis)

    async def cleanup_expired_sessions(self) -> dict:
        """
        Find and cleanup all expired sessions

        Returns:
            Cleanup statistics
        """
        logger.info("Starting demo session cleanup")

        now = datetime.now(timezone.utc)

        # Find expired sessions
        result = await self.db.execute(
            select(DemoSession).where(
                DemoSession.status == DemoSessionStatus.ACTIVE,
                DemoSession.expires_at < now
            )
        )
        expired_sessions = result.scalars().all()

        stats = {
            "total_expired": len(expired_sessions),
            "cleaned_up": 0,
            "failed": 0,
            "errors": []
        }

        for session in expired_sessions:
            try:
                # Mark as expired
                session.status = DemoSessionStatus.EXPIRED
                await self.db.commit()

                # Delete session data
                await self.data_cloner.delete_session_data(
                    str(session.virtual_tenant_id),
                    session.session_id
                )

                stats["cleaned_up"] += 1

                logger.info(
                    "Session cleaned up",
                    session_id=session.session_id,
                    age_minutes=(now - session.created_at).total_seconds() / 60
                )

            except Exception as e:
                stats["failed"] += 1
                stats["errors"].append({
                    "session_id": session.session_id,
                    "error": str(e)
                })
                logger.error(
                    "Failed to cleanup session",
                    session_id=session.session_id,
                    error=str(e)
                )

        logger.info("Demo session cleanup completed", stats=stats)
        return stats

    async def cleanup_old_destroyed_sessions(self, days: int = 7) -> int:
        """
        Delete destroyed session records older than specified days

        Args:
            days: Number of days to keep destroyed sessions

        Returns:
            Number of deleted records
        """
        from datetime import timedelta

        cutoff_date = datetime.now(timezone.utc) - timedelta(days=days)

        result = await self.db.execute(
            select(DemoSession).where(
                DemoSession.status == DemoSessionStatus.DESTROYED,
                DemoSession.destroyed_at < cutoff_date
            )
        )
        old_sessions = result.scalars().all()

        for session in old_sessions:
            await self.db.delete(session)

        await self.db.commit()

        logger.info(
            "Old destroyed sessions deleted",
            count=len(old_sessions),
            older_than_days=days
        )

        return len(old_sessions)

    async def get_cleanup_stats(self) -> dict:
        """Get cleanup statistics"""
        result = await self.db.execute(select(DemoSession))
        all_sessions = result.scalars().all()

        now = datetime.now(timezone.utc)

        active_count = len([s for s in all_sessions if s.status == DemoSessionStatus.ACTIVE])
        expired_count = len([s for s in all_sessions if s.status == DemoSessionStatus.EXPIRED])
        destroyed_count = len([s for s in all_sessions if s.status == DemoSessionStatus.DESTROYED])

        # Find sessions that should be expired but aren't marked yet
        should_be_expired = len([
            s for s in all_sessions
            if s.status == DemoSessionStatus.ACTIVE and s.expires_at < now
        ])

        return {
            "total_sessions": len(all_sessions),
            "active_sessions": active_count,
            "expired_sessions": expired_count,
            "destroyed_sessions": destroyed_count,
            "pending_cleanup": should_be_expired
        }
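The /cleanup/run route earlier in this commit is what a CronJob is expected to hit. As a hedged sketch, the same service could fall back to an in-process loop driven by DEMO_SESSION_CLEANUP_INTERVAL_MINUTES; the wiring below reuses the db_manager and redis_client singletons from app.core, and starting it (for example from the lifespan handler) is an assumption, not part of this commit:

# Sketch: in-process fallback scheduler for environments without the CronJob.
import asyncio
from app.core import settings
from app.core.database import db_manager
from app.core.redis_client import redis_client
from app.services.cleanup_service import DemoCleanupService

async def cleanup_loop():
    while True:
        # db_manager.get_session() is an async generator yielding one session
        async for db in db_manager.get_session():
            stats = await DemoCleanupService(db, redis_client).cleanup_expired_sessions()
            print("cleanup pass:", stats)
        await asyncio.sleep(settings.DEMO_SESSION_CLEANUP_INTERVAL_MINUTES * 60)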
288  services/demo_session/app/services/data_cloner.py  Normal file
@@ -0,0 +1,288 @@
"""
Demo Data Cloner
Clones base demo data to session-specific virtual tenants
"""

from sqlalchemy.ext.asyncio import AsyncSession
from typing import Dict, Any, List
import httpx
import structlog
import uuid

from app.core import RedisClient, settings

logger = structlog.get_logger()


class DemoDataCloner:
    """Clones demo data for isolated sessions"""

    def __init__(self, db: AsyncSession, redis: RedisClient):
        self.db = db
        self.redis = redis

    async def clone_tenant_data(
        self,
        session_id: str,
        base_demo_tenant_id: str,
        virtual_tenant_id: str,
        demo_account_type: str
    ) -> Dict[str, Any]:
        """
        Clone all demo data from base tenant to virtual tenant

        Args:
            session_id: Session ID
            base_demo_tenant_id: Base demo tenant UUID
            virtual_tenant_id: Virtual tenant UUID for this session
            demo_account_type: Type of demo account

        Returns:
            Cloning statistics
        """
        logger.info(
            "Starting data cloning",
            session_id=session_id,
            base_demo_tenant_id=base_demo_tenant_id,
            virtual_tenant_id=virtual_tenant_id
        )

        stats = {
            "session_id": session_id,
            "services_cloned": [],
            "total_records": 0,
            "redis_keys": 0
        }

        # Clone data from each service based on demo account type
        services_to_clone = self._get_services_for_demo_type(demo_account_type)

        for service_name in services_to_clone:
            try:
                service_stats = await self._clone_service_data(
                    service_name,
                    base_demo_tenant_id,
                    virtual_tenant_id,
                    session_id
                )
                stats["services_cloned"].append(service_name)
                stats["total_records"] += service_stats.get("records_cloned", 0)

            except Exception as e:
                logger.error(
                    "Failed to clone service data",
                    service=service_name,
                    error=str(e)
                )

        # Populate Redis cache with hot data
        redis_stats = await self._populate_redis_cache(
            session_id,
            virtual_tenant_id,
            demo_account_type
        )
        stats["redis_keys"] = redis_stats.get("keys_created", 0)

        logger.info(
            "Data cloning completed",
            session_id=session_id,
            stats=stats
        )

        return stats

    def _get_services_for_demo_type(self, demo_account_type: str) -> List[str]:
        """Get list of services to clone based on demo type"""
        base_services = ["inventory", "sales", "orders", "pos"]

        if demo_account_type == "individual_bakery":
            # Individual bakery has production, recipes
            return base_services + ["recipes", "production"]
        elif demo_account_type == "central_baker":
            # Central baker satellite has suppliers
            return base_services + ["suppliers"]
        else:
            return base_services

    async def _clone_service_data(
        self,
        service_name: str,
        base_tenant_id: str,
        virtual_tenant_id: str,
        session_id: str
    ) -> Dict[str, Any]:
        """
        Clone data for a specific service

        Args:
            service_name: Name of the service
            base_tenant_id: Source tenant ID
            virtual_tenant_id: Target tenant ID
            session_id: Session ID

        Returns:
            Cloning statistics
        """
        service_url = self._get_service_url(service_name)

        async with httpx.AsyncClient(timeout=30.0) as client:
            response = await client.post(
                f"{service_url}/internal/demo/clone",
                json={
                    "base_tenant_id": base_tenant_id,
                    "virtual_tenant_id": virtual_tenant_id,
                    "session_id": session_id
                },
                headers={"X-Internal-Service": "demo-session"}
            )

            response.raise_for_status()
            return response.json()

    async def _populate_redis_cache(
        self,
        session_id: str,
        virtual_tenant_id: str,
        demo_account_type: str
    ) -> Dict[str, Any]:
        """
        Populate Redis with frequently accessed data

        Args:
            session_id: Session ID
            virtual_tenant_id: Virtual tenant ID
            demo_account_type: Demo account type

        Returns:
            Statistics about cached data
        """
        logger.info("Populating Redis cache", session_id=session_id)

        keys_created = 0

        # Cache inventory data (hot data)
        try:
            inventory_data = await self._fetch_inventory_data(virtual_tenant_id)
            await self.redis.set_session_data(
                session_id,
                "inventory",
                inventory_data,
                ttl=settings.REDIS_SESSION_TTL
            )
            keys_created += 1
        except Exception as e:
            logger.error("Failed to cache inventory", error=str(e))

        # Cache POS data
        try:
            pos_data = await self._fetch_pos_data(virtual_tenant_id)
            await self.redis.set_session_data(
                session_id,
                "pos",
                pos_data,
                ttl=settings.REDIS_SESSION_TTL
            )
            keys_created += 1
        except Exception as e:
            logger.error("Failed to cache POS data", error=str(e))

        # Cache recent sales
        try:
            sales_data = await self._fetch_recent_sales(virtual_tenant_id)
            await self.redis.set_session_data(
                session_id,
                "recent_sales",
                sales_data,
                ttl=settings.REDIS_SESSION_TTL
            )
            keys_created += 1
        except Exception as e:
            logger.error("Failed to cache sales", error=str(e))

        return {"keys_created": keys_created}

    async def _fetch_inventory_data(self, tenant_id: str) -> Dict[str, Any]:
        """Fetch inventory data for caching"""
        async with httpx.AsyncClient() as client:
            response = await client.get(
                f"{settings.INVENTORY_SERVICE_URL}/api/inventory/summary",
                headers={"X-Tenant-Id": tenant_id}
            )
            return response.json()

    async def _fetch_pos_data(self, tenant_id: str) -> Dict[str, Any]:
        """Fetch POS data for caching"""
        async with httpx.AsyncClient() as client:
            response = await client.get(
                # POS endpoints are served by the sales service (see url_map below);
                # settings defines no separate POS_SERVICE_URL
                f"{settings.SALES_SERVICE_URL}/api/pos/current-session",
                headers={"X-Tenant-Id": tenant_id}
            )
            return response.json()

    async def _fetch_recent_sales(self, tenant_id: str) -> Dict[str, Any]:
        """Fetch recent sales for caching"""
        async with httpx.AsyncClient() as client:
            response = await client.get(
                f"{settings.SALES_SERVICE_URL}/api/sales/recent?limit=50",
                headers={"X-Tenant-Id": tenant_id}
            )
            return response.json()

    def _get_service_url(self, service_name: str) -> str:
        """Get service URL from settings"""
        url_map = {
            "inventory": settings.INVENTORY_SERVICE_URL,
            "recipes": settings.RECIPES_SERVICE_URL,
            "sales": settings.SALES_SERVICE_URL,
            "orders": settings.ORDERS_SERVICE_URL,
            "production": settings.PRODUCTION_SERVICE_URL,
            "suppliers": settings.SUPPLIERS_SERVICE_URL,
            "pos": settings.SALES_SERVICE_URL,
        }
        return url_map.get(service_name, "")

    async def delete_session_data(
        self,
        virtual_tenant_id: str,
        session_id: str
    ):
        """
        Delete all data for a session

        Args:
            virtual_tenant_id: Virtual tenant ID to delete
            session_id: Session ID
        """
        logger.info(
            "Deleting session data",
            virtual_tenant_id=virtual_tenant_id,
            session_id=session_id
        )

        # Delete from each service
        services = ["inventory", "recipes", "sales", "orders", "production", "suppliers", "pos"]

        for service_name in services:
            try:
                await self._delete_service_data(service_name, virtual_tenant_id)
            except Exception as e:
                logger.error(
                    "Failed to delete service data",
                    service=service_name,
                    error=str(e)
                )

        # Delete from Redis
        await self.redis.delete_session_data(session_id)

        logger.info("Session data deleted", virtual_tenant_id=virtual_tenant_id)

    async def _delete_service_data(self, service_name: str, virtual_tenant_id: str):
        """Delete data from a specific service"""
        service_url = self._get_service_url(service_name)

        async with httpx.AsyncClient(timeout=30.0) as client:
            await client.delete(
                f"{service_url}/internal/demo/tenant/{virtual_tenant_id}",
                headers={"X-Internal-Service": "demo-session"}
            )
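Since each per-service clone call is an independent HTTP request, the sequential loop in clone_tenant_data could be fanned out with asyncio.gather. A sketch of that variant (a hypothetical extra method on DemoDataCloner, not part of this commit; error handling mirrors the loop above):

# Sketch: run the per-service clone requests concurrently instead of one by one.
import asyncio
import structlog

logger = structlog.get_logger()

async def _clone_services_concurrently(self, services, base_tenant_id,
                                       virtual_tenant_id, session_id, stats):
    results = await asyncio.gather(
        *(self._clone_service_data(name, base_tenant_id, virtual_tenant_id, session_id)
          for name in services),
        return_exceptions=True,  # a failing service must not abort the others
    )
    for name, result in zip(services, results):
        if isinstance(result, Exception):
            logger.error("Failed to clone service data", service=name, error=str(result))
        else:
            stats["services_cloned"].append(name)
            stats["total_records"] += result.get("records_cloned", 0)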
166  services/demo_session/app/services/k8s_job_cloner.py  Normal file
@@ -0,0 +1,166 @@
"""
Kubernetes Job-based Demo Data Cloner
Triggers a K8s Job to clone demo data at the database level
"""

import httpx
import structlog
from typing import Dict, Any
import os

logger = structlog.get_logger()


class K8sJobCloner:
    """Triggers Kubernetes Jobs to clone demo data"""

    def __init__(self):
        self.k8s_api_url = os.getenv("KUBERNETES_SERVICE_HOST")
        self.namespace = os.getenv("POD_NAMESPACE", "bakery-ia")
        self.clone_job_image = os.getenv("CLONE_JOB_IMAGE", "bakery/inventory-service:latest")
        # Service account token for K8s API access
        with open("/var/run/secrets/kubernetes.io/serviceaccount/token", "r") as f:
            self.token = f.read()

    async def clone_tenant_data(
        self,
        session_id: str,
        base_demo_tenant_id: str,
        virtual_tenant_id: str,
        demo_account_type: str
    ) -> Dict[str, Any]:
        """
        Clone demo data by creating a Kubernetes Job

        Args:
            session_id: Session ID
            base_demo_tenant_id: Base demo tenant UUID (not used in job approach)
            virtual_tenant_id: Virtual tenant UUID for this session
            demo_account_type: Type of demo account

        Returns:
            Job creation status
        """
        logger.info(
            "Triggering demo data cloning job",
            session_id=session_id,
            virtual_tenant_id=virtual_tenant_id,
            demo_account_type=demo_account_type,
            clone_image=self.clone_job_image
        )

        job_name = f"demo-clone-{virtual_tenant_id[:8]}"

        # Create Job manifest
        job_manifest = {
            "apiVersion": "batch/v1",
            "kind": "Job",
            "metadata": {
                "name": job_name,
                "namespace": self.namespace,
                "labels": {
                    "app": "demo-clone",
                    "session-id": session_id,
                    "component": "runtime"
                }
            },
            "spec": {
                "ttlSecondsAfterFinished": 3600,
                "backoffLimit": 2,
                "template": {
                    "metadata": {
                        "labels": {"app": "demo-clone"}
                    },
                    "spec": {
                        "restartPolicy": "Never",
                        "containers": [{
                            "name": "clone-data",
                            "image": self.clone_job_image,  # Configured via environment variable
                            "imagePullPolicy": "IfNotPresent",  # Don't pull if image exists locally
                            "command": ["python", "/app/scripts/demo/clone_demo_tenant.py"],
                            "env": [
                                {"name": "VIRTUAL_TENANT_ID", "value": virtual_tenant_id},
                                {"name": "DEMO_ACCOUNT_TYPE", "value": demo_account_type},
                                {
                                    "name": "INVENTORY_DATABASE_URL",
                                    "valueFrom": {
                                        "secretKeyRef": {
                                            "name": "database-secrets",
                                            "key": "INVENTORY_DATABASE_URL"
                                        }
                                    }
                                },
                                {
                                    "name": "SALES_DATABASE_URL",
                                    "valueFrom": {
                                        "secretKeyRef": {
                                            "name": "database-secrets",
                                            "key": "SALES_DATABASE_URL"
                                        }
                                    }
                                },
                                {
                                    "name": "ORDERS_DATABASE_URL",
                                    "valueFrom": {
                                        "secretKeyRef": {
                                            "name": "database-secrets",
                                            "key": "ORDERS_DATABASE_URL"
                                        }
                                    }
                                },
                                {"name": "LOG_LEVEL", "value": "INFO"}
                            ],
                            "resources": {
                                "requests": {"memory": "256Mi", "cpu": "100m"},
                                "limits": {"memory": "512Mi", "cpu": "500m"}
                            }
                        }]
                    }
                }
            }
        }

        try:
            # Create the Job via K8s API
            async with httpx.AsyncClient(verify=False, timeout=30.0) as client:
                response = await client.post(
                    f"https://{self.k8s_api_url}/apis/batch/v1/namespaces/{self.namespace}/jobs",
                    json=job_manifest,
                    headers={
                        "Authorization": f"Bearer {self.token}",
                        "Content-Type": "application/json"
                    }
                )

                if response.status_code == 201:
                    logger.info(
                        "Demo clone job created successfully",
                        job_name=job_name,
                        session_id=session_id
                    )
                    return {
                        "success": True,
                        "job_name": job_name,
                        "method": "kubernetes_job"
                    }
                else:
                    logger.error(
                        "Failed to create demo clone job",
                        status_code=response.status_code,
                        response=response.text
                    )
                    return {
                        "success": False,
                        "error": f"K8s API returned {response.status_code}"
                    }

        except Exception as e:
            logger.error(
                "Error creating demo clone job",
                error=str(e),
                exc_info=True
            )
            return {
                "success": False,
                "error": str(e)
            }
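clone_tenant_data is fire-and-forget: it returns as soon as the Job is accepted. A caller that needs completion could poll the Job object; the method below is a hypothetical addition, but it uses only standard batch/v1 JobStatus fields (status.succeeded, status.failed) and the same token wiring as the class above:

# Sketch: poll a clone Job until it succeeds, fails, or times out.
import asyncio
import httpx

async def wait_for_job(self, job_name: str, timeout_s: float = 300.0) -> bool:
    url = (f"https://{self.k8s_api_url}/apis/batch/v1"
           f"/namespaces/{self.namespace}/jobs/{job_name}")
    loop = asyncio.get_running_loop()
    deadline = loop.time() + timeout_s
    async with httpx.AsyncClient(verify=False, timeout=10.0) as client:
        while loop.time() < deadline:
            resp = await client.get(url, headers={"Authorization": f"Bearer {self.token}"})
            status = resp.json().get("status", {})
            if status.get("succeeded", 0) >= 1:
                return True
            if status.get("failed", 0) > 2:  # mirrors the Job's backoffLimit above
                return False
            await asyncio.sleep(5)
    return False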
267
services/demo_session/app/services/session_manager.py
Normal file
267
services/demo_session/app/services/session_manager.py
Normal file
@@ -0,0 +1,267 @@
|
||||
"""
|
||||
Demo Session Manager
|
||||
Handles creation, extension, and destruction of demo sessions
|
||||
"""
|
||||
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy import select, update
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from typing import Optional, Dict, Any
|
||||
import uuid
|
||||
import secrets
|
||||
import structlog
|
||||
|
||||
from app.models import DemoSession, DemoSessionStatus
|
||||
from app.core import RedisClient, settings
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
|
||||
class DemoSessionManager:
|
||||
"""Manages demo session lifecycle"""
|
||||
|
||||
def __init__(self, db: AsyncSession, redis: RedisClient):
|
||||
self.db = db
|
||||
self.redis = redis
|
||||
|
||||
async def create_session(
|
||||
self,
|
||||
demo_account_type: str,
|
||||
user_id: Optional[str] = None,
|
||||
ip_address: Optional[str] = None,
|
||||
user_agent: Optional[str] = None
|
||||
) -> DemoSession:
|
||||
"""
|
||||
Create a new demo session
|
||||
|
||||
Args:
|
||||
demo_account_type: 'individual_bakery' or 'central_baker'
|
||||
user_id: Optional user ID if authenticated
|
||||
ip_address: Client IP address
|
||||
user_agent: Client user agent
|
||||
|
||||
Returns:
|
||||
Created demo session
|
||||
"""
|
||||
logger.info("Creating demo session", demo_account_type=demo_account_type)
|
||||
|
||||
# Generate unique session ID
|
||||
session_id = f"demo_{secrets.token_urlsafe(16)}"
|
||||
|
||||
# Generate virtual tenant ID
|
||||
virtual_tenant_id = uuid.uuid4()
|
||||
|
||||
# Get base demo tenant ID from config
|
||||
demo_config = settings.DEMO_ACCOUNTS.get(demo_account_type)
|
||||
if not demo_config:
|
||||
raise ValueError(f"Invalid demo account type: {demo_account_type}")
|
||||
|
||||
# Create session record
|
||||
session = DemoSession(
|
||||
session_id=session_id,
|
||||
user_id=uuid.UUID(user_id) if user_id else None,
|
||||
ip_address=ip_address,
|
||||
user_agent=user_agent,
|
||||
base_demo_tenant_id=uuid.uuid4(), # Will be set by seeding script
|
||||
virtual_tenant_id=virtual_tenant_id,
|
||||
demo_account_type=demo_account_type,
|
||||
status=DemoSessionStatus.ACTIVE,
|
||||
created_at=datetime.now(timezone.utc),
|
||||
expires_at=datetime.now(timezone.utc) + timedelta(
|
||||
minutes=settings.DEMO_SESSION_DURATION_MINUTES
|
||||
),
|
||||
last_activity_at=datetime.now(timezone.utc),
|
||||
data_cloned=False,
|
||||
redis_populated=False,
|
||||
metadata={
|
||||
"demo_config": demo_config,
|
||||
"extension_count": 0
|
||||
}
|
||||
)
|
||||
|
||||
self.db.add(session)
|
||||
await self.db.commit()
|
||||
await self.db.refresh(session)
|
||||
|
||||
# Store session metadata in Redis
|
||||
await self._store_session_metadata(session)
|
||||
|
||||
logger.info(
|
||||
"Demo session created",
|
||||
session_id=session_id,
|
||||
virtual_tenant_id=str(virtual_tenant_id),
|
||||
expires_at=session.expires_at.isoformat()
|
||||
)
|
||||
|
||||
return session
|
||||
|
||||
async def get_session(self, session_id: str) -> Optional[DemoSession]:
|
||||
"""Get session by session_id"""
|
||||
result = await self.db.execute(
|
||||
select(DemoSession).where(DemoSession.session_id == session_id)
|
||||
)
|
||||
return result.scalar_one_or_none()
|
||||
|
||||
async def get_session_by_virtual_tenant(self, virtual_tenant_id: str) -> Optional[DemoSession]:
|
||||
"""Get session by virtual tenant ID"""
|
||||
result = await self.db.execute(
|
||||
select(DemoSession).where(
|
||||
DemoSession.virtual_tenant_id == uuid.UUID(virtual_tenant_id)
|
||||
)
|
||||
)
|
||||
return result.scalar_one_or_none()
|
||||
|
||||
async def extend_session(self, session_id: str) -> DemoSession:
|
||||
"""
|
||||
Extend session expiration time
|
||||
|
||||
Args:
|
||||
session_id: Session ID to extend
|
||||
|
||||
Returns:
|
||||
Updated session
|
||||
|
||||
Raises:
|
||||
ValueError: If session cannot be extended
|
||||
"""
|
||||
session = await self.get_session(session_id)
|
||||
|
||||
if not session:
|
||||
raise ValueError(f"Session not found: {session_id}")
|
||||
|
||||
if session.status != DemoSessionStatus.ACTIVE:
|
||||
raise ValueError(f"Cannot extend {session.status.value} session")
|
||||
|
||||
# Check extension limit
|
||||
extension_count = session.metadata.get("extension_count", 0)
|
||||
if extension_count >= settings.DEMO_SESSION_MAX_EXTENSIONS:
|
||||
raise ValueError(f"Maximum extensions ({settings.DEMO_SESSION_MAX_EXTENSIONS}) reached")
|
||||
|
||||
# Extend expiration
|
||||
new_expires_at = datetime.now(timezone.utc) + timedelta(
|
||||
minutes=settings.DEMO_SESSION_DURATION_MINUTES
|
||||
)
|
||||
|
||||
session.expires_at = new_expires_at
|
||||
session.last_activity_at = datetime.now(timezone.utc)
|
||||
session.metadata["extension_count"] = extension_count + 1
|
||||
|
||||
await self.db.commit()
|
||||
await self.db.refresh(session)
|
||||
|
||||
# Extend Redis TTL
|
||||
await self.redis.extend_session_ttl(
|
||||
session_id,
|
||||
settings.REDIS_SESSION_TTL
|
||||
)
|
||||
|
||||
logger.info(
|
||||
"Session extended",
|
||||
session_id=session_id,
|
||||
new_expires_at=new_expires_at.isoformat(),
|
||||
extension_count=extension_count + 1
|
||||
)
|
||||
|
||||
return session
|
||||
|
||||
    async def update_activity(self, session_id: str):
        """Update last activity timestamp"""
        await self.db.execute(
            update(DemoSession)
            .where(DemoSession.session_id == session_id)
            .values(
                last_activity_at=datetime.now(timezone.utc),
                request_count=DemoSession.request_count + 1
            )
        )
        await self.db.commit()

    async def mark_data_cloned(self, session_id: str):
        """Mark session as having data cloned"""
        await self.db.execute(
            update(DemoSession)
            .where(DemoSession.session_id == session_id)
            .values(data_cloned=True)
        )
        await self.db.commit()

    async def mark_redis_populated(self, session_id: str):
        """Mark session as having Redis data populated"""
        await self.db.execute(
            update(DemoSession)
            .where(DemoSession.session_id == session_id)
            .values(redis_populated=True)
        )
        await self.db.commit()

    async def destroy_session(self, session_id: str):
        """
        Destroy a demo session and clean up its resources

        Args:
            session_id: Session ID to destroy
        """
        session = await self.get_session(session_id)

        if not session:
            logger.warning("Session not found for destruction", session_id=session_id)
            return

        # Update session status
        session.status = DemoSessionStatus.DESTROYED
        session.destroyed_at = datetime.now(timezone.utc)

        await self.db.commit()

        # Delete Redis data
        await self.redis.delete_session_data(session_id)

        logger.info(
            "Session destroyed",
            session_id=session_id,
            virtual_tenant_id=str(session.virtual_tenant_id),
            duration_seconds=(
                session.destroyed_at - session.created_at
            ).total_seconds()
        )

    async def _store_session_metadata(self, session: DemoSession):
        """Store session metadata in Redis"""
        await self.redis.set_session_data(
            session.session_id,
            "metadata",
            {
                "session_id": session.session_id,
                "virtual_tenant_id": str(session.virtual_tenant_id),
                "demo_account_type": session.demo_account_type,
                "expires_at": session.expires_at.isoformat(),
                "created_at": session.created_at.isoformat()
            },
            ttl=settings.REDIS_SESSION_TTL
        )

    async def get_active_sessions_count(self) -> int:
        """Get count of active sessions"""
        # Count in the database instead of loading every row
        # (assumes `func` is imported from sqlalchemy alongside `select`)
        result = await self.db.execute(
            select(func.count())
            .select_from(DemoSession)
            .where(DemoSession.status == DemoSessionStatus.ACTIVE)
        )
        return result.scalar_one()

    async def get_session_stats(self) -> Dict[str, Any]:
        """Get session statistics"""
        result = await self.db.execute(select(DemoSession))
        all_sessions = result.scalars().all()

        active_sessions = [s for s in all_sessions if s.status == DemoSessionStatus.ACTIVE]

        return {
            "total_sessions": len(all_sessions),
            "active_sessions": len(active_sessions),
            "expired_sessions": len([s for s in all_sessions if s.status == DemoSessionStatus.EXPIRED]),
            "destroyed_sessions": len([s for s in all_sessions if s.status == DemoSessionStatus.DESTROYED]),
            "avg_duration_minutes": sum(
                (s.destroyed_at - s.created_at).total_seconds() / 60
                for s in all_sessions if s.destroyed_at
            ) / max(len([s for s in all_sessions if s.destroyed_at]), 1),
            "total_requests": sum(s.request_count for s in all_sessions)
        }
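A note on the JSONB handling in the session manager above: SQLAlchemy reserves the attribute name `metadata` on declarative models and does not track in-place mutations of a plain dict column, which is why the extension counter is read from and written back to `session_metadata` as a fresh dict. A minimal sketch of the usual alternative, assuming the model maps the column as `session_metadata` (the name used in the migration below); `bump_extension_count` is a hypothetical helper, not part of this commit:

    from sqlalchemy.orm import attributes

    def bump_extension_count(session) -> None:
        # Mutate the JSONB dict in place, then explicitly mark the attribute
        # dirty so the next commit persists the change (plain dicts are not
        # change-tracked unless wrapped in MutableDict).
        count = session.session_metadata.get("extension_count", 0)
        session.session_metadata["extension_count"] = count + 1
        attributes.flag_modified(session, "session_metadata")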
77
services/demo_session/migrations/env.py
Normal file
@@ -0,0 +1,77 @@
"""Alembic environment for demo_session service"""

from logging.config import fileConfig
from sqlalchemy import engine_from_config, pool
from alembic import context
import os
import sys
from pathlib import Path

# Add service root to path for container environment
service_root = Path(__file__).parent.parent
sys.path.insert(0, str(service_root))

# Also add project root for local development
project_root = Path(__file__).parent.parent.parent.parent
sys.path.insert(0, str(project_root))

# Import models - try container path first, then dev path
try:
    from app.models import *
    from shared.database.base import Base
except ImportError:
    from services.demo_session.app.models import *
    from shared.database.base import Base

# this is the Alembic Config object
config = context.config

# Set database URL from environment
database_url = os.getenv("DEMO_SESSION_DATABASE_URL")
if database_url:
    # Convert asyncpg URL to psycopg2 for synchronous migrations
    database_url = database_url.replace("postgresql+asyncpg://", "postgresql://")
    config.set_main_option("sqlalchemy.url", database_url)

# Interpret the config file for Python logging
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

target_metadata = Base.metadata


def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode."""
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online() -> None:
    """Run migrations in 'online' mode."""
    connectable = engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with connectable.connect() as connection:
        context.configure(
            connection=connection, target_metadata=target_metadata
        )

        with context.begin_transaction():
            context.run_migrations()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
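Because env.py resolves the URL from DEMO_SESSION_DATABASE_URL, migrations can also be applied at service startup rather than only via the alembic CLI. A minimal sketch, assuming the process runs from the service root where alembic.ini lives; `apply_migrations` is a hypothetical helper, not part of this commit:

    from alembic import command
    from alembic.config import Config

    def apply_migrations() -> None:
        # Loads alembic.ini (script_location = migrations); env.py then
        # overrides sqlalchemy.url from DEMO_SESSION_DATABASE_URL if set.
        cfg = Config("alembic.ini")
        command.upgrade(cfg, "head")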
24
services/demo_session/migrations/script.py.mako
Normal file
@@ -0,0 +1,24 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}


def upgrade() -> None:
    ${upgrades if upgrades else "pass"}


def downgrade() -> None:
    ${downgrades if downgrades else "pass"}
@@ -0,0 +1,64 @@
"""initial_schema

Revision ID: a1b2c3d4e5f6
Revises:
Create Date: 2025-10-02 17:45:00.000000+02:00

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision: str = 'a1b2c3d4e5f6'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    # Create demo_sessions table
    op.create_table('demo_sessions',
        sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('session_id', sa.String(length=100), nullable=False),
        sa.Column('user_id', postgresql.UUID(as_uuid=True), nullable=True),
        sa.Column('ip_address', sa.String(length=45), nullable=True),
        sa.Column('user_agent', sa.String(length=500), nullable=True),
        sa.Column('base_demo_tenant_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('virtual_tenant_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('demo_account_type', sa.String(length=50), nullable=False),
        sa.Column('status', sa.Enum('active', 'expired', 'destroyed', name='demosessionstatus'), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('expires_at', sa.DateTime(timezone=True), nullable=False),
        sa.Column('last_activity_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('destroyed_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('request_count', sa.Integer(), nullable=True),
        sa.Column('data_cloned', sa.Boolean(), nullable=True),
        sa.Column('redis_populated', sa.Boolean(), nullable=True),
        sa.Column('session_metadata', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('session_id')
    )

    # Create indexes
    op.create_index(op.f('ix_demo_sessions_session_id'), 'demo_sessions', ['session_id'], unique=False)
    op.create_index(op.f('ix_demo_sessions_base_demo_tenant_id'), 'demo_sessions', ['base_demo_tenant_id'], unique=False)
    op.create_index(op.f('ix_demo_sessions_virtual_tenant_id'), 'demo_sessions', ['virtual_tenant_id'], unique=False)
    op.create_index(op.f('ix_demo_sessions_status'), 'demo_sessions', ['status'], unique=False)
    op.create_index(op.f('ix_demo_sessions_created_at'), 'demo_sessions', ['created_at'], unique=False)
    op.create_index(op.f('ix_demo_sessions_expires_at'), 'demo_sessions', ['expires_at'], unique=False)


def downgrade() -> None:
    # Drop indexes
    op.drop_index(op.f('ix_demo_sessions_expires_at'), table_name='demo_sessions')
    op.drop_index(op.f('ix_demo_sessions_created_at'), table_name='demo_sessions')
    op.drop_index(op.f('ix_demo_sessions_status'), table_name='demo_sessions')
    op.drop_index(op.f('ix_demo_sessions_virtual_tenant_id'), table_name='demo_sessions')
    op.drop_index(op.f('ix_demo_sessions_base_demo_tenant_id'), table_name='demo_sessions')
    op.drop_index(op.f('ix_demo_sessions_session_id'), table_name='demo_sessions')

    # Drop the table, then drop the enum explicitly - on PostgreSQL,
    # dropping the table does NOT remove the enum type on its own
    op.drop_table('demo_sessions')
    sa.Enum(name='demosessionstatus').drop(op.get_bind(), checkfirst=True)
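On PostgreSQL, dropping a table never drops an enum type it used, which is why the downgrade above removes `demosessionstatus` explicitly. The mirror-image concern exists on upgrade if the type could survive a partial run; a hedged sketch of the usual guard, offered as a pattern rather than part of this commit:

    import sqlalchemy as sa
    from sqlalchemy.dialects import postgresql
    from alembic import op

    def status_enum() -> postgresql.ENUM:
        # Create the type only if it does not already exist, then hand back
        # a column type that will not try to CREATE TYPE a second time.
        sa.Enum('active', 'expired', 'destroyed', name='demosessionstatus').create(
            op.get_bind(), checkfirst=True
        )
        return postgresql.ENUM('active', 'expired', 'destroyed',
                               name='demosessionstatus', create_type=False)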
13
services/demo_session/requirements.txt
Normal file
@@ -0,0 +1,13 @@
fastapi==0.104.1
uvicorn[standard]==0.24.0
sqlalchemy[asyncio]==2.0.23
asyncpg==0.29.0
psycopg2-binary==2.9.9
alembic==1.12.1
redis==5.0.1
structlog==23.2.0
pydantic==2.5.0
pydantic-settings==2.1.0
httpx==0.25.2
PyJWT==2.8.0
python-multipart==0.0.6
@@ -41,13 +41,13 @@ async def create_enhanced_single_forecast(
):
    """Generate a single product forecast using enhanced repository pattern"""
    metrics = get_metrics_collector(request_obj)

    try:
        logger.info("Generating enhanced single forecast",
                    tenant_id=tenant_id,
                    inventory_product_id=request.inventory_product_id,
                    forecast_date=request.forecast_date.isoformat())

        # Record metrics
        if metrics:
            metrics.increment_counter("enhanced_single_forecasts_total")
@@ -163,13 +163,13 @@ async def create_enhanced_batch_forecast(
):
    """Generate batch forecasts using enhanced repository pattern"""
    metrics = get_metrics_collector(request_obj)

    try:
        logger.info("Generating enhanced batch forecasts",
                    tenant_id=tenant_id,
                    products_count=len(request.inventory_product_ids),
                    forecast_dates_count=request.forecast_days)

        # Record metrics
        if metrics:
            metrics.increment_counter("enhanced_batch_forecasts_total")
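The forecast hunks above call `get_metrics_collector(request_obj)` and guard every counter behind `if metrics:`, but the collector itself sits outside this diff. Purely as a guess at the assumed interface (the names and storage below are invented for illustration, not the project's actual implementation):

    from typing import Dict, Optional

    class MetricsCollector:
        """Hypothetical minimal shape consistent with the calls above."""

        def __init__(self) -> None:
            self._counters: Dict[str, int] = {}

        def increment_counter(self, name: str, value: int = 1) -> None:
            self._counters[name] = self._counters.get(name, 0) + value

    def get_metrics_collector(request) -> Optional[MetricsCollector]:
        # Returning None when metrics are disabled would explain the
        # `if metrics:` guard in the routes.
        return getattr(request.app.state, "metrics", None)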
@@ -11,6 +11,7 @@ import structlog
 from apscheduler.triggers.cron import CronTrigger
 
 from shared.alerts.base_service import BaseAlertService, AlertServiceMixin
+from shared.database.base import create_database_manager
 from app.services.procurement_service import ProcurementService
 
 logger = structlog.get_logger()
@@ -204,10 +205,44 @@ class ProcurementSchedulerService(BaseAlertService, AlertServiceMixin):
             logger.error("💥 Stale plan cleanup failed", error=str(e))
 
     async def get_active_tenants(self) -> List[UUID]:
-        """Get active tenants from tenant service or base implementation"""
-        # Only use tenant service, no fallbacks
+        """Get active tenants from tenant service, excluding demo tenants"""
         try:
-            return await super().get_active_tenants()
+            all_tenants = await super().get_active_tenants()
+
+            # Filter out demo tenants
+            from services.tenant.app.models.tenants import Tenant
+            from sqlalchemy import select
+            import os
+
+            tenant_db_url = os.getenv("TENANT_DATABASE_URL")
+            if not tenant_db_url:
+                logger.warning("TENANT_DATABASE_URL not set, returning all tenants")
+                return all_tenants
+
+            tenant_db = create_database_manager(tenant_db_url, "tenant-filter")
+            non_demo_tenants = []
+
+            async with tenant_db.get_session() as session:
+                for tenant_id in all_tenants:
+                    result = await session.execute(
+                        select(Tenant).where(Tenant.id == tenant_id)
+                    )
+                    tenant = result.scalars().first()
+
+                    # Only include non-demo tenants
+                    if tenant and not tenant.is_demo:
+                        non_demo_tenants.append(tenant_id)
+                    elif tenant and tenant.is_demo:
+                        logger.debug("Excluding demo tenant from procurement scheduler",
+                                     tenant_id=str(tenant_id))
+
+            logger.info("Filtered demo tenants from procurement scheduling",
+                        total_tenants=len(all_tenants),
+                        non_demo_tenants=len(non_demo_tenants),
+                        demo_tenants_filtered=len(all_tenants) - len(non_demo_tenants))
+
+            return non_demo_tenants
+
         except Exception as e:
             logger.error("Could not fetch tenants from base service", error=str(e))
             return []
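The filtering loop above issues one SELECT per tenant id; functionally equivalent filtering fits in a single round trip. A sketch of that variant under the same `Tenant` model import, offered as an alternative rather than what the commit does:

    from sqlalchemy import select
    from sqlalchemy.ext.asyncio import AsyncSession

    async def non_demo_tenant_ids(session: AsyncSession, tenant_ids: list) -> list:
        # One query: keep only ids whose tenant row exists and is not a demo.
        result = await session.execute(
            select(Tenant.id).where(
                Tenant.id.in_(tenant_ids),
                Tenant.is_demo.is_(False),
            )
        )
        return list(result.scalars().all())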
@@ -36,11 +36,18 @@ class Tenant(Base):
     # Status
     is_active = Column(Boolean, default=True)
     subscription_tier = Column(String(50), default="starter")
 
+    # Demo account flags
+    is_demo = Column(Boolean, default=False, index=True)
+    is_demo_template = Column(Boolean, default=False, index=True)
+    base_demo_tenant_id = Column(UUID(as_uuid=True), nullable=True, index=True)
+    demo_session_id = Column(String(100), nullable=True, index=True)
+    demo_expires_at = Column(DateTime(timezone=True), nullable=True)
+
     # ML status
     ml_model_trained = Column(Boolean, default=False)
     last_training_date = Column(DateTime(timezone=True))
 
     # Ownership (user_id without FK - cross-service reference)
     owner_id = Column(UUID(as_uuid=True), nullable=False, index=True)
 
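Taken together, `is_demo` and `demo_expires_at` give cleanup jobs a direct way to find stale demo tenants, with the `ix_tenants_is_demo` index from the migration below keeping the scan cheap. A minimal sketch against the model above (the helper name is hypothetical):

    from datetime import datetime, timezone
    from sqlalchemy import select
    from sqlalchemy.ext.asyncio import AsyncSession

    async def expired_demo_tenants(session: AsyncSession) -> list:
        # Demo tenants whose expiry timestamp has already passed.
        now = datetime.now(timezone.utc)
        result = await session.execute(
            select(Tenant).where(
                Tenant.is_demo.is_(True),
                Tenant.demo_expires_at.is_not(None),
                Tenant.demo_expires_at < now,
            )
        )
        return list(result.scalars().all())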
@@ -0,0 +1,48 @@
"""add_demo_columns

Revision ID: 2a9b3c4d5e6f
Revises: 1e8aebb4d9ce
Create Date: 2025-10-02 17:00:00.000000+02:00

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = '2a9b3c4d5e6f'
down_revision: Union[str, None] = '1e8aebb4d9ce'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    # Add demo-related columns to tenants table
    op.add_column('tenants', sa.Column('is_demo', sa.Boolean(), nullable=False, server_default='false'))
    op.add_column('tenants', sa.Column('is_demo_template', sa.Boolean(), nullable=False, server_default='false'))
    op.add_column('tenants', sa.Column('base_demo_tenant_id', sa.UUID(), nullable=True))
    op.add_column('tenants', sa.Column('demo_session_id', sa.String(length=100), nullable=True))
    op.add_column('tenants', sa.Column('demo_expires_at', sa.DateTime(timezone=True), nullable=True))

    # Create indexes for demo columns
    op.create_index(op.f('ix_tenants_is_demo'), 'tenants', ['is_demo'], unique=False)
    op.create_index(op.f('ix_tenants_is_demo_template'), 'tenants', ['is_demo_template'], unique=False)
    op.create_index(op.f('ix_tenants_base_demo_tenant_id'), 'tenants', ['base_demo_tenant_id'], unique=False)
    op.create_index(op.f('ix_tenants_demo_session_id'), 'tenants', ['demo_session_id'], unique=False)


def downgrade() -> None:
    # Drop indexes
    op.drop_index(op.f('ix_tenants_demo_session_id'), table_name='tenants')
    op.drop_index(op.f('ix_tenants_base_demo_tenant_id'), table_name='tenants')
    op.drop_index(op.f('ix_tenants_is_demo_template'), table_name='tenants')
    op.drop_index(op.f('ix_tenants_is_demo'), table_name='tenants')

    # Drop columns
    op.drop_column('tenants', 'demo_expires_at')
    op.drop_column('tenants', 'demo_session_id')
    op.drop_column('tenants', 'base_demo_tenant_id')
    op.drop_column('tenants', 'is_demo_template')
    op.drop_column('tenants', 'is_demo')