Add DEMO feature to the project
services/demo_session/app/services/__init__.py (new file, +7)
@@ -0,0 +1,7 @@
"""Demo Session Services"""

from .session_manager import DemoSessionManager
from .data_cloner import DemoDataCloner
from .cleanup_service import DemoCleanupService

__all__ = ["DemoSessionManager", "DemoDataCloner", "DemoCleanupService"]
services/demo_session/app/services/cleanup_service.py (new file, +147)
@@ -0,0 +1,147 @@
"""
Demo Cleanup Service
Handles automatic cleanup of expired sessions
"""

from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, update
from datetime import datetime, timezone
from typing import List
import structlog

from app.models import DemoSession, DemoSessionStatus
from app.services.data_cloner import DemoDataCloner
from app.core import RedisClient

logger = structlog.get_logger()


class DemoCleanupService:
    """Handles cleanup of expired demo sessions"""

    def __init__(self, db: AsyncSession, redis: RedisClient):
        self.db = db
        self.redis = redis
        self.data_cloner = DemoDataCloner(db, redis)

    async def cleanup_expired_sessions(self) -> dict:
        """
        Find and clean up all expired sessions

        Returns:
            Cleanup statistics
        """
        logger.info("Starting demo session cleanup")

        now = datetime.now(timezone.utc)

        # Find expired sessions
        result = await self.db.execute(
            select(DemoSession).where(
                DemoSession.status == DemoSessionStatus.ACTIVE,
                DemoSession.expires_at < now
            )
        )
        expired_sessions = result.scalars().all()

        stats = {
            "total_expired": len(expired_sessions),
            "cleaned_up": 0,
            "failed": 0,
            "errors": []
        }

        for session in expired_sessions:
            try:
                # Mark as expired
                session.status = DemoSessionStatus.EXPIRED
                await self.db.commit()

                # Delete session data
                await self.data_cloner.delete_session_data(
                    str(session.virtual_tenant_id),
                    session.session_id
                )

                stats["cleaned_up"] += 1

                logger.info(
                    "Session cleaned up",
                    session_id=session.session_id,
                    age_minutes=(now - session.created_at).total_seconds() / 60
                )

            except Exception as e:
                stats["failed"] += 1
                stats["errors"].append({
                    "session_id": session.session_id,
                    "error": str(e)
                })
                logger.error(
                    "Failed to cleanup session",
                    session_id=session.session_id,
                    error=str(e)
                )

        logger.info("Demo session cleanup completed", stats=stats)
        return stats

    async def cleanup_old_destroyed_sessions(self, days: int = 7) -> int:
        """
        Delete destroyed session records older than the specified number of days

        Args:
            days: Number of days to keep destroyed sessions

        Returns:
            Number of deleted records
        """
        from datetime import timedelta

        cutoff_date = datetime.now(timezone.utc) - timedelta(days=days)

        result = await self.db.execute(
            select(DemoSession).where(
                DemoSession.status == DemoSessionStatus.DESTROYED,
                DemoSession.destroyed_at < cutoff_date
            )
        )
        old_sessions = result.scalars().all()

        for session in old_sessions:
            await self.db.delete(session)

        await self.db.commit()

        logger.info(
            "Old destroyed sessions deleted",
            count=len(old_sessions),
            older_than_days=days
        )

        return len(old_sessions)

    async def get_cleanup_stats(self) -> dict:
        """Get cleanup statistics"""
        result = await self.db.execute(select(DemoSession))
        all_sessions = result.scalars().all()

        now = datetime.now(timezone.utc)

        active_count = len([s for s in all_sessions if s.status == DemoSessionStatus.ACTIVE])
        expired_count = len([s for s in all_sessions if s.status == DemoSessionStatus.EXPIRED])
        destroyed_count = len([s for s in all_sessions if s.status == DemoSessionStatus.DESTROYED])

        # Find sessions that should be expired but aren't marked yet
        should_be_expired = len([
            s for s in all_sessions
            if s.status == DemoSessionStatus.ACTIVE and s.expires_at < now
        ])

        return {
            "total_sessions": len(all_sessions),
            "active_sessions": active_count,
            "expired_sessions": expired_count,
            "destroyed_sessions": destroyed_count,
            "pending_cleanup": should_be_expired
        }
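Note: the cleanup methods above are plain async calls; nothing in this file schedules them. A minimal sketch of a periodic runner is shown below, assuming an async_session_factory and redis_client provided by the application (both names are placeholders, not part of this commit), with an arbitrary 5-minute interval:

import asyncio
import structlog

logger = structlog.get_logger()

async def run_cleanup_loop(async_session_factory, redis_client, interval_seconds: int = 300):
    """Sweep expired demo sessions on a fixed interval until cancelled."""
    while True:
        # A fresh DB session per sweep keeps transactions short-lived.
        async with async_session_factory() as db:
            service = DemoCleanupService(db, redis_client)
            stats = await service.cleanup_expired_sessions()
            if stats["failed"]:
                # Per-session failures are already logged; surface the count here.
                logger.warning("Cleanup sweep had failures", failed=stats["failed"])
        await asyncio.sleep(interval_seconds)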
services/demo_session/app/services/data_cloner.py (new file, +288)
@@ -0,0 +1,288 @@
"""
Demo Data Cloner
Clones base demo data to session-specific virtual tenants
"""

from sqlalchemy.ext.asyncio import AsyncSession
from typing import Dict, Any, List
import httpx
import structlog
import uuid

from app.core import RedisClient, settings

logger = structlog.get_logger()


class DemoDataCloner:
    """Clones demo data for isolated sessions"""

    def __init__(self, db: AsyncSession, redis: RedisClient):
        self.db = db
        self.redis = redis

    async def clone_tenant_data(
        self,
        session_id: str,
        base_demo_tenant_id: str,
        virtual_tenant_id: str,
        demo_account_type: str
    ) -> Dict[str, Any]:
        """
        Clone all demo data from base tenant to virtual tenant

        Args:
            session_id: Session ID
            base_demo_tenant_id: Base demo tenant UUID
            virtual_tenant_id: Virtual tenant UUID for this session
            demo_account_type: Type of demo account

        Returns:
            Cloning statistics
        """
        logger.info(
            "Starting data cloning",
            session_id=session_id,
            base_demo_tenant_id=base_demo_tenant_id,
            virtual_tenant_id=virtual_tenant_id
        )

        stats = {
            "session_id": session_id,
            "services_cloned": [],
            "total_records": 0,
            "redis_keys": 0
        }

        # Clone data from each service based on demo account type
        services_to_clone = self._get_services_for_demo_type(demo_account_type)

        for service_name in services_to_clone:
            try:
                service_stats = await self._clone_service_data(
                    service_name,
                    base_demo_tenant_id,
                    virtual_tenant_id,
                    session_id
                )
                stats["services_cloned"].append(service_name)
                stats["total_records"] += service_stats.get("records_cloned", 0)

            except Exception as e:
                logger.error(
                    "Failed to clone service data",
                    service=service_name,
                    error=str(e)
                )

        # Populate Redis cache with hot data
        redis_stats = await self._populate_redis_cache(
            session_id,
            virtual_tenant_id,
            demo_account_type
        )
        stats["redis_keys"] = redis_stats.get("keys_created", 0)

        logger.info(
            "Data cloning completed",
            session_id=session_id,
            stats=stats
        )

        return stats

    def _get_services_for_demo_type(self, demo_account_type: str) -> List[str]:
        """Get list of services to clone based on demo type"""
        base_services = ["inventory", "sales", "orders", "pos"]

        if demo_account_type == "individual_bakery":
            # Individual bakery has production, recipes
            return base_services + ["recipes", "production"]
        elif demo_account_type == "central_baker":
            # Central baker satellite has suppliers
            return base_services + ["suppliers"]
        else:
            return base_services

    async def _clone_service_data(
        self,
        service_name: str,
        base_tenant_id: str,
        virtual_tenant_id: str,
        session_id: str
    ) -> Dict[str, Any]:
        """
        Clone data for a specific service

        Args:
            service_name: Name of the service
            base_tenant_id: Source tenant ID
            virtual_tenant_id: Target tenant ID
            session_id: Session ID

        Returns:
            Cloning statistics
        """
        service_url = self._get_service_url(service_name)

        async with httpx.AsyncClient(timeout=30.0) as client:
            response = await client.post(
                f"{service_url}/internal/demo/clone",
                json={
                    "base_tenant_id": base_tenant_id,
                    "virtual_tenant_id": virtual_tenant_id,
                    "session_id": session_id
                },
                headers={"X-Internal-Service": "demo-session"}
            )

            response.raise_for_status()
            return response.json()

    async def _populate_redis_cache(
        self,
        session_id: str,
        virtual_tenant_id: str,
        demo_account_type: str
    ) -> Dict[str, Any]:
        """
        Populate Redis with frequently accessed data

        Args:
            session_id: Session ID
            virtual_tenant_id: Virtual tenant ID
            demo_account_type: Demo account type

        Returns:
            Statistics about cached data
        """
        logger.info("Populating Redis cache", session_id=session_id)

        keys_created = 0

        # Cache inventory data (hot data)
        try:
            inventory_data = await self._fetch_inventory_data(virtual_tenant_id)
            await self.redis.set_session_data(
                session_id,
                "inventory",
                inventory_data,
                ttl=settings.REDIS_SESSION_TTL
            )
            keys_created += 1
        except Exception as e:
            logger.error("Failed to cache inventory", error=str(e))

        # Cache POS data
        try:
            pos_data = await self._fetch_pos_data(virtual_tenant_id)
            await self.redis.set_session_data(
                session_id,
                "pos",
                pos_data,
                ttl=settings.REDIS_SESSION_TTL
            )
            keys_created += 1
        except Exception as e:
            logger.error("Failed to cache POS data", error=str(e))

        # Cache recent sales
        try:
            sales_data = await self._fetch_recent_sales(virtual_tenant_id)
            await self.redis.set_session_data(
                session_id,
                "recent_sales",
                sales_data,
                ttl=settings.REDIS_SESSION_TTL
            )
            keys_created += 1
        except Exception as e:
            logger.error("Failed to cache sales", error=str(e))

        return {"keys_created": keys_created}

    async def _fetch_inventory_data(self, tenant_id: str) -> Dict[str, Any]:
        """Fetch inventory data for caching"""
        async with httpx.AsyncClient() as client:
            response = await client.get(
                f"{settings.INVENTORY_SERVICE_URL}/api/inventory/summary",
                headers={"X-Tenant-Id": tenant_id}
            )
            return response.json()

    async def _fetch_pos_data(self, tenant_id: str) -> Dict[str, Any]:
        """Fetch POS data for caching"""
        async with httpx.AsyncClient() as client:
            response = await client.get(
                f"{settings.POS_SERVICE_URL}/api/pos/current-session",
                headers={"X-Tenant-Id": tenant_id}
            )
            return response.json()

    async def _fetch_recent_sales(self, tenant_id: str) -> Dict[str, Any]:
        """Fetch recent sales for caching"""
        async with httpx.AsyncClient() as client:
            response = await client.get(
                f"{settings.SALES_SERVICE_URL}/api/sales/recent?limit=50",
                headers={"X-Tenant-Id": tenant_id}
            )
            return response.json()

    def _get_service_url(self, service_name: str) -> str:
        """Get service URL from settings"""
        url_map = {
            "inventory": settings.INVENTORY_SERVICE_URL,
            "recipes": settings.RECIPES_SERVICE_URL,
            "sales": settings.SALES_SERVICE_URL,
            "orders": settings.ORDERS_SERVICE_URL,
            "production": settings.PRODUCTION_SERVICE_URL,
            "suppliers": settings.SUPPLIERS_SERVICE_URL,
            "pos": settings.SALES_SERVICE_URL,
        }
        return url_map.get(service_name, "")

    async def delete_session_data(
        self,
        virtual_tenant_id: str,
        session_id: str
    ):
        """
        Delete all data for a session

        Args:
            virtual_tenant_id: Virtual tenant ID to delete
            session_id: Session ID
        """
        logger.info(
            "Deleting session data",
            virtual_tenant_id=virtual_tenant_id,
            session_id=session_id
        )

        # Delete from each service
        services = ["inventory", "recipes", "sales", "orders", "production", "suppliers", "pos"]

        for service_name in services:
            try:
                await self._delete_service_data(service_name, virtual_tenant_id)
            except Exception as e:
                logger.error(
                    "Failed to delete service data",
                    service=service_name,
                    error=str(e)
                )

        # Delete from Redis
        await self.redis.delete_session_data(session_id)

        logger.info("Session data deleted", virtual_tenant_id=virtual_tenant_id)

    async def _delete_service_data(self, service_name: str, virtual_tenant_id: str):
        """Delete data from a specific service"""
        service_url = self._get_service_url(service_name)

        async with httpx.AsyncClient(timeout=30.0) as client:
            await client.delete(
                f"{service_url}/internal/demo/tenant/{virtual_tenant_id}",
                headers={"X-Internal-Service": "demo-session"}
            )
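Note: _clone_service_data assumes every target service exposes an internal POST /internal/demo/clone endpoint that copies rows from the base tenant to the virtual tenant and returns a records_cloned count (the caller reads exactly that key). That endpoint is not part of this commit; below is a hedged sketch of the expected contract, written with FastAPI on the assumption the sibling services use it, with a stubbed clone_rows helper standing in for the real per-service copy logic:

from fastapi import APIRouter, Header, HTTPException
from pydantic import BaseModel

router = APIRouter()

class CloneRequest(BaseModel):
    base_tenant_id: str
    virtual_tenant_id: str
    session_id: str

async def clone_rows(base_tenant_id: str, virtual_tenant_id: str) -> int:
    # Stub: the real implementation would copy this service's rows
    # between tenants and return how many were copied.
    return 0

@router.post("/internal/demo/clone")
async def clone_demo_data(
    payload: CloneRequest,
    x_internal_service: str = Header(default=None),
):
    # Reject calls that don't carry the internal-service marker header.
    if x_internal_service != "demo-session":
        raise HTTPException(status_code=403, detail="internal endpoint only")
    records = await clone_rows(payload.base_tenant_id, payload.virtual_tenant_id)
    return {"records_cloned": records}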
services/demo_session/app/services/k8s_job_cloner.py (new file, +166)
@@ -0,0 +1,166 @@
"""
Kubernetes Job-based Demo Data Cloner
Triggers a K8s Job to clone demo data at the database level
"""

import httpx
import structlog
from typing import Dict, Any
import os

logger = structlog.get_logger()


class K8sJobCloner:
    """Triggers Kubernetes Jobs to clone demo data"""

    def __init__(self):
        self.k8s_api_url = os.getenv("KUBERNETES_SERVICE_HOST")
        self.namespace = os.getenv("POD_NAMESPACE", "bakery-ia")
        self.clone_job_image = os.getenv("CLONE_JOB_IMAGE", "bakery/inventory-service:latest")
        # Service account token for K8s API access
        with open("/var/run/secrets/kubernetes.io/serviceaccount/token", "r") as f:
            self.token = f.read()

    async def clone_tenant_data(
        self,
        session_id: str,
        base_demo_tenant_id: str,
        virtual_tenant_id: str,
        demo_account_type: str
    ) -> Dict[str, Any]:
        """
        Clone demo data by creating a Kubernetes Job

        Args:
            session_id: Session ID
            base_demo_tenant_id: Base demo tenant UUID (not used in job approach)
            virtual_tenant_id: Virtual tenant UUID for this session
            demo_account_type: Type of demo account

        Returns:
            Job creation status
        """
        logger.info(
            "Triggering demo data cloning job",
            session_id=session_id,
            virtual_tenant_id=virtual_tenant_id,
            demo_account_type=demo_account_type,
            clone_image=self.clone_job_image
        )

        job_name = f"demo-clone-{virtual_tenant_id[:8]}"

        # Create Job manifest
        job_manifest = {
            "apiVersion": "batch/v1",
            "kind": "Job",
            "metadata": {
                "name": job_name,
                "namespace": self.namespace,
                "labels": {
                    "app": "demo-clone",
                    "session-id": session_id,
                    "component": "runtime"
                }
            },
            "spec": {
                "ttlSecondsAfterFinished": 3600,
                "backoffLimit": 2,
                "template": {
                    "metadata": {
                        "labels": {"app": "demo-clone"}
                    },
                    "spec": {
                        "restartPolicy": "Never",
                        "containers": [{
                            "name": "clone-data",
                            "image": self.clone_job_image,  # Configured via environment variable
                            "imagePullPolicy": "IfNotPresent",  # Don't pull if image exists locally
                            "command": ["python", "/app/scripts/demo/clone_demo_tenant.py"],
                            "env": [
                                {"name": "VIRTUAL_TENANT_ID", "value": virtual_tenant_id},
                                {"name": "DEMO_ACCOUNT_TYPE", "value": demo_account_type},
                                {
                                    "name": "INVENTORY_DATABASE_URL",
                                    "valueFrom": {
                                        "secretKeyRef": {
                                            "name": "database-secrets",
                                            "key": "INVENTORY_DATABASE_URL"
                                        }
                                    }
                                },
                                {
                                    "name": "SALES_DATABASE_URL",
                                    "valueFrom": {
                                        "secretKeyRef": {
                                            "name": "database-secrets",
                                            "key": "SALES_DATABASE_URL"
                                        }
                                    }
                                },
                                {
                                    "name": "ORDERS_DATABASE_URL",
                                    "valueFrom": {
                                        "secretKeyRef": {
                                            "name": "database-secrets",
                                            "key": "ORDERS_DATABASE_URL"
                                        }
                                    }
                                },
                                {"name": "LOG_LEVEL", "value": "INFO"}
                            ],
                            "resources": {
                                "requests": {"memory": "256Mi", "cpu": "100m"},
                                "limits": {"memory": "512Mi", "cpu": "500m"}
                            }
                        }]
                    }
                }
            }
        }

        try:
            # Create the Job via K8s API
            async with httpx.AsyncClient(verify=False, timeout=30.0) as client:
                response = await client.post(
                    f"https://{self.k8s_api_url}/apis/batch/v1/namespaces/{self.namespace}/jobs",
                    json=job_manifest,
                    headers={
                        "Authorization": f"Bearer {self.token}",
                        "Content-Type": "application/json"
                    }
                )

                if response.status_code == 201:
                    logger.info(
                        "Demo clone job created successfully",
                        job_name=job_name,
                        session_id=session_id
                    )
                    return {
                        "success": True,
                        "job_name": job_name,
                        "method": "kubernetes_job"
                    }
                else:
                    logger.error(
                        "Failed to create demo clone job",
                        status_code=response.status_code,
                        response=response.text
                    )
                    return {
                        "success": False,
                        "error": f"K8s API returned {response.status_code}"
                    }

        except Exception as e:
            logger.error(
                "Error creating demo clone job",
                error=str(e),
                exc_info=True
            )
            return {
                "success": False,
                "error": str(e)
            }
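Note: the K8s API call above returns as soon as the Job object is accepted (HTTP 201); the clone itself completes asynchronously. A caller that needs to block until cloning finishes could poll the Job's status sub-object along these lines. This is a sketch reusing the token and host fields of K8sJobCloner; the method is not part of this commit and the thresholds are illustrative:

import asyncio
import httpx

async def wait_for_clone_job(cloner: K8sJobCloner, job_name: str, timeout_s: float = 300.0) -> bool:
    """Poll the Jobs API until the clone Job succeeds, fails, or times out."""
    url = (
        f"https://{cloner.k8s_api_url}/apis/batch/v1"
        f"/namespaces/{cloner.namespace}/jobs/{job_name}"
    )
    headers = {"Authorization": f"Bearer {cloner.token}"}
    loop = asyncio.get_running_loop()
    deadline = loop.time() + timeout_s
    async with httpx.AsyncClient(verify=False, timeout=10.0) as client:
        while loop.time() < deadline:
            resp = await client.get(url, headers=headers)
            status = resp.json().get("status", {})
            if status.get("succeeded", 0) >= 1:
                return True
            if status.get("failed", 0) > 2:  # mirrors backoffLimit: 2 in the manifest
                return False
            await asyncio.sleep(2)
    return False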
services/demo_session/app/services/session_manager.py (new file, +267)
@@ -0,0 +1,267 @@
"""
Demo Session Manager
Handles creation, extension, and destruction of demo sessions
"""

from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, update
from datetime import datetime, timedelta, timezone
from typing import Optional, Dict, Any
import uuid
import secrets
import structlog

from app.models import DemoSession, DemoSessionStatus
from app.core import RedisClient, settings

logger = structlog.get_logger()


class DemoSessionManager:
    """Manages demo session lifecycle"""

    def __init__(self, db: AsyncSession, redis: RedisClient):
        self.db = db
        self.redis = redis

    async def create_session(
        self,
        demo_account_type: str,
        user_id: Optional[str] = None,
        ip_address: Optional[str] = None,
        user_agent: Optional[str] = None
    ) -> DemoSession:
        """
        Create a new demo session

        Args:
            demo_account_type: 'individual_bakery' or 'central_baker'
            user_id: Optional user ID if authenticated
            ip_address: Client IP address
            user_agent: Client user agent

        Returns:
            Created demo session
        """
        logger.info("Creating demo session", demo_account_type=demo_account_type)

        # Generate unique session ID
        session_id = f"demo_{secrets.token_urlsafe(16)}"

        # Generate virtual tenant ID
        virtual_tenant_id = uuid.uuid4()

        # Get base demo tenant ID from config
        demo_config = settings.DEMO_ACCOUNTS.get(demo_account_type)
        if not demo_config:
            raise ValueError(f"Invalid demo account type: {demo_account_type}")

        # Create session record
        session = DemoSession(
            session_id=session_id,
            user_id=uuid.UUID(user_id) if user_id else None,
            ip_address=ip_address,
            user_agent=user_agent,
            base_demo_tenant_id=uuid.uuid4(),  # Will be set by seeding script
            virtual_tenant_id=virtual_tenant_id,
            demo_account_type=demo_account_type,
            status=DemoSessionStatus.ACTIVE,
            created_at=datetime.now(timezone.utc),
            expires_at=datetime.now(timezone.utc) + timedelta(
                minutes=settings.DEMO_SESSION_DURATION_MINUTES
            ),
            last_activity_at=datetime.now(timezone.utc),
            data_cloned=False,
            redis_populated=False,
            metadata={
                "demo_config": demo_config,
                "extension_count": 0
            }
        )

        self.db.add(session)
        await self.db.commit()
        await self.db.refresh(session)

        # Store session metadata in Redis
        await self._store_session_metadata(session)

        logger.info(
            "Demo session created",
            session_id=session_id,
            virtual_tenant_id=str(virtual_tenant_id),
            expires_at=session.expires_at.isoformat()
        )

        return session

    async def get_session(self, session_id: str) -> Optional[DemoSession]:
        """Get a session by session_id"""
        result = await self.db.execute(
            select(DemoSession).where(DemoSession.session_id == session_id)
        )
        return result.scalar_one_or_none()

    async def get_session_by_virtual_tenant(self, virtual_tenant_id: str) -> Optional[DemoSession]:
        """Get a session by virtual tenant ID"""
        result = await self.db.execute(
            select(DemoSession).where(
                DemoSession.virtual_tenant_id == uuid.UUID(virtual_tenant_id)
            )
        )
        return result.scalar_one_or_none()

    async def extend_session(self, session_id: str) -> DemoSession:
        """
        Extend session expiration time

        Args:
            session_id: Session ID to extend

        Returns:
            Updated session

        Raises:
            ValueError: If the session cannot be extended
        """
        session = await self.get_session(session_id)

        if not session:
            raise ValueError(f"Session not found: {session_id}")

        if session.status != DemoSessionStatus.ACTIVE:
            raise ValueError(f"Cannot extend {session.status.value} session")

        # Check extension limit
        extension_count = session.metadata.get("extension_count", 0)
        if extension_count >= settings.DEMO_SESSION_MAX_EXTENSIONS:
            raise ValueError(f"Maximum extensions ({settings.DEMO_SESSION_MAX_EXTENSIONS}) reached")

        # Extend expiration
        new_expires_at = datetime.now(timezone.utc) + timedelta(
            minutes=settings.DEMO_SESSION_DURATION_MINUTES
        )

        session.expires_at = new_expires_at
        session.last_activity_at = datetime.now(timezone.utc)
        session.metadata["extension_count"] = extension_count + 1

        await self.db.commit()
        await self.db.refresh(session)

        # Extend Redis TTL
        await self.redis.extend_session_ttl(
            session_id,
            settings.REDIS_SESSION_TTL
        )

        logger.info(
            "Session extended",
            session_id=session_id,
            new_expires_at=new_expires_at.isoformat(),
            extension_count=extension_count + 1
        )

        return session

    async def update_activity(self, session_id: str):
        """Update the last activity timestamp"""
        await self.db.execute(
            update(DemoSession)
            .where(DemoSession.session_id == session_id)
            .values(
                last_activity_at=datetime.now(timezone.utc),
                request_count=DemoSession.request_count + 1
            )
        )
        await self.db.commit()

    async def mark_data_cloned(self, session_id: str):
        """Mark the session as having data cloned"""
        await self.db.execute(
            update(DemoSession)
            .where(DemoSession.session_id == session_id)
            .values(data_cloned=True)
        )
        await self.db.commit()

    async def mark_redis_populated(self, session_id: str):
        """Mark the session as having Redis data populated"""
        await self.db.execute(
            update(DemoSession)
            .where(DemoSession.session_id == session_id)
            .values(redis_populated=True)
        )
        await self.db.commit()

    async def destroy_session(self, session_id: str):
        """
        Destroy a demo session and clean up its resources

        Args:
            session_id: Session ID to destroy
        """
        session = await self.get_session(session_id)

        if not session:
            logger.warning("Session not found for destruction", session_id=session_id)
            return

        # Update session status
        session.status = DemoSessionStatus.DESTROYED
        session.destroyed_at = datetime.now(timezone.utc)

        await self.db.commit()

        # Delete Redis data
        await self.redis.delete_session_data(session_id)

        logger.info(
            "Session destroyed",
            session_id=session_id,
            virtual_tenant_id=str(session.virtual_tenant_id),
            duration_seconds=(
                session.destroyed_at - session.created_at
            ).total_seconds()
        )

    async def _store_session_metadata(self, session: DemoSession):
        """Store session metadata in Redis"""
        await self.redis.set_session_data(
            session.session_id,
            "metadata",
            {
                "session_id": session.session_id,
                "virtual_tenant_id": str(session.virtual_tenant_id),
                "demo_account_type": session.demo_account_type,
                "expires_at": session.expires_at.isoformat(),
                "created_at": session.created_at.isoformat()
            },
            ttl=settings.REDIS_SESSION_TTL
        )

    async def get_active_sessions_count(self) -> int:
        """Get the count of active sessions"""
        result = await self.db.execute(
            select(DemoSession).where(DemoSession.status == DemoSessionStatus.ACTIVE)
        )
        return len(result.scalars().all())

    async def get_session_stats(self) -> Dict[str, Any]:
        """Get session statistics"""
        result = await self.db.execute(select(DemoSession))
        all_sessions = result.scalars().all()

        active_sessions = [s for s in all_sessions if s.status == DemoSessionStatus.ACTIVE]

        return {
            "total_sessions": len(all_sessions),
            "active_sessions": len(active_sessions),
            "expired_sessions": len([s for s in all_sessions if s.status == DemoSessionStatus.EXPIRED]),
            "destroyed_sessions": len([s for s in all_sessions if s.status == DemoSessionStatus.DESTROYED]),
            "avg_duration_minutes": sum(
                (s.destroyed_at - s.created_at).total_seconds() / 60
                for s in all_sessions if s.destroyed_at
            ) / max(len([s for s in all_sessions if s.destroyed_at]), 1),
            "total_requests": sum(s.request_count for s in all_sessions)
        }
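Note: taken together, an API layer would chain these services roughly as follows when a visitor starts a demo. This is a sketch of the presumed call order only, derived from the flags and methods above; db and redis stand in for real dependencies, and no such endpoint is part of this commit:

async def start_demo(db, redis, demo_account_type: str, ip: str, ua: str):
    manager = DemoSessionManager(db, redis)
    cloner = DemoDataCloner(db, redis)

    # 1. Create the session row and its Redis metadata.
    session = await manager.create_session(demo_account_type, ip_address=ip, user_agent=ua)

    # 2. Copy base demo data into the session's virtual tenant.
    await cloner.clone_tenant_data(
        session.session_id,
        str(session.base_demo_tenant_id),
        str(session.virtual_tenant_id),
        demo_account_type,
    )

    # 3. Record that cloning and cache population completed.
    await manager.mark_data_cloned(session.session_id)
    await manager.mark_redis_populated(session.session_id)

    return session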