"""
|
|
Demo Data Cloner
|
|
Clones base demo data to session-specific virtual tenants
|
|
"""
|
|
|
|
from sqlalchemy.ext.asyncio import AsyncSession
|
|
from typing import Dict, Any, List, Optional
|
|
import httpx
|
|
import structlog
|
|
import uuid
|
|
import os
|
|
import asyncio
|
|
|
|
from app.core.redis_wrapper import DemoRedisWrapper
|
|
from app.core import settings
|
|
|
|
logger = structlog.get_logger()
|
|
|
|
|
|
class DemoDataCloner:
    """Clones demo data for isolated sessions"""

    def __init__(self, db: AsyncSession, redis: DemoRedisWrapper):
        self.db = db
        self.redis = redis
        self._http_client: Optional[httpx.AsyncClient] = None

    async def get_http_client(self) -> httpx.AsyncClient:
        """Get or create a shared HTTP client with connection pooling"""
        if self._http_client is None:
            self._http_client = httpx.AsyncClient(
                timeout=httpx.Timeout(30.0, connect=10.0),
                limits=httpx.Limits(
                    max_connections=20,
                    max_keepalive_connections=10,
                    keepalive_expiry=30.0
                )
            )
        return self._http_client

    async def close(self):
        """Close the shared HTTP client on cleanup"""
        if self._http_client:
            await self._http_client.aclose()
            self._http_client = None
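
    # Expected lifecycle (illustrative sketch; the surrounding session-manager
    # wiring is assumed, not part of this module): create one cloner per demo
    # session, reuse the pooled client across calls, and close it on teardown.
    #
    #     cloner = DemoDataCloner(db, redis)
    #     try:
    #         stats = await cloner.clone_tenant_data(
    #             session_id, base_tenant_id, virtual_tenant_id, demo_account_type
    #         )
    #     finally:
    #         await cloner.close()  # release pooled HTTP connections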

    async def clone_tenant_data(
        self,
        session_id: str,
        base_demo_tenant_id: str,
        virtual_tenant_id: str,
        demo_account_type: str
    ) -> Dict[str, Any]:
        """
        Clone all demo data from the base tenant to the virtual tenant.

        Args:
            session_id: Session ID
            base_demo_tenant_id: Base demo tenant UUID
            virtual_tenant_id: Virtual tenant UUID for this session
            demo_account_type: Type of demo account

        Returns:
            Cloning statistics
        """
        logger.info(
            "Starting data cloning",
            session_id=session_id,
            base_demo_tenant_id=base_demo_tenant_id,
            virtual_tenant_id=virtual_tenant_id
        )

        stats = {
            "session_id": session_id,
            "services_cloned": [],
            "total_records": 0,
            "redis_keys": 0
        }

        # Clone data from each service based on the demo account type
        services_to_clone = self._get_services_for_demo_type(demo_account_type)

        for service_name in services_to_clone:
            try:
                service_stats = await self._clone_service_data(
                    service_name,
                    base_demo_tenant_id,
                    virtual_tenant_id,
                    session_id,
                    demo_account_type
                )
                stats["services_cloned"].append(service_name)
                stats["total_records"] += service_stats.get("records_cloned", 0)

            except Exception as e:
                logger.error(
                    "Failed to clone service data",
                    service=service_name,
                    error=str(e)
                )

        # Populate the Redis cache with hot data
        redis_stats = await self._populate_redis_cache(
            session_id,
            virtual_tenant_id,
            demo_account_type
        )
        stats["redis_keys"] = redis_stats.get("keys_created", 0)

        logger.info(
            "Data cloning completed",
            session_id=session_id,
            stats=stats
        )

        return stats

    def _get_services_for_demo_type(self, demo_account_type: str) -> List[str]:
        """Get the list of services to clone based on demo type"""
        base_services = ["inventory", "sales", "orders", "pos"]

        if demo_account_type == "professional":
            # Professional adds recipes, production, suppliers, and procurement
            return base_services + ["recipes", "production", "suppliers", "procurement", "alert_processor"]
        elif demo_account_type == "enterprise":
            # Enterprise adds suppliers, procurement, and distribution (for the parent-child network)
            return base_services + ["suppliers", "procurement", "distribution", "alert_processor"]
        else:
            # All other demo types (including basic) also get suppliers, procurement, and distribution
            return base_services + ["suppliers", "procurement", "distribution", "alert_processor"]
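
    # Worked example of the mapping above: "professional" resolves to
    #     ["inventory", "sales", "orders", "pos",
    #      "recipes", "production", "suppliers", "procurement", "alert_processor"]
    # while "enterprise" (and any other type) drops recipes/production and adds
    # "distribution" instead.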

    async def _clone_service_data(
        self,
        service_name: str,
        base_tenant_id: str,
        virtual_tenant_id: str,
        session_id: str,
        demo_account_type: str
    ) -> Dict[str, Any]:
        """
        Clone data for a specific service.

        Args:
            service_name: Name of the service
            base_tenant_id: Source tenant ID
            virtual_tenant_id: Target tenant ID
            session_id: Session ID
            demo_account_type: Type of demo account

        Returns:
            Cloning statistics
        """
        service_url = self._get_service_url(service_name)

        # Get the internal API key from settings
        from app.core.config import settings
        internal_api_key = settings.INTERNAL_API_KEY

        async with httpx.AsyncClient(timeout=30.0) as client:
            response = await client.post(
                f"{service_url}/internal/demo/clone",
                json={
                    "base_tenant_id": base_tenant_id,
                    "virtual_tenant_id": virtual_tenant_id,
                    "session_id": session_id,
                    "demo_account_type": demo_account_type
                },
                headers={"X-Internal-API-Key": internal_api_key}
            )

            response.raise_for_status()
            return response.json()
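
    # The clone endpoint's JSON response is summed into the session stats via its
    # "records_cloned" field (see clone_tenant_data). A hypothetical payload, for
    # illustration only:
    #
    #     {"records_cloned": 128, "service": "inventory"}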

    async def _populate_redis_cache(
        self,
        session_id: str,
        virtual_tenant_id: str,
        demo_account_type: str
    ) -> Dict[str, Any]:
        """
        Populate Redis with frequently accessed data.

        Args:
            session_id: Session ID
            virtual_tenant_id: Virtual tenant ID
            demo_account_type: Demo account type

        Returns:
            Statistics about cached data
        """
        logger.info("Populating Redis cache", session_id=session_id)

        keys_created = 0

        # Cache inventory data (hot data)
        try:
            inventory_data = await self._fetch_inventory_data(virtual_tenant_id)
            await self.redis.set_session_data(
                session_id,
                "inventory",
                inventory_data,
                ttl=settings.REDIS_SESSION_TTL
            )
            keys_created += 1
        except Exception as e:
            logger.error("Failed to cache inventory", error=str(e))

        # Cache POS data
        try:
            pos_data = await self._fetch_pos_data(virtual_tenant_id)
            await self.redis.set_session_data(
                session_id,
                "pos",
                pos_data,
                ttl=settings.REDIS_SESSION_TTL
            )
            keys_created += 1
        except Exception as e:
            logger.error("Failed to cache POS data", error=str(e))

        # Cache recent sales
        try:
            sales_data = await self._fetch_recent_sales(virtual_tenant_id)
            await self.redis.set_session_data(
                session_id,
                "recent_sales",
                sales_data,
                ttl=settings.REDIS_SESSION_TTL
            )
            keys_created += 1
        except Exception as e:
            logger.error("Failed to cache sales", error=str(e))

        return {"keys_created": keys_created}

    async def _fetch_inventory_data(self, tenant_id: str) -> Dict[str, Any]:
        """Fetch inventory data for caching"""
        async with httpx.AsyncClient(timeout=httpx.Timeout(15.0, connect=5.0)) as client:
            response = await client.get(
                f"{settings.INVENTORY_SERVICE_URL}/api/inventory/summary",
                headers={"X-Tenant-Id": tenant_id}
            )
            response.raise_for_status()
            return response.json()

    async def _fetch_pos_data(self, tenant_id: str) -> Dict[str, Any]:
        """Fetch POS data for caching"""
        async with httpx.AsyncClient(timeout=httpx.Timeout(15.0, connect=5.0)) as client:
            response = await client.get(
                f"{settings.POS_SERVICE_URL}/api/pos/current-session",
                headers={"X-Tenant-Id": tenant_id}
            )
            response.raise_for_status()
            return response.json()

    async def _fetch_recent_sales(self, tenant_id: str) -> Dict[str, Any]:
        """Fetch recent sales for caching"""
        async with httpx.AsyncClient(timeout=httpx.Timeout(15.0, connect=5.0)) as client:
            response = await client.get(
                f"{settings.SALES_SERVICE_URL}/api/sales/recent?limit=50",
                headers={"X-Tenant-Id": tenant_id}
            )
            response.raise_for_status()
            return response.json()

    def _get_service_url(self, service_name: str) -> str:
        """Get the service URL from settings"""
        url_map = {
            "inventory": settings.INVENTORY_SERVICE_URL,
            "recipes": settings.RECIPES_SERVICE_URL,
            "sales": settings.SALES_SERVICE_URL,
            "orders": settings.ORDERS_SERVICE_URL,
            "production": settings.PRODUCTION_SERVICE_URL,
            "suppliers": settings.SUPPLIERS_SERVICE_URL,
            "pos": settings.POS_SERVICE_URL,
            "procurement": settings.PROCUREMENT_SERVICE_URL,
            "distribution": settings.DISTRIBUTION_SERVICE_URL,
            "forecasting": settings.FORECASTING_SERVICE_URL,
            "alert_processor": settings.ALERT_PROCESSOR_SERVICE_URL,
        }
        return url_map.get(service_name, "")

    async def delete_session_data(
        self,
        virtual_tenant_id: str,
        session_id: str
    ):
        """
        Delete all data for a session, using parallel deletion for performance.

        Args:
            virtual_tenant_id: Virtual tenant ID to delete
            session_id: Session ID
        """
        logger.info(
            "Deleting session data",
            virtual_tenant_id=virtual_tenant_id,
            session_id=session_id
        )

        # Get the shared HTTP client for all deletions
        client = await self.get_http_client()

        # All services can be deleted in parallel; each deletion endpoint
        # handles its own internal ordering if needed
        services = [
            "forecasting",
            "sales",
            "orders",
            "production",
            "inventory",
            "recipes",
            "suppliers",
            "pos",
            "distribution",
            "procurement",
            "alert_processor"
        ]

        # Create deletion tasks for all services
        deletion_tasks = [
            self._delete_service_data(service_name, virtual_tenant_id, client)
            for service_name in services
        ]

        # Execute all deletions in parallel, collecting exceptions instead of failing fast
        results = await asyncio.gather(*deletion_tasks, return_exceptions=True)

        # Log any failures
        for service_name, result in zip(services, results):
            if isinstance(result, Exception):
                logger.error(
                    "Failed to delete service data",
                    service=service_name,
                    error=str(result)
                )

        # Delete from Redis
        await self.redis.delete_session_data(session_id)

        logger.info("Session data deleted", virtual_tenant_id=virtual_tenant_id)

    async def _delete_service_data(
        self,
        service_name: str,
        virtual_tenant_id: str,
        client: httpx.AsyncClient
    ):
        """Delete data from a specific service using the provided HTTP client"""
        service_url = self._get_service_url(service_name)

        # Get the internal API key from settings
        from app.core.config import settings
        internal_api_key = settings.INTERNAL_API_KEY

        # Raise on HTTP errors so failures surface in delete_session_data's
        # gather(..., return_exceptions=True) and are logged per service
        response = await client.delete(
            f"{service_url}/internal/demo/tenant/{virtual_tenant_id}",
            headers={"X-Internal-API-Key": internal_api_key}
        )
        response.raise_for_status()
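

# A minimal end-to-end usage sketch (illustrative only; the database session,
# Redis wrapper, and tenant IDs are assumed to come from the surrounding
# session-manager code, not this module):
#
#     async def run_demo_session(db, redis, session_id, base_tenant_id, virtual_tenant_id):
#         cloner = DemoDataCloner(db, redis)
#         try:
#             stats = await cloner.clone_tenant_data(
#                 session_id=session_id,
#                 base_demo_tenant_id=base_tenant_id,
#                 virtual_tenant_id=virtual_tenant_id,
#                 demo_account_type="enterprise",
#             )
#             logger.info("Demo ready", stats=stats)
#             ...  # serve the demo session until it expires
#         finally:
#             await cloner.delete_session_data(virtual_tenant_id, session_id)
#             await cloner.close()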