"""
|
|
Demo Data Cloner
|
|
Clones base demo data to session-specific virtual tenants
|
|
"""
|
|
|
|
from sqlalchemy.ext.asyncio import AsyncSession
|
|
from typing import Dict, Any, List
|
|
import httpx
|
|
import structlog
|
|
import uuid
|
|
import os
|
|
|
|
from app.core.redis_wrapper import DemoRedisWrapper
|
|
from app.core import settings
|
|
|
|
logger = structlog.get_logger()


class DemoDataCloner:
    """Clones demo data for isolated sessions"""

    def __init__(self, db: AsyncSession, redis: DemoRedisWrapper):
        self.db = db
        self.redis = redis

    async def clone_tenant_data(
        self,
        session_id: str,
        base_demo_tenant_id: str,
        virtual_tenant_id: str,
        demo_account_type: str
    ) -> Dict[str, Any]:
        """
        Clone all demo data from base tenant to virtual tenant

        Args:
            session_id: Session ID
            base_demo_tenant_id: Base demo tenant UUID
            virtual_tenant_id: Virtual tenant UUID for this session
            demo_account_type: Type of demo account

        Returns:
            Cloning statistics
        """
        logger.info(
            "Starting data cloning",
            session_id=session_id,
            base_demo_tenant_id=base_demo_tenant_id,
            virtual_tenant_id=virtual_tenant_id
        )

        stats = {
            "session_id": session_id,
            "services_cloned": [],
            "total_records": 0,
            "redis_keys": 0
        }

        # Clone data from each service based on demo account type
        services_to_clone = self._get_services_for_demo_type(demo_account_type)

        for service_name in services_to_clone:
            try:
                service_stats = await self._clone_service_data(
                    service_name,
                    base_demo_tenant_id,
                    virtual_tenant_id,
                    session_id,
                    demo_account_type
                )
                stats["services_cloned"].append(service_name)
                stats["total_records"] += service_stats.get("records_cloned", 0)
            except Exception as e:
                logger.error(
                    "Failed to clone service data",
                    service=service_name,
                    error=str(e)
                )

        # Populate Redis cache with hot data
        redis_stats = await self._populate_redis_cache(
            session_id,
            virtual_tenant_id,
            demo_account_type
        )
        stats["redis_keys"] = redis_stats.get("keys_created", 0)

        logger.info(
            "Data cloning completed",
            session_id=session_id,
            stats=stats
        )

        return stats

    def _get_services_for_demo_type(self, demo_account_type: str) -> List[str]:
        """Get list of services to clone based on demo type"""
        base_services = ["inventory", "sales", "orders", "pos"]

        if demo_account_type == "individual_bakery":
            # Individual bakery has production, recipes, suppliers, and procurement
            return base_services + ["recipes", "production", "suppliers", "procurement"]
        elif demo_account_type == "central_baker":
            # Central baker satellite has suppliers and procurement
            return base_services + ["suppliers", "procurement"]
        else:
            # Basic tenant has suppliers and procurement
            return base_services + ["suppliers", "procurement"]

    async def _clone_service_data(
        self,
        service_name: str,
        base_tenant_id: str,
        virtual_tenant_id: str,
        session_id: str,
        demo_account_type: str
    ) -> Dict[str, Any]:
        """
        Clone data for a specific service

        Args:
            service_name: Name of the service
            base_tenant_id: Source tenant ID
            virtual_tenant_id: Target tenant ID
            session_id: Session ID
            demo_account_type: Type of demo account

        Returns:
            Cloning statistics
        """
        service_url = self._get_service_url(service_name)

        # Get internal API key from environment
        internal_api_key = os.getenv("INTERNAL_API_KEY", "dev-internal-key-change-in-production")

        async with httpx.AsyncClient(timeout=30.0) as client:
            response = await client.post(
                f"{service_url}/internal/demo/clone",
                json={
                    "base_tenant_id": base_tenant_id,
                    "virtual_tenant_id": virtual_tenant_id,
                    "session_id": session_id,
                    "demo_account_type": demo_account_type
                },
                headers={"X-Internal-Api-Key": internal_api_key}
            )

            response.raise_for_status()
            return response.json()
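
    # Note (inferred from usage, not a documented contract): clone_tenant_data only
    # reads "records_cloned" from the per-service response, so each service's
    # /internal/demo/clone endpoint is expected to return a JSON body shaped roughly
    # like {"records_cloned": <int>, ...}.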

    async def _populate_redis_cache(
        self,
        session_id: str,
        virtual_tenant_id: str,
        demo_account_type: str
    ) -> Dict[str, Any]:
        """
        Populate Redis with frequently accessed data

        Args:
            session_id: Session ID
            virtual_tenant_id: Virtual tenant ID
            demo_account_type: Demo account type

        Returns:
            Statistics about cached data
        """
        logger.info("Populating Redis cache", session_id=session_id)

        keys_created = 0

        # Cache inventory data (hot data)
        try:
            inventory_data = await self._fetch_inventory_data(virtual_tenant_id)
            await self.redis.set_session_data(
                session_id,
                "inventory",
                inventory_data,
                ttl=settings.REDIS_SESSION_TTL
            )
            keys_created += 1
        except Exception as e:
            logger.error("Failed to cache inventory", error=str(e))

        # Cache POS data
        try:
            pos_data = await self._fetch_pos_data(virtual_tenant_id)
            await self.redis.set_session_data(
                session_id,
                "pos",
                pos_data,
                ttl=settings.REDIS_SESSION_TTL
            )
            keys_created += 1
        except Exception as e:
            logger.error("Failed to cache POS data", error=str(e))

        # Cache recent sales
        try:
            sales_data = await self._fetch_recent_sales(virtual_tenant_id)
            await self.redis.set_session_data(
                session_id,
                "recent_sales",
                sales_data,
                ttl=settings.REDIS_SESSION_TTL
            )
            keys_created += 1
        except Exception as e:
            logger.error("Failed to cache sales", error=str(e))

        return {"keys_created": keys_created}

    async def _fetch_inventory_data(self, tenant_id: str) -> Dict[str, Any]:
        """Fetch inventory data for caching"""
        async with httpx.AsyncClient() as client:
            response = await client.get(
                f"{settings.INVENTORY_SERVICE_URL}/api/inventory/summary",
                headers={"X-Tenant-Id": tenant_id}
            )
            # Surface HTTP errors so the caller logs them instead of caching an error body
            response.raise_for_status()
            return response.json()

    async def _fetch_pos_data(self, tenant_id: str) -> Dict[str, Any]:
        """Fetch POS data for caching"""
        async with httpx.AsyncClient() as client:
            response = await client.get(
                f"{settings.POS_SERVICE_URL}/api/pos/current-session",
                headers={"X-Tenant-Id": tenant_id}
            )
            response.raise_for_status()
            return response.json()

    async def _fetch_recent_sales(self, tenant_id: str) -> Dict[str, Any]:
        """Fetch recent sales for caching"""
        async with httpx.AsyncClient() as client:
            response = await client.get(
                f"{settings.SALES_SERVICE_URL}/api/sales/recent?limit=50",
                headers={"X-Tenant-Id": tenant_id}
            )
            response.raise_for_status()
            return response.json()

    def _get_service_url(self, service_name: str) -> str:
        """Get service URL from settings"""
        url_map = {
            "inventory": settings.INVENTORY_SERVICE_URL,
            "recipes": settings.RECIPES_SERVICE_URL,
            "sales": settings.SALES_SERVICE_URL,
            "orders": settings.ORDERS_SERVICE_URL,
            "production": settings.PRODUCTION_SERVICE_URL,
            "suppliers": settings.SUPPLIERS_SERVICE_URL,
            "pos": settings.POS_SERVICE_URL,
            "procurement": settings.PROCUREMENT_SERVICE_URL,
        }
        # Unknown services resolve to an empty string; callers must skip those
        return url_map.get(service_name, "")

    async def delete_session_data(
        self,
        virtual_tenant_id: str,
        session_id: str
    ):
        """
        Delete all data for a session

        Args:
            virtual_tenant_id: Virtual tenant ID to delete
            session_id: Session ID
        """
        logger.info(
            "Deleting session data",
            virtual_tenant_id=virtual_tenant_id,
            session_id=session_id
        )

        # Delete from each service
        # Note: Services are deleted in reverse dependency order to avoid foreign key issues
        services = [
            "forecasting",   # No dependencies
            "sales",         # Depends on inventory, recipes
            "orders",        # Depends on customers (within same service)
            "production",    # Depends on recipes, equipment
            "inventory",     # Core data (ingredients, products)
            "recipes",       # Core data
            "suppliers",     # Core data
            "pos",           # Point of sale data
            "procurement"    # Procurement and purchase orders
        ]

        for service_name in services:
            try:
                await self._delete_service_data(service_name, virtual_tenant_id)
            except Exception as e:
                logger.error(
                    "Failed to delete service data",
                    service=service_name,
                    error=str(e)
                )

        # Delete from Redis
        await self.redis.delete_session_data(session_id)

        logger.info("Session data deleted", virtual_tenant_id=virtual_tenant_id)

    async def _delete_service_data(self, service_name: str, virtual_tenant_id: str):
        """Delete data from a specific service"""
        service_url = self._get_service_url(service_name)
        if not service_url:
            # Some entries (e.g. "forecasting") have no URL configured; skip them
            logger.warning("No service URL configured, skipping delete", service=service_name)
            return

        # Get internal API key from environment
        internal_api_key = os.getenv("INTERNAL_API_KEY", "dev-internal-key-change-in-production")

        async with httpx.AsyncClient(timeout=30.0) as client:
            response = await client.delete(
                f"{service_url}/internal/demo/tenant/{virtual_tenant_id}",
                headers={"X-Internal-Api-Key": internal_api_key}
            )
            # Propagate HTTP errors so delete_session_data can log the failure
            response.raise_for_status()
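

# ---------------------------------------------------------------------------
# Usage sketch (illustrative only, not part of this module's API): how a
# session-provisioning flow might drive the cloner. The caller is assumed to
# already hold an AsyncSession and a DemoRedisWrapper and to have generated a
# virtual tenant ID for the session; adapt to the application's actual wiring.
# ---------------------------------------------------------------------------
#
# async def provision_demo_session(
#     db: AsyncSession,
#     redis: DemoRedisWrapper,
#     session_id: str,
#     base_demo_tenant_id: str,
#     virtual_tenant_id: str,
# ) -> Dict[str, Any]:
#     cloner = DemoDataCloner(db, redis)
#     return await cloner.clone_tenant_data(
#         session_id=session_id,
#         base_demo_tenant_id=base_demo_tenant_id,
#         virtual_tenant_id=virtual_tenant_id,
#         demo_account_type="individual_bakery",
#     )
#
# ...and when the session expires:
#
#     await cloner.delete_session_data(virtual_tenant_id, session_id)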