New enterprise feature2
@@ -4,11 +4,12 @@ Clones base demo data to session-specific virtual tenants
 """
 
 from sqlalchemy.ext.asyncio import AsyncSession
-from typing import Dict, Any, List
+from typing import Dict, Any, List, Optional
 import httpx
 import structlog
 import uuid
 import os
+import asyncio
 
 from app.core.redis_wrapper import DemoRedisWrapper
 from app.core import settings
@@ -22,6 +23,26 @@ class DemoDataCloner:
     def __init__(self, db: AsyncSession, redis: DemoRedisWrapper):
         self.db = db
         self.redis = redis
+        self._http_client: Optional[httpx.AsyncClient] = None
+
+    async def get_http_client(self) -> httpx.AsyncClient:
+        """Get or create shared HTTP client with connection pooling"""
+        if self._http_client is None:
+            self._http_client = httpx.AsyncClient(
+                timeout=httpx.Timeout(30.0, connect=10.0),
+                limits=httpx.Limits(
+                    max_connections=20,
+                    max_keepalive_connections=10,
+                    keepalive_expiry=30.0
+                )
+            )
+        return self._http_client
+
+    async def close(self):
+        """Close HTTP client on cleanup"""
+        if self._http_client:
+            await self._http_client.aclose()
+            self._http_client = None
 
     async def clone_tenant_data(
         self,
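The shared client above is created lazily, cached on the instance, and reused for every outbound call. A minimal sketch of the intended lifecycle at a call site (the import path and surrounding session-manager code are assumptions, not part of this commit):

# Sketch of a call site (assumed, not in this diff): the pooled client is
# created on first use, reused on later calls, and closed once on teardown.
from sqlalchemy.ext.asyncio import AsyncSession

from app.core.redis_wrapper import DemoRedisWrapper
from app.services.demo_cloner import DemoDataCloner  # import path assumed


async def teardown_demo_session(db: AsyncSession, redis: DemoRedisWrapper) -> None:
    cloner = DemoDataCloner(db, redis)
    try:
        client = await cloner.get_http_client()           # lazily created
        assert client is await cloner.get_http_client()   # same pooled instance
    finally:
        await cloner.close()                              # release keep-alive connections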
@@ -214,7 +235,7 @@ class DemoDataCloner:
 
     async def _fetch_inventory_data(self, tenant_id: str) -> Dict[str, Any]:
         """Fetch inventory data for caching"""
-        async with httpx.AsyncClient() as client:
+        async with httpx.AsyncClient(timeout=httpx.Timeout(15.0, connect=5.0)) as client:
             response = await client.get(
                 f"{settings.INVENTORY_SERVICE_URL}/api/inventory/summary",
                 headers={"X-Tenant-Id": tenant_id}
@@ -223,7 +244,7 @@ class DemoDataCloner:
 
     async def _fetch_pos_data(self, tenant_id: str) -> Dict[str, Any]:
         """Fetch POS data for caching"""
-        async with httpx.AsyncClient() as client:
+        async with httpx.AsyncClient(timeout=httpx.Timeout(15.0, connect=5.0)) as client:
             response = await client.get(
                 f"{settings.POS_SERVICE_URL}/api/pos/current-session",
                 headers={"X-Tenant-Id": tenant_id}
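Both fetch helpers keep short-lived clients but now bound each phase of the request. For reference (standard httpx behavior, not code from this commit), the shorthand httpx.Timeout(15.0, connect=5.0) is equivalent to spelling every phase out:

import httpx

# 15.0 becomes the default for read/write/pool; connect is overridden to 5s.
explicit = httpx.Timeout(connect=5.0, read=15.0, write=15.0, pool=15.0)


async def fetch_summary(url: str, tenant_id: str) -> dict:
    # Hypothetical helper mirroring the _fetch_* methods above.
    async with httpx.AsyncClient(timeout=explicit) as client:
        response = await client.get(url, headers={"X-Tenant-Id": tenant_id})
        response.raise_for_status()
        return response.json()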
@@ -261,7 +282,7 @@ class DemoDataCloner:
         session_id: str
     ):
         """
-        Delete all data for a session
+        Delete all data for a session using parallel deletion for performance
 
         Args:
             virtual_tenant_id: Virtual tenant ID to delete
@@ -273,29 +294,40 @@ class DemoDataCloner:
             session_id=session_id
         )
 
-        # Delete from each service
-        # Note: Services are deleted in reverse dependency order to avoid foreign key issues
+        # Get shared HTTP client for all deletions
+        client = await self.get_http_client()
+
+        # Services list - all can be deleted in parallel as deletion endpoints
+        # handle their own internal ordering if needed
         services = [
-            "forecasting",    # No dependencies
-            "sales",          # Depends on inventory, recipes
-            "orders",         # Depends on customers (within same service)
-            "production",     # Depends on recipes, equipment
-            "inventory",      # Core data (ingredients, products)
-            "recipes",        # Core data
-            "suppliers",      # Core data
-            "pos",            # Point of sale data
-            "distribution",   # Distribution routes
-            "procurement"     # Procurement and purchase orders
+            "forecasting",
+            "sales",
+            "orders",
+            "production",
+            "inventory",
+            "recipes",
+            "suppliers",
+            "pos",
+            "distribution",
+            "procurement"
         ]
 
-        for service_name in services:
-            try:
-                await self._delete_service_data(service_name, virtual_tenant_id)
-            except Exception as e:
+        # Create deletion tasks for all services
+        deletion_tasks = [
+            self._delete_service_data(service_name, virtual_tenant_id, client)
+            for service_name in services
+        ]
+
+        # Execute all deletions in parallel with exception handling
+        results = await asyncio.gather(*deletion_tasks, return_exceptions=True)
+
+        # Log any failures
+        for service_name, result in zip(services, results):
+            if isinstance(result, Exception):
                 logger.error(
                     "Failed to delete service data",
                     service=service_name,
-                    error=str(e)
+                    error=str(result)
                 )
 
         # Delete from Redis
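A self-contained illustration of the fan-out used above (service names here are only examples, and delete_one stands in for _delete_service_data): gather with return_exceptions=True returns results in input order, which is what lets zip(services, results) attribute each failure to the right service.

import asyncio


async def delete_one(service: str) -> str:
    # Stand-in for _delete_service_data; "sales" simulates a failing service.
    if service == "sales":
        raise RuntimeError("503 from sales service")
    return f"{service}: deleted"


async def main() -> None:
    services = ["forecasting", "sales", "inventory"]
    results = await asyncio.gather(
        *(delete_one(s) for s in services),
        return_exceptions=True,          # failures come back as values, in order
    )
    for service, result in zip(services, results):
        if isinstance(result, Exception):
            print(f"failed: {service}: {result}")
        else:
            print(result)


asyncio.run(main())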
@@ -303,16 +335,20 @@ class DemoDataCloner:
 
         logger.info("Session data deleted", virtual_tenant_id=virtual_tenant_id)
 
-    async def _delete_service_data(self, service_name: str, virtual_tenant_id: str):
-        """Delete data from a specific service"""
+    async def _delete_service_data(
+        self,
+        service_name: str,
+        virtual_tenant_id: str,
+        client: httpx.AsyncClient
+    ):
+        """Delete data from a specific service using provided HTTP client"""
         service_url = self._get_service_url(service_name)
 
         # Get internal API key from settings
         from app.core.config import settings
         internal_api_key = settings.INTERNAL_API_KEY
 
-        async with httpx.AsyncClient(timeout=30.0) as client:
-            await client.delete(
-                f"{service_url}/internal/demo/tenant/{virtual_tenant_id}",
-                headers={"X-Internal-API-Key": internal_api_key}
-            )
+        await client.delete(
+            f"{service_url}/internal/demo/tenant/{virtual_tenant_id}",
+            headers={"X-Internal-API-Key": internal_api_key}
+        )
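Because _delete_service_data now receives its client as a parameter, the outbound DELETE can be exercised against an in-memory transport. The following is a hypothetical test sketch using httpx.MockTransport, not a test shipped with this commit; the base URL and API key are placeholders.

import asyncio

import httpx

deleted_paths: list[str] = []


def handler(request: httpx.Request) -> httpx.Response:
    # Record the tenant-deletion calls instead of hitting real services.
    deleted_paths.append(request.url.path)
    return httpx.Response(204)


async def main() -> None:
    async with httpx.AsyncClient(
        transport=httpx.MockTransport(handler),
        base_url="http://inventory.local",
    ) as client:
        # Mirrors the DELETE issued by _delete_service_data for one service.
        await client.delete(
            "/internal/demo/tenant/tenant-123",
            headers={"X-Internal-API-Key": "test-key"},
        )
    assert deleted_paths == ["/internal/demo/tenant/tenant-123"]


asyncio.run(main())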