Add frontend loading improvements

This commit is contained in:
Urtzi Alfaro
2025-12-27 21:30:42 +01:00
parent 6e3a6590d6
commit 54662dde79
21 changed files with 799 additions and 363 deletions

View File

@@ -224,6 +224,14 @@ async def create_demo_session(
algorithm=settings.JWT_ALGORITHM
)
# Map demo_account_type to subscription tier
subscription_tier = "enterprise" if session.demo_account_type == "enterprise" else "professional"
tenant_name = (
"Panadería Artesana España - Central"
if session.demo_account_type == "enterprise"
else "Panadería Artesana Madrid - Demo"
)
return {
"session_id": session.session_id,
"virtual_tenant_id": str(session.virtual_tenant_id),
@@ -232,7 +240,10 @@ async def create_demo_session(
"created_at": session.created_at,
"expires_at": session.expires_at,
"demo_config": session.session_metadata.get("demo_config", {}),
"session_token": session_token
"session_token": session_token,
"subscription_tier": subscription_tier,
"is_enterprise": session.demo_account_type == "enterprise",
"tenant_name": tenant_name
}
except Exception as e:

View File

@@ -48,6 +48,9 @@ class CloneOrchestrator:
self.internal_api_key = settings.INTERNAL_API_KEY
self.redis_manager = redis_manager # For real-time progress updates
# Shared HTTP client with connection pooling
self._http_client: Optional[httpx.AsyncClient] = None
# Define services that participate in cloning
# URLs should be internal Kubernetes service names
self.services = [
@@ -125,6 +128,20 @@ class CloneOrchestrator:
),
]
async def _get_http_client(self) -> httpx.AsyncClient:
    """Return the shared connection-pooled HTTP client, creating it lazily.

    A fresh client is built on first use, or again if the previous one
    was closed; otherwise the cached instance is reused.
    """
    client = self._http_client
    if client is None or client.is_closed:
        # Pool limits bound fan-out to downstream services; connect timeout
        # is kept short so unreachable services fail fast.
        client = httpx.AsyncClient(
            timeout=httpx.Timeout(30.0, connect=5.0),
            limits=httpx.Limits(max_connections=100, max_keepalive_connections=20),
        )
        self._http_client = client
    return client
async def close(self):
    """Release the shared HTTP client if one is currently open."""
    client = self._http_client
    if client is not None and not client.is_closed:
        await client.aclose()
async def _update_progress_in_redis(
self,
session_id: str,
@@ -352,30 +369,13 @@ class CloneOrchestrator:
"duration_ms": duration_ms
}
# If cloning completed successfully, trigger post-clone operations
# If cloning completed successfully, trigger post-clone operations in background
if overall_status in ["completed", "partial"]:
try:
# Trigger alert generation
alert_results = await self._trigger_alert_generation_post_clone(
virtual_tenant_id=virtual_tenant_id,
demo_account_type=demo_account_type
)
result["alert_generation"] = alert_results
# Trigger AI insights generation
insights_results = await self._trigger_ai_insights_generation_post_clone(
virtual_tenant_id=virtual_tenant_id,
demo_account_type=demo_account_type
)
result["ai_insights_generation"] = insights_results
except Exception as e:
logger.error(
"Failed to trigger post-clone operations (non-fatal)",
session_id=session_id,
error=str(e)
)
result["post_clone_error"] = str(e)
asyncio.create_task(self._run_post_clone_enrichments(
virtual_tenant_id=virtual_tenant_id,
demo_account_type=demo_account_type,
session_id=session_id
))
logger.info(
"Cloning completed",
@@ -528,92 +528,91 @@ class CloneOrchestrator:
timeout=service.timeout
)
async with httpx.AsyncClient(timeout=service.timeout) as client:
logger.debug(
"Sending clone request",
client = await self._get_http_client()
logger.debug(
"Sending clone request",
service=service.name,
base_tenant_id=base_tenant_id,
virtual_tenant_id=virtual_tenant_id,
demo_account_type=demo_account_type
)
response = await client.post(
f"{service.url}/internal/demo/clone",
params={
"base_tenant_id": base_tenant_id,
"virtual_tenant_id": virtual_tenant_id,
"demo_account_type": demo_account_type,
"session_id": session_id,
"session_created_at": session_created_at.isoformat()
},
headers={"X-Internal-API-Key": self.internal_api_key},
timeout=service.timeout
)
duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
duration_seconds = duration_ms / 1000
logger.debug(
"Received response from service",
service=service.name,
status_code=response.status_code,
duration_ms=duration_ms
)
demo_cross_service_calls_total.labels(
source_service="demo-session",
target_service=service.name,
status="success"
).inc()
demo_cross_service_call_duration_seconds.labels(
source_service="demo-session",
target_service=service.name
).observe(duration_seconds)
demo_service_clone_duration_seconds.labels(
tier=demo_account_type,
service=service.name
).observe(duration_seconds)
if response.status_code == 200:
result = response.json()
logger.info(
"Service cloning completed",
service=service.name,
base_tenant_id=base_tenant_id,
virtual_tenant_id=virtual_tenant_id,
demo_account_type=demo_account_type
)
response = await client.post(
f"{service.url}/internal/demo/clone",
params={
"base_tenant_id": base_tenant_id,
"virtual_tenant_id": virtual_tenant_id,
"demo_account_type": demo_account_type,
"session_id": session_id,
"session_created_at": session_created_at.isoformat()
},
headers={"X-Internal-API-Key": self.internal_api_key}
)
duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
duration_seconds = duration_ms / 1000
logger.debug(
"Received response from service",
service=service.name,
status_code=response.status_code,
records_cloned=result.get("records_cloned", 0),
duration_ms=duration_ms
)
return result
else:
error_msg = f"HTTP {response.status_code}: {response.text}"
logger.error(
"Service cloning failed",
service=service.name,
status_code=response.status_code,
error=error_msg,
response_text=response.text
)
# Update Prometheus metrics
demo_cross_service_calls_total.labels(
source_service="demo-session",
target_service=service.name,
status="success"
status="failed"
).inc()
demo_cross_service_call_duration_seconds.labels(
source_service="demo-session",
target_service=service.name
).observe(duration_seconds)
demo_service_clone_duration_seconds.labels(
demo_cloning_errors_total.labels(
tier=demo_account_type,
service=service.name
).observe(duration_seconds)
service=service.name,
error_type="http_error"
).inc()
if response.status_code == 200:
result = response.json()
logger.info(
"Service cloning completed",
service=service.name,
records_cloned=result.get("records_cloned", 0),
duration_ms=duration_ms
)
return result
else:
error_msg = f"HTTP {response.status_code}: {response.text}"
logger.error(
"Service cloning failed",
service=service.name,
status_code=response.status_code,
error=error_msg,
response_text=response.text
)
# Update error metrics
demo_cross_service_calls_total.labels(
source_service="demo-session",
target_service=service.name,
status="failed"
).inc()
demo_cloning_errors_total.labels(
tier=demo_account_type,
service=service.name,
error_type="http_error"
).inc()
return {
"service": service.name,
"status": "failed",
"error": error_msg,
"records_cloned": 0,
"duration_ms": duration_ms,
"response_status": response.status_code,
"response_text": response.text
}
return {
"service": service.name,
"status": "failed",
"error": error_msg,
"records_cloned": 0,
"duration_ms": duration_ms,
"response_status": response.status_code,
"response_text": response.text
}
except httpx.TimeoutException:
duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
@@ -798,28 +797,29 @@ class CloneOrchestrator:
try:
# First, create child tenant via tenant service
tenant_url = os.getenv("TENANT_SERVICE_URL", "http://tenant-service:8000")
async with httpx.AsyncClient(timeout=30.0) as client:
response = await client.post(
f"{tenant_url}/internal/demo/create-child",
json={
"base_tenant_id": child_base_id,
"virtual_tenant_id": virtual_child_id,
"parent_tenant_id": virtual_parent_id,
"child_name": child_name,
"location": location,
"session_id": session_id
},
headers={"X-Internal-API-Key": self.internal_api_key}
)
client = await self._get_http_client()
response = await client.post(
f"{tenant_url}/internal/demo/create-child",
json={
"base_tenant_id": child_base_id,
"virtual_tenant_id": virtual_child_id,
"parent_tenant_id": virtual_parent_id,
"child_name": child_name,
"location": location,
"session_id": session_id
},
headers={"X-Internal-API-Key": self.internal_api_key},
timeout=30.0
)
if response.status_code != 200:
return {
"child_id": virtual_child_id,
"child_name": child_name,
"status": "failed",
"error": f"Tenant creation failed: HTTP {response.status_code}",
"records_cloned": 0
}
if response.status_code != 200:
return {
"child_id": virtual_child_id,
"child_name": child_name,
"status": "failed",
"error": f"Tenant creation failed: HTTP {response.status_code}",
"records_cloned": 0
}
# Then clone data from all services for this child
records_cloned = 0
@@ -942,9 +942,6 @@ class CloneOrchestrator:
logger.error("Failed to trigger production alerts", tenant_id=virtual_tenant_id, error=str(e))
results["production_alerts"] = {"error": str(e)}
# Wait 1.5s for alert enrichment
await asyncio.sleep(1.5)
logger.info(
"Alert generation post-clone completed",
tenant_id=virtual_tenant_id,
@@ -1052,9 +1049,6 @@ class CloneOrchestrator:
logger.error("Failed to trigger demand insights", tenant_id=virtual_tenant_id, error=str(e))
results["demand_insights"] = {"error": str(e)}
# Wait 2s for insights to be processed
await asyncio.sleep(2.0)
logger.info(
"AI insights generation post-clone completed",
tenant_id=virtual_tenant_id,
@@ -1063,3 +1057,47 @@ class CloneOrchestrator:
results["total_insights_generated"] = total_insights
return results
async def _run_post_clone_enrichments(
    self,
    virtual_tenant_id: str,
    demo_account_type: str,
    session_id: str
) -> None:
    """
    Background task for non-blocking enrichments (alerts and AI insights).

    Runs in fire-and-forget mode (via asyncio.create_task) to avoid blocking
    session readiness. Individual enrichment failures are logged but do not
    abort the other enrichment or the completion marker.
    """
    try:
        logger.info(
            "Starting background enrichments",
            session_id=session_id,
            tenant_id=virtual_tenant_id
        )
        # return_exceptions=True keeps one failing enrichment from cancelling
        # the other — but the results must then be inspected, otherwise a
        # raised exception is silently discarded.
        step_names = ("alert_generation", "ai_insights_generation")
        outcomes = await asyncio.gather(
            self._trigger_alert_generation_post_clone(virtual_tenant_id, demo_account_type),
            self._trigger_ai_insights_generation_post_clone(virtual_tenant_id, demo_account_type),
            return_exceptions=True
        )
        for step, outcome in zip(step_names, outcomes):
            if isinstance(outcome, Exception):
                logger.error(
                    "Background enrichment step failed",
                    session_id=session_id,
                    tenant_id=virtual_tenant_id,
                    enrichment=step,
                    error=str(outcome)
                )
        if self.redis_manager:
            client = await self.redis_manager.get_client()
            # Completion marker for readiness polling; 7200s TTL presumably
            # matches the demo session lifetime — confirm against session TTL.
            await client.set(
                f"session:{session_id}:enrichments_complete",
                "true",
                ex=7200
            )
        logger.info(
            "Background enrichments completed",
            session_id=session_id,
            tenant_id=virtual_tenant_id
        )
    except Exception as e:
        # Never propagate: this runs as a detached task and a failure here
        # must not surface into the already-created session.
        logger.error(
            "Background enrichments failed",
            session_id=session_id,
            error=str(e)
        )