Add frontend loading improvements
@@ -224,6 +224,14 @@ async def create_demo_session(
            algorithm=settings.JWT_ALGORITHM
        )

        # Map demo_account_type to subscription tier
        subscription_tier = "enterprise" if session.demo_account_type == "enterprise" else "professional"
        tenant_name = (
            "Panadería Artesana España - Central"
            if session.demo_account_type == "enterprise"
            else "Panadería Artesana Madrid - Demo"
        )

        return {
            "session_id": session.session_id,
            "virtual_tenant_id": str(session.virtual_tenant_id),
@@ -232,7 +240,10 @@ async def create_demo_session(
            "created_at": session.created_at,
            "expires_at": session.expires_at,
            "demo_config": session.session_metadata.get("demo_config", {}),
            "session_token": session_token
            "session_token": session_token,
            "subscription_tier": subscription_tier,
            "is_enterprise": session.demo_account_type == "enterprise",
            "tenant_name": tenant_name
        }

    except Exception as e:
@@ -48,6 +48,9 @@ class CloneOrchestrator:
        self.internal_api_key = settings.INTERNAL_API_KEY
        self.redis_manager = redis_manager  # For real-time progress updates

        # Shared HTTP client with connection pooling
        self._http_client: Optional[httpx.AsyncClient] = None

        # Define services that participate in cloning
        # URLs should be internal Kubernetes service names
        self.services = [
@@ -125,6 +128,20 @@ class CloneOrchestrator:
            ),
        ]

    async def _get_http_client(self) -> httpx.AsyncClient:
        """Get or create shared HTTP client with connection pooling"""
        if self._http_client is None or self._http_client.is_closed:
            self._http_client = httpx.AsyncClient(
                timeout=httpx.Timeout(30.0, connect=5.0),
                limits=httpx.Limits(max_connections=100, max_keepalive_connections=20)
            )
        return self._http_client

    async def close(self):
        """Close the HTTP client"""
        if self._http_client and not self._http_client.is_closed:
            await self._http_client.aclose()

    async def _update_progress_in_redis(
        self,
        session_id: str,
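Note: with a lazily created shared client, something has to call `close()` at shutdown. A minimal sketch of that wiring, assuming the orchestrator is constructed once per process; the lifespan hook and attribute names here are illustrative, not from this commit:

```python
from contextlib import asynccontextmanager
from fastapi import FastAPI

@asynccontextmanager
async def lifespan(app: FastAPI):
    app.state.orchestrator = CloneOrchestrator()  # hypothetical constructor call
    yield
    # release pooled keep-alive connections so sockets are not leaked
    await app.state.orchestrator.close()

app = FastAPI(lifespan=lifespan)
```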
@@ -352,30 +369,13 @@ class CloneOrchestrator:
                "duration_ms": duration_ms
            }

        # If cloning completed successfully, trigger post-clone operations
        # If cloning completed successfully, trigger post-clone operations in background
        if overall_status in ["completed", "partial"]:
            try:
                # Trigger alert generation
                alert_results = await self._trigger_alert_generation_post_clone(
                    virtual_tenant_id=virtual_tenant_id,
                    demo_account_type=demo_account_type
                )
                result["alert_generation"] = alert_results

                # Trigger AI insights generation
                insights_results = await self._trigger_ai_insights_generation_post_clone(
                    virtual_tenant_id=virtual_tenant_id,
                    demo_account_type=demo_account_type
                )
                result["ai_insights_generation"] = insights_results

            except Exception as e:
                logger.error(
                    "Failed to trigger post-clone operations (non-fatal)",
                    session_id=session_id,
                    error=str(e)
                )
                result["post_clone_error"] = str(e)
            asyncio.create_task(self._run_post_clone_enrichments(
                virtual_tenant_id=virtual_tenant_id,
                demo_account_type=demo_account_type,
                session_id=session_id
            ))

        logger.info(
            "Cloning completed",
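Note: `asyncio.create_task` returns a task the event loop holds only weakly; if the return value is discarded, as above, the task can be garbage-collected before it finishes. A defensive sketch (not in this commit) that keeps a strong reference and drops it on completion:

```python
import asyncio

_background_tasks: set[asyncio.Task] = set()

def spawn(coro) -> asyncio.Task:
    task = asyncio.create_task(coro)
    _background_tasks.add(task)  # strong reference while the task runs
    task.add_done_callback(_background_tasks.discard)  # drop it when done
    return task
```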
@@ -528,92 +528,91 @@ class CloneOrchestrator:
                timeout=service.timeout
            )

            async with httpx.AsyncClient(timeout=service.timeout) as client:
                logger.debug(
                    "Sending clone request",
            client = await self._get_http_client()
            logger.debug(
                "Sending clone request",
                service=service.name,
                base_tenant_id=base_tenant_id,
                virtual_tenant_id=virtual_tenant_id,
                demo_account_type=demo_account_type
            )

            response = await client.post(
                f"{service.url}/internal/demo/clone",
                params={
                    "base_tenant_id": base_tenant_id,
                    "virtual_tenant_id": virtual_tenant_id,
                    "demo_account_type": demo_account_type,
                    "session_id": session_id,
                    "session_created_at": session_created_at.isoformat()
                },
                headers={"X-Internal-API-Key": self.internal_api_key},
                timeout=service.timeout
            )

            duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
            duration_seconds = duration_ms / 1000

            logger.debug(
                "Received response from service",
                service=service.name,
                status_code=response.status_code,
                duration_ms=duration_ms
            )

            demo_cross_service_calls_total.labels(
                source_service="demo-session",
                target_service=service.name,
                status="success"
            ).inc()
            demo_cross_service_call_duration_seconds.labels(
                source_service="demo-session",
                target_service=service.name
            ).observe(duration_seconds)
            demo_service_clone_duration_seconds.labels(
                tier=demo_account_type,
                service=service.name
            ).observe(duration_seconds)

            if response.status_code == 200:
                result = response.json()
                logger.info(
                    "Service cloning completed",
                    service=service.name,
                    base_tenant_id=base_tenant_id,
                    virtual_tenant_id=virtual_tenant_id,
                    demo_account_type=demo_account_type
                )

                response = await client.post(
                    f"{service.url}/internal/demo/clone",
                    params={
                        "base_tenant_id": base_tenant_id,
                        "virtual_tenant_id": virtual_tenant_id,
                        "demo_account_type": demo_account_type,
                        "session_id": session_id,
                        "session_created_at": session_created_at.isoformat()
                    },
                    headers={"X-Internal-API-Key": self.internal_api_key}
                )

                duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
                duration_seconds = duration_ms / 1000

                logger.debug(
                    "Received response from service",
                    service=service.name,
                    status_code=response.status_code,
                    records_cloned=result.get("records_cloned", 0),
                    duration_ms=duration_ms
                )
                return result
            else:
                error_msg = f"HTTP {response.status_code}: {response.text}"
                logger.error(
                    "Service cloning failed",
                    service=service.name,
                    status_code=response.status_code,
                    error=error_msg,
                    response_text=response.text
                )

                # Update Prometheus metrics
                demo_cross_service_calls_total.labels(
                    source_service="demo-session",
                    target_service=service.name,
                    status="success"
                    status="failed"
                ).inc()
                demo_cross_service_call_duration_seconds.labels(
                    source_service="demo-session",
                    target_service=service.name
                ).observe(duration_seconds)
                demo_service_clone_duration_seconds.labels(
                demo_cloning_errors_total.labels(
                    tier=demo_account_type,
                    service=service.name
                ).observe(duration_seconds)
                    service=service.name,
                    error_type="http_error"
                ).inc()

            if response.status_code == 200:
                result = response.json()
                logger.info(
                    "Service cloning completed",
                    service=service.name,
                    records_cloned=result.get("records_cloned", 0),
                    duration_ms=duration_ms
                )
                return result
            else:
                error_msg = f"HTTP {response.status_code}: {response.text}"
                logger.error(
                    "Service cloning failed",
                    service=service.name,
                    status_code=response.status_code,
                    error=error_msg,
                    response_text=response.text
                )

                # Update error metrics
                demo_cross_service_calls_total.labels(
                    source_service="demo-session",
                    target_service=service.name,
                    status="failed"
                ).inc()
                demo_cloning_errors_total.labels(
                    tier=demo_account_type,
                    service=service.name,
                    error_type="http_error"
                ).inc()

                return {
                    "service": service.name,
                    "status": "failed",
                    "error": error_msg,
                    "records_cloned": 0,
                    "duration_ms": duration_ms,
                    "response_status": response.status_code,
                    "response_text": response.text
                }
                return {
                    "service": service.name,
                    "status": "failed",
                    "error": error_msg,
                    "records_cloned": 0,
                    "duration_ms": duration_ms,
                    "response_status": response.status_code,
                    "response_text": response.text
                }

        except httpx.TimeoutException:
            duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
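Note: the metric objects used above are defined elsewhere in the repo. Assumed shapes, for reference only — label names are inferred from the call sites, while help text is a guess:

```python
from prometheus_client import Counter, Histogram

demo_cross_service_calls_total = Counter(
    "demo_cross_service_calls_total",
    "Cross-service demo clone calls",
    ["source_service", "target_service", "status"],
)
demo_cross_service_call_duration_seconds = Histogram(
    "demo_cross_service_call_duration_seconds",
    "Latency of cross-service demo clone calls",
    ["source_service", "target_service"],
)
demo_service_clone_duration_seconds = Histogram(
    "demo_service_clone_duration_seconds",
    "Per-service clone duration",
    ["tier", "service"],
)
demo_cloning_errors_total = Counter(
    "demo_cloning_errors_total",
    "Demo clone failures",
    ["tier", "service", "error_type"],
)
```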
@@ -798,28 +797,29 @@ class CloneOrchestrator:
        try:
            # First, create child tenant via tenant service
            tenant_url = os.getenv("TENANT_SERVICE_URL", "http://tenant-service:8000")
            async with httpx.AsyncClient(timeout=30.0) as client:
                response = await client.post(
                    f"{tenant_url}/internal/demo/create-child",
                    json={
                        "base_tenant_id": child_base_id,
                        "virtual_tenant_id": virtual_child_id,
                        "parent_tenant_id": virtual_parent_id,
                        "child_name": child_name,
                        "location": location,
                        "session_id": session_id
                    },
                    headers={"X-Internal-API-Key": self.internal_api_key}
                )
            client = await self._get_http_client()
            response = await client.post(
                f"{tenant_url}/internal/demo/create-child",
                json={
                    "base_tenant_id": child_base_id,
                    "virtual_tenant_id": virtual_child_id,
                    "parent_tenant_id": virtual_parent_id,
                    "child_name": child_name,
                    "location": location,
                    "session_id": session_id
                },
                headers={"X-Internal-API-Key": self.internal_api_key},
                timeout=30.0
            )

            if response.status_code != 200:
                return {
                    "child_id": virtual_child_id,
                    "child_name": child_name,
                    "status": "failed",
                    "error": f"Tenant creation failed: HTTP {response.status_code}",
                    "records_cloned": 0
                }
            if response.status_code != 200:
                return {
                    "child_id": virtual_child_id,
                    "child_name": child_name,
                    "status": "failed",
                    "error": f"Tenant creation failed: HTTP {response.status_code}",
                    "records_cloned": 0
                }

            # Then clone data from all services for this child
            records_cloned = 0
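Note: once requests move from a per-call `AsyncClient(timeout=30.0)` to the shared pooled client, the old client-level timeout no longer applies, which is why `timeout=30.0` is now passed per request. In httpx, a request-level timeout overrides the client default:

```python
import httpx

client = httpx.AsyncClient(timeout=httpx.Timeout(30.0, connect=5.0))  # client default
# resp = await client.post(url, json=payload, timeout=60.0)  # overrides to 60s for this call only
```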
@@ -942,9 +942,6 @@ class CloneOrchestrator:
            logger.error("Failed to trigger production alerts", tenant_id=virtual_tenant_id, error=str(e))
            results["production_alerts"] = {"error": str(e)}

        # Wait 1.5s for alert enrichment
        await asyncio.sleep(1.5)

        logger.info(
            "Alert generation post-clone completed",
            tenant_id=virtual_tenant_id,
@@ -1052,9 +1049,6 @@ class CloneOrchestrator:
            logger.error("Failed to trigger demand insights", tenant_id=virtual_tenant_id, error=str(e))
            results["demand_insights"] = {"error": str(e)}

        # Wait 2s for insights to be processed
        await asyncio.sleep(2.0)

        logger.info(
            "AI insights generation post-clone completed",
            tenant_id=virtual_tenant_id,
@@ -1063,3 +1057,47 @@ class CloneOrchestrator:
        results["total_insights_generated"] = total_insights
        return results

    async def _run_post_clone_enrichments(
        self,
        virtual_tenant_id: str,
        demo_account_type: str,
        session_id: str
    ) -> None:
        """
        Background task for non-blocking enrichments (alerts and AI insights).
        Runs in fire-and-forget mode to avoid blocking session readiness.
        """
        try:
            logger.info(
                "Starting background enrichments",
                session_id=session_id,
                tenant_id=virtual_tenant_id
            )

            await asyncio.gather(
                self._trigger_alert_generation_post_clone(virtual_tenant_id, demo_account_type),
                self._trigger_ai_insights_generation_post_clone(virtual_tenant_id, demo_account_type),
                return_exceptions=True
            )

            if self.redis_manager:
                client = await self.redis_manager.get_client()
                await client.set(
                    f"session:{session_id}:enrichments_complete",
                    "true",
                    ex=7200
                )

            logger.info(
                "Background enrichments completed",
                session_id=session_id,
                tenant_id=virtual_tenant_id
            )

        except Exception as e:
            logger.error(
                "Background enrichments failed",
                session_id=session_id,
                error=str(e)
            )
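Note: callers can watch the `enrichments_complete` key to wait for background enrichment instead of relying on fixed sleeps. An illustrative polling helper (not part of this commit), assuming an asyncio Redis client:

```python
import asyncio

async def wait_for_enrichments(redis_client, session_id: str, deadline_s: float = 30.0) -> bool:
    key = f"session:{session_id}:enrichments_complete"
    loop = asyncio.get_running_loop()
    end = loop.time() + deadline_s
    while loop.time() < end:
        if await redis_client.get(key):  # set to "true" by the task above
            return True
        await asyncio.sleep(0.5)
    return False
```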
@@ -600,4 +600,35 @@ async def delete_demo_tenant_data(
        raise HTTPException(
            status_code=500,
            detail=f"Failed to delete demo data: {str(e)}"
        )
        )


@router.get("/internal/count")
async def get_ingredient_count(
    tenant_id: str,
    db: AsyncSession = Depends(get_db),
    _: bool = Depends(verify_internal_api_key)
):
    """
    Get count of active ingredients for onboarding status check.
    Internal endpoint for tenant service.
    """
    try:
        from sqlalchemy import select, func

        count = await db.scalar(
            select(func.count()).select_from(Ingredient)
            .where(
                Ingredient.tenant_id == UUID(tenant_id),
                Ingredient.is_active == True
            )
        )

        return {
            "count": count or 0,
            "tenant_id": tenant_id
        }

    except Exception as e:
        logger.error("Failed to get ingredient count", tenant_id=tenant_id, error=str(e))
        raise HTTPException(status_code=500, detail=f"Failed to get ingredient count: {str(e)}")
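Note: a quick manual check of the new internal endpoint might look like this — the URL, tenant id, and key are placeholders:

```python
import asyncio
import httpx

async def main():
    async with httpx.AsyncClient() as client:
        resp = await client.get(
            "http://localhost:8000/internal/count",  # hypothetical port-forwarded service
            params={"tenant_id": "00000000-0000-0000-0000-000000000000"},
            headers={"X-Internal-API-Key": "dev-key"},
        )
        print(resp.json())  # e.g. {"count": 3, "tenant_id": "..."}

asyncio.run(main())
```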
@@ -431,4 +431,36 @@ async def delete_demo_tenant_data(
        raise HTTPException(
            status_code=500,
            detail=f"Failed to delete demo data: {str(e)}"
        )
        )


@router.get("/internal/count")
async def get_recipe_count(
    tenant_id: str,
    db: AsyncSession = Depends(get_db),
    _: bool = Depends(verify_internal_api_key)
):
    """
    Get count of active recipes for onboarding status check.
    Internal endpoint for tenant service.
    """
    try:
        from sqlalchemy import select, func
        from app.models.recipes import RecipeStatus

        count = await db.scalar(
            select(func.count()).select_from(Recipe)
            .where(
                Recipe.tenant_id == UUID(tenant_id),
                Recipe.status == RecipeStatus.ACTIVE
            )
        )

        return {
            "count": count or 0,
            "tenant_id": tenant_id
        }

    except Exception as e:
        logger.error("Failed to get recipe count", tenant_id=tenant_id, error=str(e))
        raise HTTPException(status_code=500, detail=f"Failed to get recipe count: {str(e)}")

@@ -406,4 +406,36 @@ async def delete_demo_tenant_data(
        raise HTTPException(
            status_code=500,
            detail=f"Failed to delete demo data: {str(e)}"
        )
        )


@router.get("/internal/count")
async def get_supplier_count(
    tenant_id: str,
    db: AsyncSession = Depends(get_db),
    _: bool = Depends(verify_internal_api_key)
):
    """
    Get count of active suppliers for onboarding status check.
    Internal endpoint for tenant service.
    """
    try:
        from sqlalchemy import select, func
        from app.models.suppliers import SupplierStatus

        count = await db.scalar(
            select(func.count()).select_from(Supplier)
            .where(
                Supplier.tenant_id == UUID(tenant_id),
                Supplier.status == SupplierStatus.active
            )
        )

        return {
            "count": count or 0,
            "tenant_id": tenant_id
        }

    except Exception as e:
        logger.error("Failed to get supplier count", tenant_id=tenant_id, error=str(e))
        raise HTTPException(status_code=500, detail=f"Failed to get supplier count: {str(e)}")

services/tenant/app/api/onboarding.py — new file (133 lines)
@@ -0,0 +1,133 @@
"""
Onboarding Status API
Provides lightweight onboarding status checks by aggregating counts from multiple services
"""

from fastapi import APIRouter, Depends, HTTPException
from sqlalchemy.ext.asyncio import AsyncSession
import structlog
import asyncio
import httpx
import os

from app.core.database import get_db
from app.core.config import settings
from shared.auth.decorators import get_current_tenant_id_dep
from shared.routing.route_builder import RouteBuilder

logger = structlog.get_logger()
router = APIRouter()
route_builder = RouteBuilder("tenants")


@router.get(route_builder.build_base_route("{tenant_id}/onboarding/status", include_tenant_prefix=False))
async def get_onboarding_status(
    tenant_id: str,
    db: AsyncSession = Depends(get_db)
):
    """
    Get lightweight onboarding status by fetching counts from each service.

    Returns:
    - ingredients_count: Number of active ingredients
    - suppliers_count: Number of active suppliers
    - recipes_count: Number of active recipes
    - has_minimum_setup: Boolean indicating if minimum requirements are met
    - progress_percentage: Overall onboarding progress (0-100)
    """
    try:
        # Service URLs from environment
        inventory_url = os.getenv("INVENTORY_SERVICE_URL", "http://inventory-service:8000")
        suppliers_url = os.getenv("SUPPLIERS_SERVICE_URL", "http://suppliers-service:8000")
        recipes_url = os.getenv("RECIPES_SERVICE_URL", "http://recipes-service:8000")

        internal_api_key = settings.INTERNAL_API_KEY

        # Fetch counts from all services in parallel
        async with httpx.AsyncClient(timeout=10.0) as client:
            results = await asyncio.gather(
                client.get(
                    f"{inventory_url}/internal/count",
                    params={"tenant_id": tenant_id},
                    headers={"X-Internal-API-Key": internal_api_key}
                ),
                client.get(
                    f"{suppliers_url}/internal/count",
                    params={"tenant_id": tenant_id},
                    headers={"X-Internal-API-Key": internal_api_key}
                ),
                client.get(
                    f"{recipes_url}/internal/count",
                    params={"tenant_id": tenant_id},
                    headers={"X-Internal-API-Key": internal_api_key}
                ),
                return_exceptions=True
            )

        # Extract counts with fallback to 0
        ingredients_count = 0
        suppliers_count = 0
        recipes_count = 0

        if not isinstance(results[0], Exception) and results[0].status_code == 200:
            ingredients_count = results[0].json().get("count", 0)

        if not isinstance(results[1], Exception) and results[1].status_code == 200:
            suppliers_count = results[1].json().get("count", 0)

        if not isinstance(results[2], Exception) and results[2].status_code == 200:
            recipes_count = results[2].json().get("count", 0)

        # Calculate minimum setup requirements
        # Minimum: 3 ingredients, 1 supplier, 1 recipe
        has_minimum_ingredients = ingredients_count >= 3
        has_minimum_suppliers = suppliers_count >= 1
        has_minimum_recipes = recipes_count >= 1

        has_minimum_setup = all([
            has_minimum_ingredients,
            has_minimum_suppliers,
            has_minimum_recipes
        ])

        # Calculate progress percentage
        # Each requirement contributes 33.33%
        progress = 0
        if has_minimum_ingredients:
            progress += 33
        if has_minimum_suppliers:
            progress += 33
        if has_minimum_recipes:
            progress += 34

        return {
            "ingredients_count": ingredients_count,
            "suppliers_count": suppliers_count,
            "recipes_count": recipes_count,
            "has_minimum_setup": has_minimum_setup,
            "progress_percentage": progress,
            "requirements": {
                "ingredients": {
                    "current": ingredients_count,
                    "minimum": 3,
                    "met": has_minimum_ingredients
                },
                "suppliers": {
                    "current": suppliers_count,
                    "minimum": 1,
                    "met": has_minimum_suppliers
                },
                "recipes": {
                    "current": recipes_count,
                    "minimum": 1,
                    "met": has_minimum_recipes
                }
            }
        }

    except Exception as e:
        logger.error("Failed to get onboarding status", tenant_id=tenant_id, error=str(e))
        raise HTTPException(
            status_code=500,
            detail=f"Failed to get onboarding status: {str(e)}"
        )
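Note: for a tenant that meets all three minimums, the endpoint returns a payload shaped like this (values illustrative):

```python
{
    "ingredients_count": 5,
    "suppliers_count": 2,
    "recipes_count": 4,
    "has_minimum_setup": True,
    "progress_percentage": 100,
    "requirements": {
        "ingredients": {"current": 5, "minimum": 3, "met": True},
        "suppliers": {"current": 2, "minimum": 1, "met": True},
        "recipes": {"current": 4, "minimum": 1, "met": True},
    },
}
```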
@@ -745,10 +745,30 @@ async def get_usage_summary(
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    limit_service: SubscriptionLimitService = Depends(get_subscription_limit_service)
):
    """Get usage summary vs limits for a tenant"""
    """Get usage summary vs limits for a tenant (cached for 30s for performance)"""

    try:
        # Try to get from cache first (30s TTL)
        from shared.redis_utils import get_redis_client
        import json

        cache_key = f"usage_summary:{tenant_id}"
        redis_client = await get_redis_client()

        if redis_client:
            cached = await redis_client.get(cache_key)
            if cached:
                logger.debug("Usage summary cache hit", tenant_id=str(tenant_id))
                return json.loads(cached)

        # Cache miss - fetch fresh data
        usage = await limit_service.get_usage_summary(str(tenant_id))

        # Store in cache with 30s TTL
        if redis_client:
            await redis_client.setex(cache_key, 30, json.dumps(usage))
            logger.debug("Usage summary cached", tenant_id=str(tenant_id))

        return usage

    except Exception as e:
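Note: `json.dumps(usage)` assumes the summary contains only plain JSON types; if it ever carries datetimes or UUIDs, the cache write raises `TypeError`. A defensive variant of the write (a sketch, not in this commit):

```python
import json

async def cache_usage_summary(redis_client, cache_key: str, usage: dict, ttl: int = 30) -> None:
    # default=str stringifies datetimes/UUIDs instead of raising TypeError
    await redis_client.setex(cache_key, ttl, json.dumps(usage, default=str))
```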
@@ -7,7 +7,7 @@ from fastapi import FastAPI
from sqlalchemy import text
from app.core.config import settings
from app.core.database import database_manager
from app.api import tenants, tenant_members, tenant_operations, webhooks, plans, subscription, tenant_settings, whatsapp_admin, usage_forecast, enterprise_upgrade, tenant_locations, tenant_hierarchy, internal_demo, network_alerts
from app.api import tenants, tenant_members, tenant_operations, webhooks, plans, subscription, tenant_settings, whatsapp_admin, usage_forecast, enterprise_upgrade, tenant_locations, tenant_hierarchy, internal_demo, network_alerts, onboarding
from shared.service_base import StandardFastAPIService

@@ -158,6 +158,7 @@ service.add_router(internal_demo.router, tags=["internal-demo"]) # Internal dem
service.add_router(tenant_hierarchy.router, tags=["tenant-hierarchy"]) # Tenant hierarchy endpoints
service.add_router(internal_demo.router, tags=["internal-demo"]) # Internal demo data cloning
service.add_router(network_alerts.router, tags=["network-alerts"]) # Network alerts aggregation endpoints
service.add_router(onboarding.router, tags=["onboarding"]) # Onboarding status endpoints

if __name__ == "__main__":
    import uvicorn

@@ -437,18 +437,21 @@ class SubscriptionLimitService:
        current_users = len(members)
        current_locations = 1  # Each tenant has one primary location

        # Get current usage - Products & Inventory
        current_products = await self._get_ingredient_count(tenant_id)
        current_recipes = await self._get_recipe_count(tenant_id)
        current_suppliers = await self._get_supplier_count(tenant_id)
        # Get current usage - Products & Inventory (parallel calls for performance)
        import asyncio
        current_products, current_recipes, current_suppliers = await asyncio.gather(
            self._get_ingredient_count(tenant_id),
            self._get_recipe_count(tenant_id),
            self._get_supplier_count(tenant_id)
        )

        # Get current usage - IA & Analytics (Redis-based daily quotas)
        training_jobs_usage = await self._get_training_jobs_today(tenant_id, subscription.plan)
        forecasts_usage = await self._get_forecasts_today(tenant_id, subscription.plan)

        # Get current usage - API & Storage (Redis-based)
        api_calls_usage = await self._get_api_calls_this_hour(tenant_id, subscription.plan)
        storage_usage = await self._get_file_storage_usage_gb(tenant_id, subscription.plan)
        # Get current usage - IA & Analytics + API & Storage (parallel Redis calls for performance)
        training_jobs_usage, forecasts_usage, api_calls_usage, storage_usage = await asyncio.gather(
            self._get_training_jobs_today(tenant_id, subscription.plan),
            self._get_forecasts_today(tenant_id, subscription.plan),
            self._get_api_calls_this_hour(tenant_id, subscription.plan),
            self._get_file_storage_usage_gb(tenant_id, subscription.plan)
        )

        # Get limits from subscription
        recipes_limit = await self._get_limit_from_plan(subscription.plan, 'recipes')
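Note: unlike the onboarding endpoint, these `asyncio.gather` calls omit `return_exceptions=True`, so the first failing count aborts the whole summary while the sibling coroutines keep running. The difference, in one runnable sketch:

```python
import asyncio

async def ok() -> int:
    return 1

async def boom() -> int:
    raise RuntimeError("count service down")

async def main():
    results = await asyncio.gather(ok(), boom(), return_exceptions=True)
    print(results)  # [1, RuntimeError('count service down')]
    await asyncio.gather(ok(), boom())  # raises RuntimeError to the caller

asyncio.run(main())
```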