Initial commit - production deployment
1
shared/__init__.py
Executable file
@@ -0,0 +1 @@
# Shared package initialization
0
shared/auth/__init__.py
Executable file
478
shared/auth/access_control.py
Executable file
@@ -0,0 +1,478 @@
"""
Subscription Tier and Role-Based Access Control Decorators
Provides unified access control across all microservices
"""

from enum import Enum
from functools import wraps
from typing import List, Callable, Dict, Any, Optional
from fastapi import HTTPException, status, Request, Depends
import structlog

from shared.auth.decorators import get_current_user_dep

logger = structlog.get_logger()


class SubscriptionTier(Enum):
    """
    Subscription tier hierarchy
    Matches project-wide subscription plans in tenant service
    """
    STARTER = "starter"
    PROFESSIONAL = "professional"
    ENTERPRISE = "enterprise"


class UserRole(Enum):
    """
    User role hierarchy
    Matches project-wide role definitions in tenant member model
    """
    VIEWER = "viewer"
    MEMBER = "member"
    ADMIN = "admin"
    OWNER = "owner"


# Tier hierarchy for comparison (higher number = higher tier)
TIER_HIERARCHY = {
    SubscriptionTier.STARTER: 1,
    SubscriptionTier.PROFESSIONAL: 2,
    SubscriptionTier.ENTERPRISE: 3,
}

# Role hierarchy for comparison (higher number = more permissions)
ROLE_HIERARCHY = {
    UserRole.VIEWER: 1,
    UserRole.MEMBER: 2,
    UserRole.ADMIN: 3,
    UserRole.OWNER: 4,
}


def check_tier_access(user_tier: str, required_tiers: List[str]) -> bool:
    """
    Check whether the user's subscription tier meets the requirement

    Args:
        user_tier: Current user's subscription tier
        required_tiers: List of allowed tiers

    Returns:
        bool: True if access is allowed
    """
    try:
        user_tier_enum = SubscriptionTier(user_tier.lower())
        user_tier_level = TIER_HIERARCHY.get(user_tier_enum, 0)

        # Get the minimum required tier level
        min_required_level = min(
            TIER_HIERARCHY.get(SubscriptionTier(tier.lower()), 999)
            for tier in required_tiers
        )

        return user_tier_level >= min_required_level
    except (ValueError, KeyError) as e:
        logger.warning("Invalid tier comparison", user_tier=user_tier, required=required_tiers, error=str(e))
        return False


def check_role_access(user_role: str, required_roles: List[str]) -> bool:
    """
    Check whether the user's role meets the requirement

    Args:
        user_role: Current user's role
        required_roles: List of allowed roles

    Returns:
        bool: True if access is allowed
    """
    try:
        user_role_enum = UserRole(user_role.lower())
        user_role_level = ROLE_HIERARCHY.get(user_role_enum, 0)

        # Get the minimum required role level
        min_required_level = min(
            ROLE_HIERARCHY.get(UserRole(role.lower()), 999)
            for role in required_roles
        )

        return user_role_level >= min_required_level
    except (ValueError, KeyError) as e:
        logger.warning("Invalid role comparison", user_role=user_role, required=required_roles, error=str(e))
        return False


def require_subscription_tier(allowed_tiers: List[str]):
    """
    Decorator to enforce subscription tier access control

    Usage:
        @router.get("/analytics/advanced")
        @require_subscription_tier(['professional', 'enterprise'])
        async def get_advanced_analytics(...):
            ...

    Args:
        allowed_tiers: List of subscription tiers allowed to access this endpoint
    """
    def decorator(func: Callable) -> Callable:
        @wraps(func)
        async def wrapper(*args, **kwargs):
            # Get current user from kwargs (injected by get_current_user_dep)
            current_user = kwargs.get('current_user')

            if not current_user:
                # Try to find it in args
                for arg in args:
                    if isinstance(arg, dict) and 'user_id' in arg:
                        current_user = arg
                        break

            if not current_user:
                logger.error("Current user not found in request context")
                raise HTTPException(
                    status_code=status.HTTP_401_UNAUTHORIZED,
                    detail="Authentication required"
                )

            # Get the tenant's subscription tier from the user context;
            # the gateway should inject this information
            subscription_tier = current_user.get('subscription_tier')

            if not subscription_tier:
                logger.warning("Subscription tier not found in user context", user_id=current_user.get('user_id'))
                raise HTTPException(
                    status_code=status.HTTP_403_FORBIDDEN,
                    detail="Subscription information unavailable"
                )

            # Check tier access
            has_access = check_tier_access(subscription_tier, allowed_tiers)

            if not has_access:
                logger.warning(
                    "Subscription tier access denied",
                    user_tier=subscription_tier,
                    required_tiers=allowed_tiers,
                    user_id=current_user.get('user_id')
                )
                raise HTTPException(
                    status_code=status.HTTP_402_PAYMENT_REQUIRED,
                    detail={
                        "error": "subscription_tier_insufficient",
                        "message": f"This feature requires a {' or '.join(allowed_tiers)} subscription plan",
                        "current_plan": subscription_tier,
                        "required_plans": allowed_tiers,
                        "upgrade_url": "/app/settings/profile"
                    }
                )

            logger.debug("Subscription tier check passed", tier=subscription_tier, required=allowed_tiers)
            return await func(*args, **kwargs)

        return wrapper
    return decorator


def require_user_role(allowed_roles: List[str]):
    """
    Decorator to enforce role-based access control

    Usage:
        @router.delete("/ingredients/{id}")
        @require_user_role(['admin', 'owner'])
        async def delete_ingredient(...):
            ...

    Args:
        allowed_roles: List of user roles allowed to access this endpoint
    """
    def decorator(func: Callable) -> Callable:
        @wraps(func)
        async def wrapper(*args, **kwargs):
            # Get current user from kwargs
            current_user = kwargs.get('current_user')

            if not current_user:
                # Try to find it in args
                for arg in args:
                    if isinstance(arg, dict) and 'user_id' in arg:
                        current_user = arg
                        break

            if not current_user:
                logger.error("Current user not found in request context")
                raise HTTPException(
                    status_code=status.HTTP_401_UNAUTHORIZED,
                    detail="Authentication required"
                )

            # Get the user's role
            user_role = current_user.get('role', 'user')

            # Check role access
            has_access = check_role_access(user_role, allowed_roles)

            if not has_access:
                logger.warning(
                    "Role-based access denied",
                    user_role=user_role,
                    required_roles=allowed_roles,
                    user_id=current_user.get('user_id')
                )
                raise HTTPException(
                    status_code=status.HTTP_403_FORBIDDEN,
                    detail={
                        "error": "insufficient_permissions",
                        "message": f"This action requires {' or '.join(allowed_roles)} role",
                        "current_role": user_role,
                        "required_roles": allowed_roles
                    }
                )

            logger.debug("Role check passed", role=user_role, required=allowed_roles)
            return await func(*args, **kwargs)

        return wrapper
    return decorator


def require_tier_and_role(
    allowed_tiers: List[str],
    allowed_roles: List[str]
):
    """
    Combined decorator for both tier and role enforcement

    Usage:
        @router.post("/analytics/custom-report")
        @require_tier_and_role(['professional', 'enterprise'], ['admin', 'owner'])
        async def create_custom_report(...):
            ...

    Args:
        allowed_tiers: List of subscription tiers allowed
        allowed_roles: List of user roles allowed
    """
    def decorator(func: Callable) -> Callable:
        @wraps(func)
        async def wrapper(*args, **kwargs):
            # Get current user from kwargs
            current_user = kwargs.get('current_user')

            if not current_user:
                # Try to find it in args
                for arg in args:
                    if isinstance(arg, dict) and 'user_id' in arg:
                        current_user = arg
                        break

            if not current_user:
                logger.error("Current user not found in request context")
                raise HTTPException(
                    status_code=status.HTTP_401_UNAUTHORIZED,
                    detail="Authentication required"
                )

            # Check subscription tier
            subscription_tier = current_user.get('subscription_tier')
            if subscription_tier:
                tier_access = check_tier_access(subscription_tier, allowed_tiers)
                if not tier_access:
                    logger.warning(
                        "Combined access control: tier check failed",
                        user_tier=subscription_tier,
                        required_tiers=allowed_tiers
                    )
                    raise HTTPException(
                        status_code=status.HTTP_402_PAYMENT_REQUIRED,
                        detail={
                            "error": "subscription_tier_insufficient",
                            "message": f"This feature requires a {' or '.join(allowed_tiers)} subscription plan",
                            "current_plan": subscription_tier,
                            "required_plans": allowed_tiers,
                            "upgrade_url": "/app/settings/profile"
                        }
                    )

            # Check user role
            user_role = current_user.get('role', 'member')
            role_access = check_role_access(user_role, allowed_roles)

            if not role_access:
                logger.warning(
                    "Combined access control: role check failed",
                    user_role=user_role,
                    required_roles=allowed_roles
                )
                raise HTTPException(
                    status_code=status.HTTP_403_FORBIDDEN,
                    detail={
                        "error": "insufficient_permissions",
                        "message": f"This action requires {' or '.join(allowed_roles)} role",
                        "current_role": user_role,
                        "required_roles": allowed_roles
                    }
                )

            logger.debug(
                "Combined access control passed",
                tier=subscription_tier,
                role=user_role,
                required_tiers=allowed_tiers,
                required_roles=allowed_roles
            )
            return await func(*args, **kwargs)

        return wrapper
    return decorator


# Convenience decorators for common patterns
analytics_tier_required = require_subscription_tier(['professional', 'enterprise'])
enterprise_tier_required = require_subscription_tier(['enterprise'])
admin_role_required = require_user_role(['admin', 'owner'])
owner_role_required = require_user_role(['owner'])


def service_only_access(func: Callable) -> Callable:
    """
    Decorator to restrict endpoint access to service-to-service calls only

    This decorator validates that:
    1. The request has a valid service token (type='service' in JWT)
    2. The token is from an authorized internal service

    Usage:
        @router.delete("/tenant/{tenant_id}")
        @service_only_access
        async def delete_tenant_data(
            tenant_id: str,
            current_user: dict = Depends(get_current_user_dep),
            db = Depends(get_db)
        ):
            # Service-only logic here

    The decorator expects current_user to be injected via the get_current_user_dep
    dependency, which should already contain the user/service context from the JWT.
    """

    @wraps(func)
    async def wrapper(*args, **kwargs):
        # Get current user from kwargs (injected by get_current_user_dep)
        current_user = kwargs.get('current_user')

        if not current_user:
            # Try to find it in args
            for arg in args:
                if isinstance(arg, dict) and 'user_id' in arg:
                    current_user = arg
                    break

        if not current_user:
            logger.error("Service-only access: current user not found in request context")
            raise HTTPException(
                status_code=status.HTTP_401_UNAUTHORIZED,
                detail="Authentication required"
            )

        # Check whether this is a service token
        user_type = current_user.get('type', '')
        is_service = current_user.get('is_service', False)

        if user_type != 'service' and not is_service:
            logger.warning(
                "Service-only access denied: not a service token",
                user_id=current_user.get('user_id'),
                user_type=user_type,
                is_service=is_service
            )
            raise HTTPException(
                status_code=status.HTTP_403_FORBIDDEN,
                detail="This endpoint is only accessible to internal services"
            )

        # Log successful service access
        service_name = current_user.get('service', current_user.get('user_id', 'unknown'))
        logger.info(
            "Service-only access granted",
            service=service_name,
            endpoint=func.__name__
        )

        return await func(*args, **kwargs)

    return wrapper


def require_verified_subscription_tier(
    allowed_tiers: List[str],
    verify_in_database: bool = False
):
    """
    Subscription tier enforcement with optional database verification.

    Args:
        allowed_tiers: List of allowed subscription tiers
        verify_in_database: If True, verify against the database (for critical operations)
    """
    def decorator(func):
        @wraps(func)
        async def wrapper(*args, **kwargs):
            # Locate the Request object in kwargs or positional args
            request = kwargs.get('request')
            if request is None:
                request = next((arg for arg in args if isinstance(arg, Request)), None)
            if request is None:
                raise HTTPException(
                    status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                    detail="Request object not found"
                )

            # Get the tier from the gateway-injected header (from the verified JWT)
            header_tier = request.headers.get("x-subscription-tier", "starter").lower()

            if header_tier not in [t.lower() for t in allowed_tiers]:
                raise HTTPException(
                    status_code=status.HTTP_402_PAYMENT_REQUIRED,
                    detail={
                        "error": "subscription_required",
                        "message": f"This feature requires {', '.join(allowed_tiers)} subscription",
                        "current_tier": header_tier,
                        "required_tiers": allowed_tiers
                    }
                )

            # For critical operations, verify against the database
            if verify_in_database:
                tenant_id = request.headers.get("x-tenant-id")
                if tenant_id:
                    db_tier = await _verify_subscription_in_database(tenant_id)
                    if db_tier.lower() != header_tier:
                        logger.error(
                            "Subscription tier mismatch detected!",
                            header_tier=header_tier,
                            db_tier=db_tier,
                            tenant_id=tenant_id,
                            user_id=request.headers.get("x-user-id")
                        )
                        # Use the database tier (authoritative)
                        if db_tier.lower() not in [t.lower() for t in allowed_tiers]:
                            raise HTTPException(
                                status_code=status.HTTP_402_PAYMENT_REQUIRED,
                                detail={
                                    "error": "subscription_verification_failed",
                                    "message": "Subscription tier verification failed"
                                }
                            )

            return await func(*args, **kwargs)
        return wrapper
    return decorator


async def _verify_subscription_in_database(tenant_id: str) -> str:
    """
    Direct database verification of subscription tier.
    Used for critical operations as defense-in-depth.
    """
    from shared.clients.subscription_client import SubscriptionClient

    client = SubscriptionClient()
    subscription = await client.get_subscription(tenant_id)
    return subscription.get("plan", "starter")
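
For reference, a minimal sketch of how these decorators compose on a FastAPI route. The router, path, and handler below are hypothetical; the pattern follows the Usage notes above, with the route decorator outermost so FastAPI still injects current_user into the wrapped handler's kwargs.

from fastapi import APIRouter, Depends
from shared.auth.access_control import require_tier_and_role
from shared.auth.decorators import get_current_user_dep

router = APIRouter()

@router.post("/analytics/custom-report")  # hypothetical endpoint
@require_tier_and_role(['professional', 'enterprise'], ['admin', 'owner'])
async def create_custom_report(current_user: dict = Depends(get_current_user_dep)):
    # Both the tier and role checks have passed by the time this body runs
    return {"status": "scheduled", "requested_by": current_user.get("user_id")}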
704
shared/auth/decorators.py
Executable file
@@ -0,0 +1,704 @@
# ================================================================
# shared/auth/decorators.py - ENHANCED WITH ADMIN ROLE DECORATOR
# ================================================================
"""
Enhanced authentication decorators for microservices, including admin role validation.
Designed to work with the gateway authentication middleware and provide centralized
role-based access control across all services.
"""

from functools import wraps
from fastapi import HTTPException, status, Request, Depends
from fastapi.security import HTTPBearer
from typing import Callable, Optional, Dict, Any, List
import structlog

logger = structlog.get_logger()

# Bearer token scheme for services that need it
security = HTTPBearer(auto_error=False)

def require_authentication(func: Callable) -> Callable:
    """
    Decorator to require authentication - trusts gateway validation
    Services behind the gateway should use this decorator
    """

    @wraps(func)
    async def wrapper(*args, **kwargs):
        # Find the request object in the arguments
        request = None
        for arg in args:
            if isinstance(arg, Request):
                request = arg
                break

        if not request:
            request = kwargs.get('request')

        if not request:
            raise HTTPException(
                status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                detail="Request object not found"
            )

        # Check whether a user context exists (set by the gateway)
        if not hasattr(request.state, 'user') or not request.state.user:
            # Check headers as a fallback (for direct service calls in dev)
            user_info = extract_user_from_headers(request)
            if not user_info:
                raise HTTPException(
                    status_code=status.HTTP_401_UNAUTHORIZED,
                    detail="Authentication required"
                )
            request.state.user = user_info

        return await func(*args, **kwargs)

    return wrapper

def require_tenant_access(func: Callable) -> Callable:
    """Decorator to require tenant access"""

    @wraps(func)
    async def wrapper(*args, **kwargs):
        request = None
        for arg in args:
            if isinstance(arg, Request):
                request = arg
                break

        if not request:
            request = kwargs.get('request')

        if not request:
            raise HTTPException(
                status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                detail="Request object not found"
            )

        if not hasattr(request.state, 'tenant_id'):
            # Try to extract it from headers
            tenant_id = extract_tenant_from_headers(request)
            if not tenant_id:
                raise HTTPException(
                    status_code=status.HTTP_403_FORBIDDEN,
                    detail="Tenant access required"
                )
            request.state.tenant_id = tenant_id

        return await func(*args, **kwargs)

    return wrapper

def require_role(role: str):
    """Decorator to require a specific role"""

    def decorator(func: Callable) -> Callable:
        @wraps(func)
        async def wrapper(*args, **kwargs):
            request = None
            for arg in args:
                if isinstance(arg, Request):
                    request = arg
                    break

            if not request:
                request = kwargs.get('request')

            if not request:
                raise HTTPException(
                    status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                    detail="Request object not found"
                )

            user = get_current_user(request)
            user_role = user.get('role', '').lower()

            if user_role != role.lower() and user_role != 'admin':
                raise HTTPException(
                    status_code=status.HTTP_403_FORBIDDEN,
                    detail=f"{role} role required"
                )

            return await func(*args, **kwargs)

        return wrapper

    return decorator

def require_admin_role(func: Callable) -> Callable:
    """
    Decorator to require the admin role - simplified version for FastAPI dependencies

    This decorator ensures only users with the 'admin' role can access the endpoint.
    Can be used as a FastAPI dependency or as a function decorator.

    Usage as dependency:
        @router.delete("/admin/users/{user_id}")
        async def delete_user(
            user_id: str,
            current_user = Depends(get_current_user_dep),
            _admin_check = Depends(require_admin_role),
        ):
            # Admin-only logic here

    Usage as decorator:
        @require_admin_role
        @router.delete("/admin/users/{user_id}")
        async def delete_user(...):
            # Admin-only logic here
    """

    @wraps(func)
    async def wrapper(*args, **kwargs):
        # Find the request object in the arguments
        request = None
        current_user = None

        # Extract request and current_user from the arguments
        for arg in args:
            if isinstance(arg, Request):
                request = arg
            elif isinstance(arg, dict) and 'user_id' in arg:
                current_user = arg

        # Check kwargs for request and current_user
        if not request:
            request = kwargs.get('request')
        if not current_user:
            current_user = kwargs.get('current_user')

        # If we still don't have current_user, try to get it from the request
        if not current_user and request:
            current_user = get_current_user(request)

        if not current_user:
            raise HTTPException(
                status_code=status.HTTP_401_UNAUTHORIZED,
                detail="Authentication required"
            )

        # Check whether the user has the admin role
        user_role = current_user.get('role', '').lower()

        if user_role != 'admin':
            logger.warning("Non-admin user attempted admin operation",
                           user_id=current_user.get('user_id'),
                           role=user_role)
            raise HTTPException(
                status_code=status.HTTP_403_FORBIDDEN,
                detail="Admin role required"
            )

        logger.info("Admin operation authorized",
                    user_id=current_user.get('user_id'),
                    endpoint=func.__name__)

        return await func(*args, **kwargs)

    return wrapper

def require_roles(allowed_roles: List[str]):
    """
    Decorator to require one of multiple roles

    Args:
        allowed_roles: List of roles that are allowed to access the endpoint

    Usage:
        @require_roles(['admin', 'manager'])
        @router.post("/sensitive-operation")
        async def sensitive_operation(...):
            # Only admins and managers can access
    """

    def decorator(func: Callable) -> Callable:
        @wraps(func)
        async def wrapper(*args, **kwargs):
            request = None
            current_user = None

            # Extract request and current_user from the arguments
            for arg in args:
                if isinstance(arg, Request):
                    request = arg
                elif isinstance(arg, dict) and 'user_id' in arg:
                    current_user = arg

            # Check kwargs
            if not request:
                request = kwargs.get('request')
            if not current_user:
                current_user = kwargs.get('current_user')

            # Get the user from the request if not provided
            if not current_user and request:
                current_user = get_current_user(request)

            if not current_user:
                raise HTTPException(
                    status_code=status.HTTP_401_UNAUTHORIZED,
                    detail="Authentication required"
                )

            # Check whether the user has one of the allowed roles
            user_role = current_user.get('role', '').lower()
            allowed_roles_lower = [role.lower() for role in allowed_roles]

            if user_role not in allowed_roles_lower:
                logger.warning("Unauthorized role attempted restricted operation",
                               user_id=current_user.get('user_id'),
                               user_role=user_role,
                               allowed_roles=allowed_roles)
                raise HTTPException(
                    status_code=status.HTTP_403_FORBIDDEN,
                    detail=f"One of these roles required: {', '.join(allowed_roles)}"
                )

            logger.info("Role-based operation authorized",
                        user_id=current_user.get('user_id'),
                        user_role=user_role,
                        endpoint=func.__name__)

            return await func(*args, **kwargs)

        return wrapper

    return decorator

def require_tenant_admin(func: Callable) -> Callable:
    """
    Decorator to require the admin role within a specific tenant context

    This checks that the user is an admin AND has access to the tenant
    being operated on. Useful for tenant-scoped admin operations.
    """

    @wraps(func)
    async def wrapper(*args, **kwargs):
        request = None
        current_user = None

        # Extract request and current_user from the arguments
        for arg in args:
            if isinstance(arg, Request):
                request = arg
            elif isinstance(arg, dict) and 'user_id' in arg:
                current_user = arg

        if not request:
            request = kwargs.get('request')
        if not current_user:
            current_user = kwargs.get('current_user')

        if not current_user and request:
            current_user = get_current_user(request)

        if not current_user:
            raise HTTPException(
                status_code=status.HTTP_401_UNAUTHORIZED,
                detail="Authentication required"
            )

        # Check the admin role first
        user_role = current_user.get('role', '').lower()
        if user_role != 'admin':
            raise HTTPException(
                status_code=status.HTTP_403_FORBIDDEN,
                detail="Admin role required"
            )

        # Check tenant access
        tenant_id = get_current_tenant_id(request) if request else None
        if not tenant_id:
            raise HTTPException(
                status_code=status.HTTP_403_FORBIDDEN,
                detail="Tenant context required"
            )

        # Additional tenant admin validation could go here.
        # For now, we trust that admin users may operate on any tenant.

        logger.info("Tenant admin operation authorized",
                    user_id=current_user.get('user_id'),
                    tenant_id=tenant_id,
                    endpoint=func.__name__)

        return await func(*args, **kwargs)

    return wrapper

def get_current_user(request: Request) -> Dict[str, Any]:
    """Get the current user from request state or headers"""
    if hasattr(request.state, 'user') and request.state.user:
        return request.state.user

    # Fall back to headers (for dev/testing)
    user_info = extract_user_from_headers(request)
    if not user_info:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="User not authenticated"
        )

    return user_info

def get_current_tenant_id(request: Request) -> Optional[str]:
    """Get the current tenant ID from request state or headers"""
    if hasattr(request.state, 'tenant_id'):
        return request.state.tenant_id

    # Fall back to headers
    return extract_tenant_from_headers(request)

def extract_user_from_headers(request: Request) -> Optional[Dict[str, Any]]:
    """Extract user information from forwarded headers (the gateway sets these)"""
    user_id = request.headers.get("x-user-id")
    logger.info("🔍 Extracting user from headers",
                user_id=user_id,
                has_user_id=bool(user_id),
                path=request.url.path)

    if not user_id:
        logger.warning("❌ No x-user-id header found", path=request.url.path)
        return None

    user_context = {
        "user_id": user_id,
        "email": request.headers.get("x-user-email", ""),
        "role": request.headers.get("x-user-role", "user"),
        "tenant_id": request.headers.get("x-tenant-id"),
        "permissions": request.headers.get("X-User-Permissions", "").split(",") if request.headers.get("X-User-Permissions") else [],
        "full_name": request.headers.get("x-user-full-name", ""),
        "subscription_tier": request.headers.get("x-subscription-tier", ""),
        "is_demo": request.headers.get("x-is-demo", "").lower() == "true",
        "demo_session_id": request.headers.get("x-demo-session-id", ""),
        "demo_account_type": request.headers.get("x-demo-account-type", "")
    }

    logger.info("✅ User context extracted from headers",
                user_context=user_context,
                path=request.url.path)

    # Handle service tokens properly
    user_type = request.headers.get("x-user-type", "")
    service_name = request.headers.get("x-service-name", "")

    if user_type == "service" or service_name:
        user_context.update({
            "type": "service",
            "service": service_name,
            "role": "admin",  # Service tokens always carry the admin role
            "is_service": True
        })

    return user_context

def extract_tenant_from_headers(request: Request) -> Optional[str]:
    """Extract the tenant ID from headers"""
    return request.headers.get("x-tenant-id")

def extract_user_from_jwt(auth_header: str) -> Optional[Dict[str, Any]]:
    """
    Extract user information from a JWT token.
    This is a fallback for when the gateway doesn't inject the x-user-* headers.
    """
    try:
        from jose import jwt
        from shared.config.base import is_internal_service

        # Remove the "Bearer " prefix
        token = auth_header.replace("Bearer ", "").strip()

        # Decode without verification (we trust tokens from the gateway).
        # In production, you'd verify with the secret key.
        payload = jwt.decode(token, key="dummy", options={"verify_signature": False})

        logger.debug("JWT payload decoded", payload_keys=list(payload.keys()))

        # Extract user information from the JWT payload
        user_id = payload.get("sub") or payload.get("user_id") or payload.get("service")

        if not user_id:
            logger.warning("No user_id found in JWT payload", payload=payload)
            return None

        # Check whether this is a service token
        token_type = payload.get("type", "")
        service_name = payload.get("service", "")

        if token_type == "service" or is_internal_service(user_id) or is_internal_service(service_name):
            # This is a service token
            service_identifier = service_name or user_id
            user_context = {
                "user_id": service_identifier,
                "type": "service",
                "service": service_identifier,
                "role": "admin",  # Services get admin privileges
                "is_service": True,
                "permissions": ["read", "write", "admin"],
                "email": f"{service_identifier}@internal.service",
                "full_name": f"{service_identifier.replace('-', ' ').title()}"
            }
            logger.info("Service authenticated via JWT", service=service_identifier)
        else:
            # This is a user token
            user_context = {
                "user_id": user_id,
                "type": "user",
                "email": payload.get("email", ""),
                "role": payload.get("role", "user"),
                "tenant_id": payload.get("tenant_id"),
                "permissions": payload.get("permissions", []),
                "full_name": payload.get("full_name", ""),
                "subscription_tier": payload.get("subscription_tier", ""),
                "is_service": False
            }
            logger.info("User authenticated via JWT", user_id=user_id)

        return user_context

    except Exception as e:
        logger.error("Failed to extract user from JWT", error=str(e), error_type=type(e).__name__)
        return None

# ================================================================
# FASTAPI DEPENDENCY FUNCTIONS
# ================================================================

async def get_current_user_dep(request: Request) -> Dict[str, Any]:
    """FastAPI dependency to get the current user - ENHANCED with a JWT fallback for services"""
    try:
        # Enhanced logging for debugging
        logger.info(
            "🔐 Authentication attempt",
            path=request.url.path,
            method=request.method,
            has_auth_header=bool(request.headers.get("authorization")),
            has_x_user_id=bool(request.headers.get("x-user-id")),
            has_x_is_demo=bool(request.headers.get("x-is-demo")),
            has_x_demo_session_id=bool(request.headers.get("x-demo-session-id")),
            x_user_id=request.headers.get("x-user-id", "MISSING"),
            x_is_demo=request.headers.get("x-is-demo", "MISSING"),
            x_demo_session_id=request.headers.get("x-demo-session-id", "MISSING"),
            client_ip=request.client.host if request.client else "unknown"
        )

        # Try to get the user from headers first (preferred method)
        user = None
        try:
            user = get_current_user(request)
        except HTTPException:
            # If headers are missing, try the JWT token as a fallback
            auth_header = request.headers.get("authorization", "")
            if auth_header.startswith("Bearer "):
                user = extract_user_from_jwt(auth_header)
                if user:
                    logger.info(
                        "User authenticated via JWT fallback",
                        user_id=user.get("user_id"),
                        user_type=user.get("type", "user"),
                        is_service=user.get("type") == "service",
                        path=request.url.path
                    )

            # If there is still no user, re-raise the original exception
            if not user:
                raise

        logger.info(
            "User authenticated successfully",
            user_id=user.get("user_id"),
            user_type=user.get("type", "user"),
            is_service=user.get("type") == "service",
            role=user.get("role"),
            path=request.url.path
        )

        return user

    except HTTPException as e:
        logger.warning(
            "Authentication failed - 401",
            path=request.url.path,
            status_code=e.status_code,
            detail=e.detail,
            has_x_user_id=bool(request.headers.get("x-user-id")),
            has_auth_header=bool(request.headers.get("authorization")),
            x_user_type=request.headers.get("x-user-type", "none"),
            x_service_name=request.headers.get("x-service-name", "none"),
            client_ip=request.client.host if request.client else "unknown"
        )
        raise

async def get_current_tenant_id_dep(request: Request) -> Optional[str]:
    """FastAPI dependency to get the current tenant ID"""
    return get_current_tenant_id(request)

async def require_admin_role_dep(
    current_user: Dict[str, Any] = Depends(get_current_user_dep)
) -> Dict[str, Any]:
    """
    FastAPI dependency that requires the admin role

    Usage:
        @router.delete("/admin/users/{user_id}")
        async def delete_user(
            user_id: str,
            admin_user: Dict[str, Any] = Depends(require_admin_role_dep)
        ):
            # admin_user is guaranteed to have the admin role
    """

    user_role = current_user.get('role', '').lower()

    if user_role != 'admin':
        logger.warning("Non-admin user attempted admin operation",
                       user_id=current_user.get('user_id'),
                       role=user_role)
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail="Admin role required"
        )

    logger.info("Admin operation authorized via dependency",
                user_id=current_user.get('user_id'))

    return current_user

def require_roles_dep(allowed_roles: List[str]):
    """
    FastAPI dependency factory that requires one of multiple roles.
    Note: the factory is deliberately synchronous; it must return the
    dependency function itself, not a coroutine, for Depends() to work.

    Usage:
        require_manager_or_admin = require_roles_dep(['admin', 'manager'])

        @router.post("/sensitive-operation")
        async def sensitive_operation(
            user: Dict[str, Any] = Depends(require_manager_or_admin)
        ):
            # Only admins and managers can access
    """

    async def check_roles(
        current_user: Dict[str, Any] = Depends(get_current_user_dep)
    ) -> Dict[str, Any]:
        user_role = current_user.get('role', '').lower()
        allowed_roles_lower = [role.lower() for role in allowed_roles]

        if user_role not in allowed_roles_lower:
            logger.warning("Unauthorized role attempted restricted operation",
                           user_id=current_user.get('user_id'),
                           user_role=user_role,
                           allowed_roles=allowed_roles)
            raise HTTPException(
                status_code=status.HTTP_403_FORBIDDEN,
                detail=f"One of these roles required: {', '.join(allowed_roles)}"
            )

        logger.info("Role-based operation authorized via dependency",
                    user_id=current_user.get('user_id'),
                    user_role=user_role)

        return current_user

    return check_roles

# ================================================================
# UTILITY FUNCTIONS FOR ROLE CHECKING
# ================================================================

def is_admin_user(user: Dict[str, Any]) -> bool:
    """Check whether the user has the admin role"""
    return user.get('role', '').lower() == 'admin'

def is_user_in_roles(user: Dict[str, Any], allowed_roles: List[str]) -> bool:
    """Check whether the user has one of the allowed roles"""
    user_role = user.get('role', '').lower()
    allowed_roles_lower = [role.lower() for role in allowed_roles]
    return user_role in allowed_roles_lower

def get_user_permissions(user: Dict[str, Any]) -> List[str]:
    """Get the user's permissions list"""
    return user.get('permissions', [])

def has_permission(user: Dict[str, Any], permission: str) -> bool:
    """Check whether the user has a specific permission"""
    permissions = get_user_permissions(user)
    return permission in permissions

# ================================================================
# ADVANCED ROLE DECORATORS
# ================================================================

def require_permission(permission: str):
    """
    Decorator to require a specific permission

    Usage:
        @require_permission('delete_users')
        @router.delete("/users/{user_id}")
        async def delete_user(...):
            # Only users with the 'delete_users' permission can access
    """

    def decorator(func: Callable) -> Callable:
        @wraps(func)
        async def wrapper(*args, **kwargs):
            current_user = None

            # Extract current_user from the arguments
            for arg in args:
                if isinstance(arg, dict) and 'user_id' in arg:
                    current_user = arg
                    break

            if not current_user:
                current_user = kwargs.get('current_user')

            if not current_user:
                raise HTTPException(
                    status_code=status.HTTP_401_UNAUTHORIZED,
                    detail="Authentication required"
                )

            # Check the permission
            if not has_permission(current_user, permission):
                # Admins bypass permission checks
                if not is_admin_user(current_user):
                    logger.warning("User lacks required permission",
                                   user_id=current_user.get('user_id'),
                                   required_permission=permission,
                                   user_permissions=get_user_permissions(current_user))
                    raise HTTPException(
                        status_code=status.HTTP_403_FORBIDDEN,
                        detail=f"Permission '{permission}' required"
                    )

            logger.info("Permission-based operation authorized",
                        user_id=current_user.get('user_id'),
                        permission=permission)

            return await func(*args, **kwargs)

        return wrapper

    return decorator

# Export all decorators and functions
__all__ = [
    # Main decorators
    'require_authentication',
    'require_tenant_access',
    'require_role',
    'require_admin_role',
    'require_roles',
    'require_tenant_admin',
    'require_permission',

    # FastAPI dependencies
    'get_current_user_dep',
    'get_current_tenant_id_dep',
    'require_admin_role_dep',
    'require_roles_dep',

    # Utility functions
    'get_current_user',
    'get_current_tenant_id',
    'extract_user_from_headers',
    'extract_user_from_jwt',
    'extract_tenant_from_headers',
    'is_admin_user',
    'is_user_in_roles',
    'get_user_permissions',
    'has_permission'
]
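
A short usage sketch for the dependency factory above; the endpoint and role list are illustrative only.

from fastapi import APIRouter, Depends
from shared.auth.decorators import require_roles_dep

router = APIRouter()
require_manager_or_admin = require_roles_dep(['admin', 'manager'])

@router.post("/sensitive-operation")  # hypothetical endpoint
async def sensitive_operation(user: dict = Depends(require_manager_or_admin)):
    # user is guaranteed to carry one of the allowed roles here
    return {"ok": True, "actor": user.get("user_id")}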
292
shared/auth/jwt_handler.py
Executable file
@@ -0,0 +1,292 @@
# shared/auth/jwt_handler.py
"""
Enhanced JWT Handler with proper token structure and validation
FIXED VERSION - Consistent token format between all services
"""

from jose import jwt, JWTError
from datetime import datetime, timedelta, timezone
from typing import Optional, Dict, Any
import structlog

logger = structlog.get_logger()

class JWTHandler:
    """Enhanced JWT token handling with a consistent format"""

    def __init__(self, secret_key: str, algorithm: str = "HS256"):
        self.secret_key = secret_key
        self.algorithm = algorithm

    def create_access_token_from_payload(self, payload: Dict[str, Any]) -> str:
        """
        Create a JWT ACCESS token from a complete payload
        ✅ FIXED: Only creates access tokens with the access token structure
        """
        try:
            # Ensure this is marked as an access token
            payload["type"] = "access"

            encoded_jwt = jwt.encode(payload, self.secret_key, algorithm=self.algorithm)
            logger.debug(f"Created access token with payload keys: {list(payload.keys())}")
            return encoded_jwt

        except Exception as e:
            logger.error(f"Access token creation failed: {e}")
            raise ValueError(f"Failed to encode access token: {str(e)}")

    def create_refresh_token_from_payload(self, payload: Dict[str, Any]) -> str:
        """
        Create a JWT REFRESH token from a complete payload
        ✅ FIXED: Only creates refresh tokens with the refresh token structure
        """
        try:
            # Ensure this is marked as a refresh token
            payload["type"] = "refresh"

            encoded_jwt = jwt.encode(payload, self.secret_key, algorithm=self.algorithm)
            logger.debug(f"Created refresh token with payload keys: {list(payload.keys())}")
            return encoded_jwt

        except Exception as e:
            logger.error(f"Refresh token creation failed: {e}")
            raise ValueError(f"Failed to encode refresh token: {str(e)}")

    def create_access_token(self, user_data: Dict[str, Any], expires_delta: Optional[timedelta] = None) -> str:
        """
        Create a JWT access token with the STANDARD structure (legacy method)
        ✅ FIXED: Consistent payload format for access tokens
        """
        to_encode = {
            "sub": user_data["user_id"],
            "user_id": user_data["user_id"],
            "email": user_data["email"],
            "type": "access"
        }

        # Add optional fields if present
        if "full_name" in user_data:
            to_encode["full_name"] = user_data["full_name"]
        if "is_verified" in user_data:
            to_encode["is_verified"] = user_data["is_verified"]
        if "is_active" in user_data:
            to_encode["is_active"] = user_data["is_active"]

        # Set the expiration
        if expires_delta:
            expire = datetime.now(timezone.utc) + expires_delta
        else:
            expire = datetime.now(timezone.utc) + timedelta(minutes=30)

        to_encode.update({
            "exp": expire,
            "iat": datetime.now(timezone.utc),
            "iss": "bakery-auth"
        })

        encoded_jwt = jwt.encode(to_encode, self.secret_key, algorithm=self.algorithm)
        logger.debug(f"Created access token for user {user_data['email']}")
        return encoded_jwt

    def create_refresh_token(self, user_data: Dict[str, Any], expires_delta: Optional[timedelta] = None) -> str:
        """
        Create a JWT refresh token with a MINIMAL payload (legacy method)
        ✅ FIXED: Consistent refresh token structure, different from access
        """
        to_encode = {
            "sub": user_data["user_id"],
            "user_id": user_data["user_id"],
            "type": "refresh"
        }

        # Add a unique identifier to prevent duplicates
        if "jti" in user_data:
            to_encode["jti"] = user_data["jti"]
        else:
            import uuid
            to_encode["jti"] = str(uuid.uuid4())

        # Include the email only if available (optional for refresh tokens)
        if "email" in user_data and user_data["email"]:
            to_encode["email"] = user_data["email"]

        # Set the expiration
        if expires_delta:
            expire = datetime.now(timezone.utc) + expires_delta
        else:
            expire = datetime.now(timezone.utc) + timedelta(days=30)

        to_encode.update({
            "exp": expire,
            "iat": datetime.now(timezone.utc),
            "iss": "bakery-auth"
        })

        encoded_jwt = jwt.encode(to_encode, self.secret_key, algorithm=self.algorithm)
        logger.debug(f"Created refresh token for user {user_data['user_id']}")
        return encoded_jwt

    def create_service_token(
        self,
        service_name: str,
        expires_delta: Optional[timedelta] = None,
        tenant_id: Optional[str] = None
    ) -> str:
        """
        Create a JWT SERVICE token for inter-service communication
        ✅ UNIFIED: Single source of truth for all service token creation
        ✅ ENHANCED: Supports tenant context for tenant-scoped operations

        Args:
            service_name: Name of the service (e.g., 'auth-service', 'demo-session')
            expires_delta: Optional expiration time (defaults to 1 hour for inter-service calls)
            tenant_id: Optional tenant ID for tenant-scoped service operations

        Returns:
            Encoded JWT service token
        """
        to_encode = {
            "sub": service_name,
            "user_id": f"{service_name}-service",
            "email": f"{service_name}-service@internal",
            "service": service_name,
            "type": "service",
            "role": "admin",  # Services have admin privileges
            "is_service": True,
            "full_name": f"{service_name.title()} Service",
            "is_verified": True,
            "is_active": True
        }

        # Include the tenant context when provided, for tenant-scoped operations
        if tenant_id:
            to_encode["tenant_id"] = tenant_id

        # Set the expiration (default: 1 hour for inter-service calls)
        if expires_delta:
            expire = datetime.now(timezone.utc) + expires_delta
        else:
            expire = datetime.now(timezone.utc) + timedelta(hours=1)

        to_encode.update({
            "exp": expire,
            "iat": datetime.now(timezone.utc),
            "iss": "bakery-auth"
        })

        encoded_jwt = jwt.encode(to_encode, self.secret_key, algorithm=self.algorithm)
        logger.debug(f"Created service token for service {service_name}", tenant_id=tenant_id)
        return encoded_jwt

    def verify_token(self, token: str) -> Optional[Dict[str, Any]]:
        """
        Verify and decode a JWT token
        """
        try:
            payload = jwt.decode(token, self.secret_key, algorithms=[self.algorithm])

            # Check whether the token is expired
            exp_timestamp = payload.get("exp")
            if exp_timestamp:
                exp_datetime = datetime.fromtimestamp(exp_timestamp, tz=timezone.utc)
                if datetime.now(timezone.utc) > exp_datetime:
                    logger.debug("Token is expired")
                    return None

            logger.debug(f"Token verified successfully, type: {payload.get('type', 'unknown')}")
            return payload

        except JWTError as e:
            logger.warning(f"JWT verification failed: {e}")
            return None
        except Exception as e:
            logger.error(f"Token verification error: {e}")
            return None

    def decode_token_no_verify(self, token: str) -> Dict[str, Any]:
        """
        Decode a JWT token without verification (for inspection purposes)
        """
        try:
            # A key must still be provided even with signature verification disabled
            payload = jwt.decode(token, self.secret_key, algorithms=[self.algorithm], options={"verify_signature": False})
            return payload
        except Exception as e:
            logger.error(f"Token decoding failed: {e}")
            raise ValueError("Invalid token format")

    def get_token_type(self, token: str) -> Optional[str]:
        """
        Get the type of a token (access or refresh) without full verification
        """
        try:
            payload = self.decode_token_no_verify(token)
            return payload.get("type")
        except Exception:
            return None

    def is_token_expired(self, token: str) -> bool:
        """
        Check whether a token is expired without full verification
        """
        try:
            payload = self.decode_token_no_verify(token)
            exp_timestamp = payload.get("exp")
            if exp_timestamp:
                exp_datetime = datetime.fromtimestamp(exp_timestamp, tz=timezone.utc)
                return datetime.now(timezone.utc) > exp_datetime
            return True
        except Exception:
            return True

    def extract_user_id(self, token: str) -> Optional[str]:
        """
        Extract the user ID from a token without full verification.
        Useful for quick user identification.
        """
        try:
            payload = self.decode_token_no_verify(token)
            if payload:
                return payload.get("user_id")
        except Exception as e:
            logger.warning(f"Failed to extract user ID from token: {e}")

        return None

    def get_token_info(self, token: str) -> Dict[str, Any]:
        """
        Get comprehensive token information for debugging
        """
        info = {
            "valid": False,
            "expired": True,
            "user_id": None,
            "email": None,
            "type": None,
            "exp": None,
            "iat": None
        }

        try:
            # Try the unverified decode first
            payload = self.decode_token_no_verify(token)
            if payload:
                info.update({
                    "user_id": payload.get("user_id"),
                    "email": payload.get("email"),
                    "type": payload.get("type"),
                    "exp": payload.get("exp"),
                    "iat": payload.get("iat"),
                    "expired": self.is_token_expired(token)
                })

            # Try full verification
            verified_payload = self.verify_token(token)
            info["valid"] = verified_payload is not None

        except Exception as e:
            logger.warning(f"Failed to get token info: {e}")

        return info
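
A brief round-trip sketch of the handler above, assuming a placeholder secret; create_access_token, verify_token, and get_token_type are used as defined in this file.

from datetime import timedelta
from shared.auth.jwt_handler import JWTHandler

handler = JWTHandler(secret_key="change-me")  # placeholder secret for illustration

token = handler.create_access_token(
    {"user_id": "u-123", "email": "user@example.com"},
    expires_delta=timedelta(minutes=15),
)

payload = handler.verify_token(token)  # returns None if invalid or expired
assert payload is not None and payload["type"] == "access"
print(handler.get_token_type(token))   # -> "access"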
529
shared/auth/tenant_access.py
Executable file
@@ -0,0 +1,529 @@
# ================================================================
# shared/auth/tenant_access.py - Complete Implementation
# ================================================================
"""
Tenant access control utilities for microservices
Provides both gateway-level and service-level tenant access verification
"""

import re
from typing import Dict, Any, Optional
import httpx
import structlog
from fastapi import HTTPException, Depends
import asyncio

# Import FastAPI dependencies
from shared.auth.decorators import get_current_user_dep

# Import settings (adjust the import path based on your project structure)
try:
    from app.core.config import settings
except ImportError:
    try:
        from core.config import settings
    except ImportError:
        # Fallback for different project structures
        import os
        class Settings:
            TENANT_SERVICE_URL = os.getenv("TENANT_SERVICE_URL", "http://tenant-service:8000")
        settings = Settings()

# Set up logging
logger = structlog.get_logger()

class TenantAccessManager:
    """
    Centralized tenant access management for both gateway and service level
    """

    def __init__(self, redis_client=None):
        """
        Initialize the tenant access manager

        Args:
            redis_client: Optional Redis client for caching
        """
        self.redis_client = redis_client

    async def verify_basic_tenant_access(self, user_id: str, tenant_id: str) -> bool:
        """
        Gateway-level: Basic tenant access verification with caching

        Args:
            user_id: User ID to verify
            tenant_id: Tenant ID to check access for

        Returns:
            bool: True if the user has access to the tenant
        """
        # Check the cache first (5-minute TTL)
        cache_key = f"tenant_access:{user_id}:{tenant_id}"
        if self.redis_client:
            try:
                cached_result = await self.redis_client.get(cache_key)
                if cached_result is not None:
                    return cached_result.decode() == "true" if isinstance(cached_result, bytes) else cached_result == "true"
            except Exception as cache_error:
                logger.warning(f"Cache lookup failed: {cache_error}")

        # Verify with the tenant service
        try:
            async with httpx.AsyncClient(timeout=2.0) as client:  # Short timeout for the gateway
                response = await client.get(
                    f"{settings.TENANT_SERVICE_URL}/api/v1/tenants/{tenant_id}/access/{user_id}"
                )

                has_access = response.status_code == 200

                # If the direct access check fails, check hierarchical access
                if not has_access:
                    hierarchical_access = await self._check_hierarchical_access(user_id, tenant_id)
                    has_access = hierarchical_access

                # Cache the result (5 minutes)
                if self.redis_client:
                    try:
                        await self.redis_client.setex(cache_key, 300, "true" if has_access else "false")
                    except Exception as cache_error:
                        logger.warning(f"Cache set failed: {cache_error}")

                logger.debug("Tenant access check",
                             user_id=user_id,
                             tenant_id=tenant_id,
                             has_access=has_access)

                return has_access

        except asyncio.TimeoutError:
            logger.error(f"Timeout verifying tenant access: user={user_id}, tenant={tenant_id}")
            # Fail open for availability (let the service handle the detailed check)
            return True
        except httpx.RequestError as e:
            logger.error(f"Request error verifying tenant access: {e}")
            # Fail open for availability
            return True
        except Exception as e:
            logger.error(f"Gateway tenant access verification failed: {e}")
            # Fail open for availability (let the service handle the detailed check)
            return True

    async def _check_hierarchical_access(self, user_id: str, tenant_id: str) -> bool:
        """
        Check whether the user has hierarchical access (parent tenant access to a child)

        Args:
            user_id: User ID to verify
            tenant_id: Target tenant ID to check access for

        Returns:
            bool: True if the user has hierarchical access to the tenant
        """
        try:
            async with httpx.AsyncClient(timeout=3.0) as client:
                response = await client.get(
                    f"{settings.TENANT_SERVICE_URL}/api/v1/tenants/{tenant_id}/hierarchy"
                )

                if response.status_code == 200:
                    hierarchy_data = response.json()
                    parent_tenant_id = hierarchy_data.get("parent_tenant_id")

                    # If this is a child tenant, check whether the user has access to the parent
                    if parent_tenant_id:
                        parent_access = await self._check_parent_access(user_id, parent_tenant_id)
                        if parent_access:
                            # For aggregated data only, allow parent access to the child;
                            # detailed child data requires direct access
                            user_role = await self.get_user_role_in_tenant(user_id, parent_tenant_id)
                            if user_role in ["owner", "admin", "network_admin"]:
                                return True

                return False
        except Exception as e:
            logger.error(f"Failed to check hierarchical access: {e}")
            return False

    async def _check_parent_access(self, user_id: str, parent_tenant_id: str) -> bool:
        """
        Check whether the user has access to the parent tenant (owner, admin, or network_admin role)

        Args:
            user_id: User ID
            parent_tenant_id: Parent tenant ID

        Returns:
            bool: True if the user has access to the parent tenant
        """
        user_role = await self.get_user_role_in_tenant(user_id, parent_tenant_id)
        return user_role in ["owner", "admin", "network_admin"]

    async def verify_hierarchical_access(self, user_id: str, tenant_id: str) -> dict:
        """
        Verify hierarchical access and return the access type and permissions

        Args:
            user_id: User ID
            tenant_id: Target tenant ID

        Returns:
            dict: Access information including access_type, can_view_children, etc.
        """
        # First check direct access
        direct_access = await self._check_direct_access(user_id, tenant_id)

        if direct_access:
            return {
                "access_type": "direct",
                "has_access": True,
                "can_view_children": False,
                "tenant_id": tenant_id
            }

        # Check whether this is a child tenant and the user has parent access
        hierarchy_info = await self._get_tenant_hierarchy(tenant_id)

        if hierarchy_info and hierarchy_info.get("parent_tenant_id"):
            parent_tenant_id = hierarchy_info["parent_tenant_id"]
            parent_access = await self._check_parent_access(user_id, parent_tenant_id)

            if parent_access:
                user_role = await self.get_user_role_in_tenant(user_id, parent_tenant_id)

                # Network admins have full access across the entire hierarchy
                if user_role == "network_admin":
                    return {
                        "access_type": "hierarchical",
                        "has_access": True,
                        "tenant_id": tenant_id,
                        "parent_tenant_id": parent_tenant_id,
                        "is_network_admin": True,
                        "can_view_children": True
                    }
                # Regular admins have read-only access to children's aggregated data
                elif user_role in ["owner", "admin"]:
                    return {
                        "access_type": "hierarchical",
                        "has_access": True,
                        "tenant_id": tenant_id,
                        "parent_tenant_id": parent_tenant_id,
                        "is_network_admin": False,
                        "can_view_children": True  # Can view aggregated data, not detailed
                    }

        return {
            "access_type": "none",
            "has_access": False,
            "tenant_id": tenant_id,
            "can_view_children": False
        }

    async def _check_direct_access(self, user_id: str, tenant_id: str) -> bool:
        """
        Check direct access to a tenant (without hierarchy)
        """
        try:
            async with httpx.AsyncClient(timeout=2.0) as client:
                response = await client.get(
                    f"{settings.TENANT_SERVICE_URL}/api/v1/tenants/{tenant_id}/access/{user_id}"
                )
                return response.status_code == 200
        except Exception as e:
            logger.error(f"Failed to check direct access: {e}")
            return False

    async def _get_tenant_hierarchy(self, tenant_id: str) -> dict:
        """
        Get tenant hierarchy information

        Args:
            tenant_id: Tenant ID

        Returns:
            dict: Hierarchy information
        """
        try:
            async with httpx.AsyncClient(timeout=3.0) as client:
                response = await client.get(
                    f"{settings.TENANT_SERVICE_URL}/api/v1/tenants/{tenant_id}/hierarchy"
                )

                if response.status_code == 200:
                    return response.json()
                return {}
        except Exception as e:
            logger.error(f"Failed to get tenant hierarchy: {e}")
            return {}

    async def get_accessible_tenants_hierarchy(self, user_id: str) -> list:
        """
        Get all tenants a user has access to, organized in a hierarchy

        Args:
            user_id: User ID

        Returns:
            list: List of tenants with hierarchy structure
        """
        try:
            async with httpx.AsyncClient(timeout=5.0) as client:
                response = await client.get(
                    f"{settings.TENANT_SERVICE_URL}/api/v1/tenants/users/{user_id}/hierarchy"
                )
                if response.status_code == 200:
                    tenants = response.json()
                    logger.debug("Retrieved user tenants with hierarchy",
|
||||
user_id=user_id,
|
||||
tenant_count=len(tenants))
|
||||
return tenants
|
||||
else:
|
||||
logger.warning(f"Failed to get user tenants hierarchy: {response.status_code}")
|
||||
return []
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to get user tenants hierarchy: {e}")
|
||||
return []
|
||||
|
||||
async def get_user_role_in_tenant(self, user_id: str, tenant_id: str) -> Optional[str]:
|
||||
"""
|
||||
Get user's role within a specific tenant
|
||||
|
||||
Args:
|
||||
user_id: User ID
|
||||
tenant_id: Tenant ID
|
||||
|
||||
Returns:
|
||||
Optional[str]: User's role in tenant (owner, admin, manager, user, network_admin) or None
|
||||
"""
|
||||
try:
|
||||
async with httpx.AsyncClient(timeout=3.0) as client:
|
||||
response = await client.get(
|
||||
f"{settings.TENANT_SERVICE_URL}/api/v1/tenants/{tenant_id}/members/{user_id}"
|
||||
)
|
||||
if response.status_code == 200:
|
||||
data = response.json()
|
||||
role = data.get("role")
|
||||
logger.debug(f"User role in tenant",
|
||||
user_id=user_id,
|
||||
tenant_id=tenant_id,
|
||||
role=role)
|
||||
return role
|
||||
elif response.status_code == 404:
|
||||
logger.debug(f"User not found in tenant",
|
||||
user_id=user_id,
|
||||
tenant_id=tenant_id)
|
||||
return None
|
||||
else:
|
||||
logger.warning(f"Unexpected response getting user role: {response.status_code}")
|
||||
return None
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to get user role in tenant: {e}")
|
||||
return None
|
||||
|
||||
async def verify_resource_permission(
|
||||
self,
|
||||
user_id: str,
|
||||
tenant_id: str,
|
||||
resource: str,
|
||||
action: str
|
||||
) -> bool:
|
||||
"""
|
||||
Fine-grained resource permission check (used by services)
|
||||
|
||||
Args:
|
||||
user_id: User ID
|
||||
tenant_id: Tenant ID
|
||||
resource: Resource type (sales, training, forecasts, etc.)
|
||||
action: Action being performed (read, write, delete, etc.)
|
||||
|
||||
Returns:
|
||||
bool: True if user has permission
|
||||
"""
|
||||
user_role = await self.get_user_role_in_tenant(user_id, tenant_id)
|
||||
|
||||
if not user_role:
|
||||
return False
|
||||
|
||||
# Role-based permission matrix
|
||||
permissions = {
|
||||
"owner": ["*"], # Owners can do everything
|
||||
"admin": ["read", "write", "delete", "manage"],
|
||||
"manager": ["read", "write"],
|
||||
"user": ["read"]
|
||||
}
|
||||
|
||||
allowed_actions = permissions.get(user_role, [])
|
||||
has_permission = "*" in allowed_actions or action in allowed_actions
|
||||
|
||||
logger.debug(f"Resource permission check",
|
||||
user_id=user_id,
|
||||
tenant_id=tenant_id,
|
||||
resource=resource,
|
||||
action=action,
|
||||
user_role=user_role,
|
||||
has_permission=has_permission)
|
||||
|
||||
return has_permission
|
||||
|
||||
async def get_user_tenants(self, user_id: str) -> list:
|
||||
"""
|
||||
Get all tenants a user has access to
|
||||
|
||||
Args:
|
||||
user_id: User ID
|
||||
|
||||
Returns:
|
||||
list: List of tenant dictionaries
|
||||
"""
|
||||
try:
|
||||
async with httpx.AsyncClient(timeout=5.0) as client:
|
||||
response = await client.get(
|
||||
f"{settings.TENANT_SERVICE_URL}/api/v1/tenants/users/{user_id}"
|
||||
)
|
||||
if response.status_code == 200:
|
||||
tenants = response.json()
|
||||
logger.debug(f"Retrieved user tenants",
|
||||
user_id=user_id,
|
||||
tenant_count=len(tenants))
|
||||
return tenants
|
||||
else:
|
||||
logger.warning(f"Failed to get user tenants: {response.status_code}")
|
||||
return []
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to get user tenants: {e}")
|
||||
return []
|
||||
|
||||
# Global instance for easy import
|
||||
tenant_access_manager = TenantAccessManager()
|
||||
|
||||
|
||||
|
||||
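
# ----------------------------------------------------------------
# Illustrative sketch (not part of the public API): gating a write
# through the role/permission matrix above. The user and tenant IDs
# are placeholder values; a "manager" role resolves to
# ["read", "write"], so "write" on "forecasts" passes while "delete"
# would be rejected.
# ----------------------------------------------------------------
async def _example_can_edit_forecasts() -> bool:
    return await tenant_access_manager.verify_resource_permission(
        user_id="user-123",       # placeholder
        tenant_id="tenant-456",   # placeholder
        resource="forecasts",
        action="write",
    )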
# ================================================================
# FASTAPI DEPENDENCIES
# ================================================================

async def verify_tenant_access_dep(
    tenant_id: str,
    current_user: Dict[str, Any] = Depends(get_current_user_dep)
) -> str:
    """
    FastAPI dependency to verify tenant access and return tenant_id

    Args:
        tenant_id: Tenant ID from path parameter
        current_user: Current user from auth dependency

    Returns:
        str: Validated tenant_id

    Raises:
        HTTPException: If user doesn't have access to tenant
    """
    has_access = await tenant_access_manager.verify_basic_tenant_access(current_user["user_id"], tenant_id)
    if not has_access:
        logger.warning("Access denied to tenant",
                       user_id=current_user["user_id"],
                       tenant_id=tenant_id)
        raise HTTPException(
            status_code=403,
            detail=f"User {current_user['user_id']} does not have access to tenant {tenant_id}"
        )

    logger.debug("Tenant access verified",
                 user_id=current_user["user_id"],
                 tenant_id=tenant_id)

    return tenant_id


async def verify_tenant_permission_dep(
    tenant_id: str,
    resource: str,
    action: str,
    current_user: Dict[str, Any] = Depends(get_current_user_dep)
) -> str:
    """
    FastAPI dependency to verify tenant access AND resource permission

    Args:
        tenant_id: Tenant ID from path parameter
        resource: Resource type being accessed
        action: Action being performed
        current_user: Current user from auth dependency

    Returns:
        str: Validated tenant_id

    Raises:
        HTTPException: If user doesn't have access or permission
    """
    # First verify basic tenant access
    has_access = await tenant_access_manager.verify_basic_tenant_access(current_user["user_id"], tenant_id)
    if not has_access:
        raise HTTPException(
            status_code=403,
            detail=f"Access denied to tenant {tenant_id}"
        )

    # Then verify specific resource permission
    has_permission = await tenant_access_manager.verify_resource_permission(
        current_user["user_id"], tenant_id, resource, action
    )
    if not has_permission:
        raise HTTPException(
            status_code=403,
            detail=f"Insufficient permissions for {action} on {resource}"
        )

    logger.debug("Tenant access and permission verified",
                 user_id=current_user["user_id"],
                 tenant_id=tenant_id,
                 resource=resource,
                 action=action)

    return tenant_id
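
# ----------------------------------------------------------------
# Illustrative sketch (not used by production code): wiring
# verify_tenant_access_dep as a path-level guard in a service route.
# The router prefix and handler body are assumptions for the example;
# only the dependency itself comes from this module.
# ----------------------------------------------------------------
def _example_router():
    """Build a demo router guarded by verify_tenant_access_dep."""
    from fastapi import APIRouter

    router = APIRouter(prefix="/api/v1/tenants")

    @router.get("/{tenant_id}/reports")
    async def list_reports(tenant_id: str = Depends(verify_tenant_access_dep)):
        # Reaching this point means verify_basic_tenant_access() returned
        # True; otherwise the dependency already raised a 403.
        return {"tenant_id": tenant_id, "reports": []}

    return router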
# ================================================================
# UTILITY FUNCTIONS
# ================================================================

def extract_tenant_id_from_path(path: str) -> Optional[str]:
    """
    More robust tenant ID extraction using regex pattern matching
    Only matches actual tenant-scoped paths with UUID format
    """
    # Pattern for tenant-scoped paths: /api/v1/tenants/{uuid}/...
    tenant_pattern = r'/api/v1/tenants/([0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12})/.*'

    match = re.match(tenant_pattern, path, re.IGNORECASE)
    if match:
        return match.group(1)
    return None


def is_tenant_scoped_path(path: str) -> bool:
    """
    Check if path is tenant-scoped (contains /tenants/{tenant_id}/)

    Args:
        path: URL path

    Returns:
        bool: True if path is tenant-scoped
    """
    return extract_tenant_id_from_path(path) is not None
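
# ----------------------------------------------------------------
# Illustrative sketch: expected behaviour of the path helpers.
# The UUID below is an arbitrary example value.
# ----------------------------------------------------------------
def _example_path_helpers() -> None:
    path = "/api/v1/tenants/123e4567-e89b-12d3-a456-426614174000/alerts"
    assert extract_tenant_id_from_path(path) == "123e4567-e89b-12d3-a456-426614174000"
    # Non-tenant-scoped paths yield no match, so this returns False
    assert is_tenant_scoped_path("/api/v1/health") is False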
# ================================================================
# EXPORTS
# ================================================================

__all__ = [
    # Classes
    "TenantAccessManager",
    "tenant_access_manager",

    # Dependencies
    "verify_tenant_access_dep",
    "verify_tenant_permission_dep",

    # Utilities
    "extract_tenant_id_from_path",
    "is_tenant_scoped_path"
]
297
shared/clients/__init__.py
Executable file
@@ -0,0 +1,297 @@
# shared/clients/__init__.py
"""
Service Client Factory and Convenient Imports
Provides easy access to all service clients
"""

from .base_service_client import BaseServiceClient, ServiceAuthenticator
from .auth_client import AuthServiceClient
from .training_client import TrainingServiceClient
from .sales_client import SalesServiceClient
from .external_client import ExternalServiceClient
from .forecast_client import ForecastServiceClient
from .inventory_client import InventoryServiceClient
from .orders_client import OrdersServiceClient
from .production_client import ProductionServiceClient
from .recipes_client import RecipesServiceClient
from .suppliers_client import SuppliersServiceClient
from .tenant_client import TenantServiceClient
from .ai_insights_client import AIInsightsClient
from .alerts_client import AlertsServiceClient
from .alert_processor_client import AlertProcessorClient, get_alert_processor_client
from .procurement_client import ProcurementServiceClient
from .distribution_client import DistributionServiceClient

# Import config
from shared.config.base import BaseServiceSettings

# Cache clients to avoid recreating them
_client_cache = {}


def get_training_client(config: BaseServiceSettings = None, service_name: str = "unknown") -> TrainingServiceClient:
    """Get or create a training service client"""
    if config is None:
        from app.core.config import settings as config

    cache_key = f"training_{service_name}"
    if cache_key not in _client_cache:
        _client_cache[cache_key] = TrainingServiceClient(config, service_name)
    return _client_cache[cache_key]


def get_sales_client(config: BaseServiceSettings = None, service_name: str = "unknown") -> SalesServiceClient:
    """Get or create a sales service client"""
    if config is None:
        from app.core.config import settings as config

    cache_key = f"sales_{service_name}"
    if cache_key not in _client_cache:
        _client_cache[cache_key] = SalesServiceClient(config, service_name)
    return _client_cache[cache_key]


def get_external_client(config: BaseServiceSettings = None, service_name: str = "unknown") -> ExternalServiceClient:
    """Get or create an external service client"""
    if config is None:
        from app.core.config import settings as config

    cache_key = f"external_{service_name}"
    if cache_key not in _client_cache:
        _client_cache[cache_key] = ExternalServiceClient(config, service_name)
    return _client_cache[cache_key]


def get_forecast_client(config: BaseServiceSettings = None, service_name: str = "unknown") -> ForecastServiceClient:
    """Get or create a forecast service client"""
    if config is None:
        from app.core.config import settings as config

    cache_key = f"forecast_{service_name}"
    if cache_key not in _client_cache:
        _client_cache[cache_key] = ForecastServiceClient(config, service_name)
    return _client_cache[cache_key]


def get_inventory_client(config: BaseServiceSettings = None, service_name: str = "unknown") -> InventoryServiceClient:
    """Get or create an inventory service client"""
    if config is None:
        from app.core.config import settings as config

    cache_key = f"inventory_{service_name}"
    if cache_key not in _client_cache:
        _client_cache[cache_key] = InventoryServiceClient(config, service_name)
    return _client_cache[cache_key]


def get_orders_client(config: BaseServiceSettings = None, service_name: str = "unknown") -> OrdersServiceClient:
    """Get or create an orders service client"""
    if config is None:
        from app.core.config import settings as config

    cache_key = f"orders_{service_name}"
    if cache_key not in _client_cache:
        _client_cache[cache_key] = OrdersServiceClient(config)
    return _client_cache[cache_key]


def get_production_client(config: BaseServiceSettings = None, service_name: str = "unknown") -> ProductionServiceClient:
    """Get or create a production service client"""
    if config is None:
        from app.core.config import settings as config

    cache_key = f"production_{service_name}"
    if cache_key not in _client_cache:
        _client_cache[cache_key] = ProductionServiceClient(config, service_name)
    return _client_cache[cache_key]


def get_recipes_client(config: BaseServiceSettings = None, service_name: str = "unknown") -> RecipesServiceClient:
    """Get or create a recipes service client"""
    if config is None:
        from app.core.config import settings as config

    cache_key = f"recipes_{service_name}"
    if cache_key not in _client_cache:
        _client_cache[cache_key] = RecipesServiceClient(config, service_name)
    return _client_cache[cache_key]


def get_suppliers_client(config: BaseServiceSettings = None, service_name: str = "unknown") -> SuppliersServiceClient:
    """Get or create a suppliers service client"""
    if config is None:
        from app.core.config import settings as config

    cache_key = f"suppliers_{service_name}"
    if cache_key not in _client_cache:
        _client_cache[cache_key] = SuppliersServiceClient(config, service_name)
    return _client_cache[cache_key]


def get_alerts_client(config: BaseServiceSettings = None, service_name: str = "unknown") -> AlertsServiceClient:
    """Get or create an alerts service client"""
    if config is None:
        from app.core.config import settings as config

    cache_key = f"alerts_{service_name}"
    if cache_key not in _client_cache:
        _client_cache[cache_key] = AlertsServiceClient(config, service_name)
    return _client_cache[cache_key]


def get_tenant_client(config: BaseServiceSettings = None, service_name: str = "unknown") -> TenantServiceClient:
    """Get or create a tenant service client"""
    if config is None:
        from app.core.config import settings as config

    cache_key = f"tenant_{service_name}"
    if cache_key not in _client_cache:
        _client_cache[cache_key] = TenantServiceClient(config)
    return _client_cache[cache_key]


def get_procurement_client(config: BaseServiceSettings = None, service_name: str = "unknown") -> ProcurementServiceClient:
    """Get or create a procurement service client"""
    if config is None:
        from app.core.config import settings as config

    cache_key = f"procurement_{service_name}"
    if cache_key not in _client_cache:
        _client_cache[cache_key] = ProcurementServiceClient(config, service_name)
    return _client_cache[cache_key]


def get_distribution_client(config: BaseServiceSettings = None, service_name: str = "unknown") -> DistributionServiceClient:
    """Get or create a distribution service client"""
    if config is None:
        from app.core.config import settings as config

    cache_key = f"distribution_{service_name}"
    if cache_key not in _client_cache:
        _client_cache[cache_key] = DistributionServiceClient(config, service_name)
    return _client_cache[cache_key]


# Note: get_alert_processor_client is already defined in alert_processor_client.py
# and imported above, so we don't need to redefine it here
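
# ----------------------------------------------------------------
# Illustrative sketch: the cache is keyed by calling-service name, so
# repeated lookups from the same service return the same instance.
# Assumes it runs inside a service where app.core.config is importable
# (otherwise pass a config object explicitly); "orders" is an arbitrary
# caller name.
# ----------------------------------------------------------------
def _example_client_reuse() -> ForecastServiceClient:
    client_a = get_forecast_client(service_name="orders")
    client_b = get_forecast_client(service_name="orders")
    assert client_a is client_b  # served from _client_cache, not recreated
    return client_a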
class ServiceClients:
    """Convenient wrapper for all service clients"""

    def __init__(self, config: BaseServiceSettings = None, service_name: str = "unknown"):
        self.service_name = service_name
        self.config = config or self._get_default_config()

        # Initialize clients lazily
        self._training_client = None
        self._sales_client = None
        self._external_client = None
        self._forecast_client = None
        self._inventory_client = None
        self._orders_client = None
        self._production_client = None
        self._recipes_client = None
        self._suppliers_client = None

    def _get_default_config(self):
        """Get default config from app settings"""
        try:
            from app.core.config import settings
            return settings
        except ImportError:
            raise ImportError("Could not import app config. Please provide config explicitly.")

    @property
    def training(self) -> TrainingServiceClient:
        """Get training service client"""
        if self._training_client is None:
            self._training_client = get_training_client(self.config, self.service_name)
        return self._training_client

    @property
    def sales(self) -> SalesServiceClient:
        """Get sales service client"""
        if self._sales_client is None:
            self._sales_client = get_sales_client(self.config, self.service_name)
        return self._sales_client

    @property
    def external(self) -> ExternalServiceClient:
        """Get external service client"""
        if self._external_client is None:
            self._external_client = get_external_client(self.config, self.service_name)
        return self._external_client

    @property
    def forecast(self) -> ForecastServiceClient:
        """Get forecast service client"""
        if self._forecast_client is None:
            self._forecast_client = get_forecast_client(self.config, self.service_name)
        return self._forecast_client

    @property
    def inventory(self) -> InventoryServiceClient:
        """Get inventory service client"""
        if self._inventory_client is None:
            self._inventory_client = get_inventory_client(self.config, self.service_name)
        return self._inventory_client

    @property
    def orders(self) -> OrdersServiceClient:
        """Get orders service client"""
        if self._orders_client is None:
            self._orders_client = get_orders_client(self.config, self.service_name)
        return self._orders_client

    @property
    def production(self) -> ProductionServiceClient:
        """Get production service client"""
        if self._production_client is None:
            self._production_client = get_production_client(self.config, self.service_name)
        return self._production_client

    @property
    def recipes(self) -> RecipesServiceClient:
        """Get recipes service client"""
        if self._recipes_client is None:
            self._recipes_client = get_recipes_client(self.config, self.service_name)
        return self._recipes_client

    @property
    def suppliers(self) -> SuppliersServiceClient:
        """Get suppliers service client"""
        if self._suppliers_client is None:
            self._suppliers_client = get_suppliers_client(self.config, self.service_name)
        return self._suppliers_client


# Convenience function to get all clients
def get_service_clients(config: BaseServiceSettings = None, service_name: str = "unknown") -> ServiceClients:
    """Get a wrapper with all service clients"""
    return ServiceClients(config, service_name)
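
# ----------------------------------------------------------------
# Illustrative sketch: one wrapper per service; each property creates
# its client lazily on first access and reuses it afterwards.
# "production" is an arbitrary caller name.
# ----------------------------------------------------------------
def _example_wrapper_usage() -> ServiceClients:
    clients = get_service_clients(service_name="production")
    first = clients.forecast   # created on first access
    second = clients.forecast  # same cached instance
    assert first is second
    return clients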
# Export all classes for direct import
__all__ = [
    'BaseServiceClient',
    'ServiceAuthenticator',
    'AuthServiceClient',
    'TrainingServiceClient',
    'SalesServiceClient',
    'ExternalServiceClient',
    'ForecastServiceClient',
    'InventoryServiceClient',
    'OrdersServiceClient',
    'ProductionServiceClient',
    'RecipesServiceClient',
    'SuppliersServiceClient',
    'AlertsServiceClient',
    'AlertProcessorClient',
    'TenantServiceClient',
    'DistributionServiceClient',
    'ServiceClients',
    'get_training_client',
    'get_sales_client',
    'get_external_client',
    'get_forecast_client',
    'get_inventory_client',
    'get_orders_client',
    'get_production_client',
    'get_recipes_client',
    'get_suppliers_client',
    'get_alerts_client',
    'get_alert_processor_client',
    'get_tenant_client',
    'get_procurement_client',
    'get_distribution_client',
    'get_service_clients',
    'create_forecast_client'
]

# Backward compatibility aliases
create_forecast_client = get_forecast_client
391
shared/clients/ai_insights_client.py
Executable file
@@ -0,0 +1,391 @@
"""
|
||||
AI Insights Service HTTP Client
|
||||
Shared client for all services to post and retrieve AI insights
|
||||
"""
|
||||
|
||||
import httpx
|
||||
from typing import Dict, List, Any, Optional
|
||||
from uuid import UUID
|
||||
import structlog
|
||||
from datetime import datetime
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
|
||||
class AIInsightsClient:
|
||||
"""
|
||||
HTTP client for AI Insights Service.
|
||||
Allows services to post insights, retrieve orchestration-ready insights, and record feedback.
|
||||
"""
|
||||
|
||||
def __init__(self, base_url: str, timeout: int = 30):
|
||||
"""
|
||||
Initialize AI Insights client.
|
||||
|
||||
Args:
|
||||
base_url: Base URL of AI Insights Service (e.g., http://ai-insights-service:8000)
|
||||
timeout: Request timeout in seconds
|
||||
"""
|
||||
self.base_url = base_url.rstrip('/')
|
||||
self.timeout = timeout
|
||||
self.client = httpx.AsyncClient(timeout=self.timeout)
|
||||
|
||||
async def close(self):
|
||||
"""Close the HTTP client."""
|
||||
await self.client.aclose()
|
||||
|
||||
async def create_insight(
|
||||
self,
|
||||
tenant_id: UUID,
|
||||
insight_data: Dict[str, Any]
|
||||
) -> Optional[Dict[str, Any]]:
|
||||
"""
|
||||
Create a new insight in AI Insights Service.
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant UUID
|
||||
insight_data: Insight data dictionary with fields:
|
||||
- type: str (optimization, alert, prediction, recommendation, insight, anomaly)
|
||||
- priority: str (low, medium, high, critical)
|
||||
- category: str (forecasting, procurement, production, inventory, etc.)
|
||||
- title: str
|
||||
- description: str
|
||||
- impact_type: str
|
||||
- impact_value: float
|
||||
- impact_unit: str
|
||||
- confidence: int (0-100)
|
||||
- metrics_json: dict
|
||||
- actionable: bool
|
||||
- recommendation_actions: list (optional)
|
||||
- source_service: str
|
||||
- source_model: str (optional)
|
||||
|
||||
Returns:
|
||||
Created insight dict or None if failed
|
||||
"""
|
||||
url = f"{self.base_url}/api/v1/tenants/{tenant_id}/insights"
|
||||
|
||||
try:
|
||||
# Ensure tenant_id is in the data
|
||||
insight_data['tenant_id'] = str(tenant_id)
|
||||
|
||||
response = await self.client.post(url, json=insight_data)
|
||||
|
||||
if response.status_code == 201:
|
||||
logger.info(
|
||||
"Insight created successfully",
|
||||
tenant_id=str(tenant_id),
|
||||
insight_title=insight_data.get('title')
|
||||
)
|
||||
return response.json()
|
||||
else:
|
||||
logger.error(
|
||||
"Failed to create insight",
|
||||
status_code=response.status_code,
|
||||
response=response.text,
|
||||
insight_title=insight_data.get('title')
|
||||
)
|
||||
return None
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
"Error creating insight",
|
||||
error=str(e),
|
||||
tenant_id=str(tenant_id)
|
||||
)
|
||||
return None
|
||||
|
||||
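    # ------------------------------------------------------------
    # Illustrative payload for create_insight(); all values below are
    # invented examples that follow the documented schema above:
    #
    #   insight = {
    #       "type": "recommendation",
    #       "priority": "medium",
    #       "category": "inventory",
    #       "title": "Reduce safety stock for slow movers",
    #       "description": "Ten SKUs show 60+ days of cover.",
    #       "impact_type": "cost_savings",
    #       "impact_value": 1250.0,
    #       "impact_unit": "eur",
    #       "confidence": 82,
    #       "metrics_json": {"skus_affected": 10},
    #       "actionable": True,
    #       "source_service": "inventory",
    #   }
    #   created = await client.create_insight(tenant_id, insight)
    # ------------------------------------------------------------
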
    async def create_insights_bulk(
        self,
        tenant_id: UUID,
        insights: List[Dict[str, Any]]
    ) -> Dict[str, Any]:
        """
        Create multiple insights in bulk.

        Args:
            tenant_id: Tenant UUID
            insights: List of insight data dictionaries

        Returns:
            Dictionary with success/failure counts
        """
        results = {
            'total': len(insights),
            'successful': 0,
            'failed': 0,
            'created_insights': []
        }

        for insight_data in insights:
            result = await self.create_insight(tenant_id, insight_data)
            if result:
                results['successful'] += 1
                results['created_insights'].append(result)
            else:
                results['failed'] += 1

        logger.info(
            "Bulk insight creation complete",
            total=results['total'],
            successful=results['successful'],
            failed=results['failed']
        )

        return results

    async def get_insights(
        self,
        tenant_id: UUID,
        filters: Optional[Dict[str, Any]] = None
    ) -> Optional[Dict[str, Any]]:
        """
        Get insights for a tenant.

        Args:
            tenant_id: Tenant UUID
            filters: Optional filters:
                - category: str
                - priority: str
                - actionable_only: bool
                - min_confidence: int
                - page: int
                - page_size: int

        Returns:
            Paginated insights response or None if failed
        """
        url = f"{self.base_url}/api/v1/tenants/{tenant_id}/insights"

        try:
            response = await self.client.get(url, params=filters or {})

            if response.status_code == 200:
                return response.json()
            else:
                logger.error(
                    "Failed to get insights",
                    status_code=response.status_code
                )
                return None

        except Exception as e:
            logger.error("Error getting insights", error=str(e))
            return None

    async def get_orchestration_ready_insights(
        self,
        tenant_id: UUID,
        target_date: datetime,
        min_confidence: int = 70
    ) -> Optional[Dict[str, List[Dict[str, Any]]]]:
        """
        Get insights ready for orchestration workflow.

        Args:
            tenant_id: Tenant UUID
            target_date: Target date for orchestration
            min_confidence: Minimum confidence threshold

        Returns:
            Categorized insights or None if failed:
            {
                "forecast_adjustments": [...],
                "procurement_recommendations": [...],
                "production_adjustments": [...],
                "inventory_optimization": [...],
                "risk_alerts": [...]
            }
        """
        url = f"{self.base_url}/api/v1/tenants/{tenant_id}/insights/orchestration-ready"

        params = {
            'target_date': target_date.isoformat(),
            'min_confidence': min_confidence
        }

        try:
            response = await self.client.get(url, params=params)

            if response.status_code == 200:
                return response.json()
            else:
                logger.error(
                    "Failed to get orchestration insights",
                    status_code=response.status_code
                )
                return None

        except Exception as e:
            logger.error("Error getting orchestration insights", error=str(e))
            return None

    async def record_feedback(
        self,
        tenant_id: UUID,
        insight_id: UUID,
        feedback_data: Dict[str, Any]
    ) -> Optional[Dict[str, Any]]:
        """
        Record feedback for an applied insight.

        Args:
            tenant_id: Tenant UUID
            insight_id: Insight UUID
            feedback_data: Feedback data with fields:
                - success: bool
                - applied_at: datetime (optional)
                - actual_impact_value: float (optional)
                - actual_impact_unit: str (optional)
                - notes: str (optional)

        Returns:
            Feedback response or None if failed
        """
        url = f"{self.base_url}/api/v1/tenants/{tenant_id}/insights/{insight_id}/feedback"

        try:
            feedback_data['insight_id'] = str(insight_id)

            response = await self.client.post(url, json=feedback_data)

            if response.status_code in [200, 201]:
                logger.info(
                    "Feedback recorded",
                    insight_id=str(insight_id),
                    success=feedback_data.get('success')
                )
                return response.json()
            else:
                logger.error(
                    "Failed to record feedback",
                    status_code=response.status_code
                )
                return None

        except Exception as e:
            logger.error("Error recording feedback", error=str(e))
            return None

    async def get_insights_summary(
        self,
        tenant_id: UUID,
        time_period_days: int = 30
    ) -> Optional[Dict[str, Any]]:
        """
        Get aggregate metrics summary for insights.

        Args:
            tenant_id: Tenant UUID
            time_period_days: Time period for metrics (default 30 days)

        Returns:
            Summary metrics or None if failed
        """
        url = f"{self.base_url}/api/v1/tenants/{tenant_id}/insights/metrics/summary"

        params = {'time_period_days': time_period_days}

        try:
            response = await self.client.get(url, params=params)

            if response.status_code == 200:
                return response.json()
            else:
                logger.error(
                    "Failed to get insights summary",
                    status_code=response.status_code
                )
                return None

        except Exception as e:
            logger.error("Error getting insights summary", error=str(e))
            return None

    async def post_accuracy_metrics(
        self,
        tenant_id: UUID,
        validation_date: datetime,
        metrics: Dict[str, Any]
    ) -> Optional[Dict[str, Any]]:
        """
        Post forecast accuracy metrics to AI Insights Service.
        Creates an insight with accuracy validation results.

        Args:
            tenant_id: Tenant UUID
            validation_date: Date the forecasts were validated for
            metrics: Dictionary with accuracy metrics:
                - overall_mape: Mean Absolute Percentage Error
                - overall_rmse: Root Mean Squared Error
                - overall_mae: Mean Absolute Error
                - products_validated: Number of products validated
                - poor_accuracy_products: List of products with MAPE > 30%

        Returns:
            Created insight or None if failed
        """
        mape = metrics.get('overall_mape', 0)
        products_validated = metrics.get('products_validated', 0)
        poor_count = len(metrics.get('poor_accuracy_products', []))

        # Determine priority based on MAPE
        if mape > 40:
            priority = 'critical'
        elif mape > 30:
            priority = 'high'
        elif mape > 20:
            priority = 'medium'
        else:
            priority = 'low'

        # Create insight
        insight_data = {
            'type': 'insight',
            'priority': priority,
            'category': 'forecasting',
            'title': f'Forecast Accuracy Validation - {validation_date.strftime("%Y-%m-%d")}',
            'description': (
                f'Validated {products_validated} product forecasts against actual sales. '
                f'Overall MAPE: {mape:.2f}%. '
                f'{poor_count} products require retraining (MAPE > 30%).'
            ),
            'impact_type': 'accuracy',
            'impact_value': mape,
            'impact_unit': 'mape_percentage',
            'confidence': 100,  # Validation is based on actual data
            'metrics_json': {
                'validation_date': validation_date.isoformat() if hasattr(validation_date, 'isoformat') else str(validation_date),
                'overall_mape': mape,
                'overall_rmse': metrics.get('overall_rmse', 0),
                'overall_mae': metrics.get('overall_mae', 0),
                'products_validated': products_validated,
                'poor_accuracy_count': poor_count,
                'poor_accuracy_products': metrics.get('poor_accuracy_products', [])
            },
            'actionable': poor_count > 0,
            'recommendation_actions': [
                f'Retrain models for {poor_count} products with poor accuracy'
            ] if poor_count > 0 else [],
            'source_service': 'forecasting',
            'source_model': 'forecast_validation'
        }

        return await self.create_insight(tenant_id, insight_data)

    async def health_check(self) -> bool:
        """
        Check if AI Insights Service is healthy.

        Returns:
            True if healthy, False otherwise
        """
        url = f"{self.base_url}/health"

        try:
            response = await self.client.get(url)
            return response.status_code == 200

        except Exception as e:
            logger.error("AI Insights Service health check failed", error=str(e))
            return False
220
shared/clients/alert_processor_client.py
Executable file
@@ -0,0 +1,220 @@
# shared/clients/alert_processor_client.py
"""
Alert Processor Service Client - Inter-service communication
Handles communication with the alert processor service for alert lifecycle management
"""

import structlog
from typing import Dict, Any, List, Optional
from uuid import UUID

from shared.clients.base_service_client import BaseServiceClient
from shared.config.base import BaseServiceSettings

logger = structlog.get_logger()


class AlertProcessorClient(BaseServiceClient):
    """Client for communicating with the alert processor service via gateway"""

    def __init__(self, config: BaseServiceSettings, calling_service_name: str = "unknown"):
        super().__init__(calling_service_name, config)

    def get_service_base_path(self) -> str:
        """Return the base path for alert processor service APIs"""
        return "/api/v1"

    # ================================================================
    # ALERT LIFECYCLE MANAGEMENT
    # ================================================================

    async def acknowledge_alerts_by_metadata(
        self,
        tenant_id: UUID,
        alert_type: str,
        metadata_filter: Dict[str, Any],
        acknowledged_by: Optional[str] = None
    ) -> Dict[str, Any]:
        """
        Acknowledge all active alerts matching alert type and metadata.

        Used when user actions trigger alert acknowledgment (e.g., approving a PO).

        Args:
            tenant_id: Tenant UUID
            alert_type: Alert type to filter (e.g., 'po_approval_needed')
            metadata_filter: Metadata fields to match (e.g., {'po_id': 'uuid'})
            acknowledged_by: Optional user ID who acknowledged

        Returns:
            {
                "success": true,
                "acknowledged_count": 2,
                "alert_ids": ["uuid1", "uuid2"]
            }
        """
        try:
            payload = {
                "alert_type": alert_type,
                "metadata_filter": metadata_filter
            }

            if acknowledged_by:
                payload["acknowledged_by"] = acknowledged_by

            result = await self.post(
                f"tenants/{tenant_id}/alerts/acknowledge-by-metadata",
                tenant_id=str(tenant_id),
                data=payload
            )

            if result and result.get("success"):
                logger.info(
                    "Acknowledged alerts by metadata",
                    tenant_id=str(tenant_id),
                    alert_type=alert_type,
                    count=result.get("acknowledged_count", 0),
                    calling_service=self.calling_service_name
                )

            return result or {"success": False, "acknowledged_count": 0, "alert_ids": []}

        except Exception as e:
            logger.error(
                "Error acknowledging alerts by metadata",
                error=str(e),
                tenant_id=str(tenant_id),
                alert_type=alert_type,
                metadata_filter=metadata_filter,
                calling_service=self.calling_service_name
            )
            return {"success": False, "acknowledged_count": 0, "alert_ids": [], "error": str(e)}

    async def resolve_alerts_by_metadata(
        self,
        tenant_id: UUID,
        alert_type: str,
        metadata_filter: Dict[str, Any],
        resolved_by: Optional[str] = None
    ) -> Dict[str, Any]:
        """
        Resolve all active alerts matching alert type and metadata.

        Used when user actions complete an alert's underlying issue (e.g., marking delivery received).

        Args:
            tenant_id: Tenant UUID
            alert_type: Alert type to filter (e.g., 'delivery_overdue')
            metadata_filter: Metadata fields to match (e.g., {'po_id': 'uuid'})
            resolved_by: Optional user ID who resolved

        Returns:
            {
                "success": true,
                "resolved_count": 1,
                "alert_ids": ["uuid1"]
            }
        """
        try:
            payload = {
                "alert_type": alert_type,
                "metadata_filter": metadata_filter
            }

            if resolved_by:
                payload["resolved_by"] = resolved_by

            result = await self.post(
                f"tenants/{tenant_id}/alerts/resolve-by-metadata",
                tenant_id=str(tenant_id),
                data=payload
            )

            if result and result.get("success"):
                logger.info(
                    "Resolved alerts by metadata",
                    tenant_id=str(tenant_id),
                    alert_type=alert_type,
                    count=result.get("resolved_count", 0),
                    calling_service=self.calling_service_name
                )

            return result or {"success": False, "resolved_count": 0, "alert_ids": []}

        except Exception as e:
            logger.error(
                "Error resolving alerts by metadata",
                error=str(e),
                tenant_id=str(tenant_id),
                alert_type=alert_type,
                metadata_filter=metadata_filter,
                calling_service=self.calling_service_name
            )
            return {"success": False, "resolved_count": 0, "alert_ids": [], "error": str(e)}

    async def get_active_alerts(
        self,
        tenant_id: UUID,
        priority_level: Optional[str] = None,
        limit: int = 100
    ) -> List[Dict[str, Any]]:
        """
        Get active alerts for a tenant.

        Args:
            tenant_id: Tenant UUID
            priority_level: Optional priority filter (critical, important, standard, info)
            limit: Maximum number of alerts to return

        Returns:
            List of alert dictionaries
        """
        try:
            params = {
                "status": "active",
                "limit": limit
            }

            if priority_level:
                params["priority_level"] = priority_level

            result = await self.get(
                f"tenants/{tenant_id}/alerts",
                tenant_id=str(tenant_id),
                params=params
            )

            alerts = result.get("alerts", []) if isinstance(result, dict) else []

            logger.info(
                "Retrieved active alerts",
                tenant_id=str(tenant_id),
                count=len(alerts),
                calling_service=self.calling_service_name
            )

            return alerts

        except Exception as e:
            logger.error(
                "Error fetching active alerts",
                error=str(e),
                tenant_id=str(tenant_id),
                calling_service=self.calling_service_name
            )
            return []


# Factory function for easy import
def get_alert_processor_client(config: BaseServiceSettings, calling_service_name: str) -> AlertProcessorClient:
    """
    Factory function to create an AlertProcessorClient instance.

    Args:
        config: Service configuration with gateway URL
        calling_service_name: Name of the service making the call (for logging)

    Returns:
        AlertProcessorClient instance
    """
    return AlertProcessorClient(config, calling_service_name)
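
# ----------------------------------------------------------------
# Illustrative sketch: acknowledging PO-approval alerts right after
# the triggering user action. `config`, the tenant UUID, the PO id,
# and the user id are placeholders supplied by the calling service.
# ----------------------------------------------------------------
async def _example_on_po_approved(config: BaseServiceSettings, tenant_id: UUID,
                                  po_id: str, user_id: str) -> int:
    client = get_alert_processor_client(config, "orders")
    result = await client.acknowledge_alerts_by_metadata(
        tenant_id=tenant_id,
        alert_type="po_approval_needed",
        metadata_filter={"po_id": po_id},
        acknowledged_by=user_id,
    )
    # The fallback dict returned on failure also carries this key
    return result.get("acknowledged_count", 0)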
259
shared/clients/alerts_client.py
Executable file
@@ -0,0 +1,259 @@
# shared/clients/alerts_client.py
"""
Alerts Service Client for Inter-Service Communication
Provides access to alert processor service from other services
"""

import structlog
from typing import Dict, Any, Optional, List
from uuid import UUID
from shared.clients.base_service_client import BaseServiceClient
from shared.config.base import BaseServiceSettings

logger = structlog.get_logger()


class AlertsServiceClient(BaseServiceClient):
    """Client for communicating with the Alert Processor Service"""

    def __init__(self, config: BaseServiceSettings, calling_service_name: str = "unknown"):
        super().__init__(calling_service_name, config)

    def get_service_base_path(self) -> str:
        return "/api/v1"

    # ================================================================
    # DASHBOARD METHODS
    # ================================================================

    async def get_alerts_summary(
        self,
        tenant_id: str
    ) -> Optional[Dict[str, Any]]:
        """
        Get alerts summary for dashboard health status

        Args:
            tenant_id: Tenant ID

        Returns:
            Dict with counts by severity:
            {
                "total_count": int,
                "active_count": int,
                "critical_count": int,  # Maps to "urgent" severity
                "high_count": int,
                "medium_count": int,
                "low_count": int,
                "resolved_count": int,
                "acknowledged_count": int
            }
        """
        try:
            # Gateway routes /tenants/{tenant_id}/alerts/... to alert_processor service
            return await self.get(
                "/alerts/summary",
                tenant_id=tenant_id
            )
        except Exception as e:
            logger.error("Error fetching alerts summary", error=str(e), tenant_id=tenant_id)
            return None

    async def get_critical_alerts(
        self,
        tenant_id: str,
        limit: int = 20
    ) -> Optional[Dict[str, Any]]:
        """
        Get critical/urgent alerts for dashboard

        Note: "critical" in dashboard context maps to "urgent" severity in alert_processor

        Args:
            tenant_id: Tenant ID
            limit: Maximum number of alerts to return

        Returns:
            Dict with:
            {
                "alerts": [...],
                "total": int,
                "limit": int,
                "offset": int
            }
        """
        try:
            # Gateway routes /tenants/{tenant_id}/alerts/... to alert_processor service
            # "critical" in dashboard = "urgent" severity in alert_processor
            return await self.get(
                "/alerts",
                tenant_id=tenant_id,
                params={"severity": "urgent", "resolved": False, "limit": limit}
            )
        except Exception as e:
            logger.error("Error fetching critical alerts", error=str(e), tenant_id=tenant_id)
            return None

    async def get_alerts(
        self,
        tenant_id: str,
        priority_level: Optional[str] = None,
        status: Optional[str] = None,
        resolved: Optional[bool] = None,
        limit: int = 100,
        offset: int = 0
    ) -> Optional[Dict[str, Any]]:
        """
        Get alerts with optional filters

        Args:
            tenant_id: Tenant ID
            priority_level: Filter by priority level (critical, important, standard, info)
            status: Filter by status (active, resolved, acknowledged, ignored)
            resolved: Filter by resolved status (None = all, True = resolved only, False = unresolved only)
            limit: Maximum number of alerts
            offset: Pagination offset

        Returns:
            Dict with:
            {
                "alerts": [...],
                "total": int,
                "limit": int,
                "offset": int
            }
        """
        try:
            params = {"limit": limit, "offset": offset}
            if priority_level:
                params["priority_level"] = priority_level
            if status:
                params["status"] = status
            if resolved is not None:
                params["resolved"] = resolved

            return await self.get(
                "/alerts",
                tenant_id=tenant_id,
                params=params
            )
        except Exception as e:
            logger.error("Error fetching alerts",
                         error=str(e), tenant_id=tenant_id)
            return None

    async def get_alerts_by_severity(
        self,
        tenant_id: str,
        severity: str,
        limit: int = 100,
        resolved: Optional[bool] = None
    ) -> Optional[Dict[str, Any]]:
        """
        Get alerts filtered by severity

        Args:
            tenant_id: Tenant ID
            severity: Severity level (low, medium, high, urgent)
            limit: Maximum number of alerts
            resolved: Filter by resolved status (None = all, True = resolved only, False = unresolved only)

        Returns:
            Dict with alerts list and metadata
        """
        try:
            params = {"severity": severity, "limit": limit}
            if resolved is not None:
                params["resolved"] = resolved

            return await self.get(
                "/alerts",
                tenant_id=tenant_id,
                params=params
            )
        except Exception as e:
            logger.error("Error fetching alerts by severity",
                         error=str(e), severity=severity, tenant_id=tenant_id)
            return None

    async def get_alert_by_id(
        self,
        tenant_id: str,
        alert_id: str
    ) -> Optional[Dict[str, Any]]:
        """
        Get a specific alert by ID

        Args:
            tenant_id: Tenant ID
            alert_id: Alert UUID

        Returns:
            Dict with alert details
        """
        try:
            return await self.get(
                f"/alerts/{alert_id}",
                tenant_id=tenant_id
            )
        except Exception as e:
            logger.error("Error fetching alert", error=str(e),
                         alert_id=alert_id, tenant_id=tenant_id)
            return None

    async def get_dashboard_analytics(
        self,
        tenant_id: str,
        days: int = 7
    ) -> Optional[Dict[str, Any]]:
        """
        Get dashboard analytics including prevented issues and estimated savings

        Args:
            tenant_id: Tenant ID
            days: Number of days to analyze (default: 7)

        Returns:
            Dict with analytics data:
            {
                "period_days": int,
                "total_alerts": int,
                "active_alerts": int,
                "ai_handling_rate": float,
                "prevented_issues_count": int,
                "estimated_savings_eur": float,
                "total_financial_impact_at_risk_eur": float,
                "priority_distribution": {...},
                "type_class_distribution": {...},
                "active_by_type_class": {...},
                "period_comparison": {...}
            }
        """
        try:
            return await self.get(
                "/alerts/analytics/dashboard",
                tenant_id=tenant_id,
                params={"days": days}
            )
        except Exception as e:
            logger.error("Error fetching dashboard analytics", error=str(e), tenant_id=tenant_id)
            return None

    # ================================================================
    # UTILITY METHODS
    # ================================================================

    async def health_check(self) -> bool:
        """Check if alerts service is healthy"""
        try:
            result = await self.get("../health")  # Health endpoint is not tenant-scoped
            return result is not None
        except Exception as e:
            logger.error("Alerts service health check failed", error=str(e))
            return False


# Factory function for dependency injection
def create_alerts_client(config: BaseServiceSettings, calling_service_name: str = "unknown") -> AlertsServiceClient:
    """Create alerts service client instance"""
    return AlertsServiceClient(config, calling_service_name)
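
# ----------------------------------------------------------------
# Illustrative sketch: a dashboard health panel built from the two
# calls above. `config` is a placeholder for the calling service's
# settings; the returned shape is an example, not a fixed contract.
# ----------------------------------------------------------------
async def _example_health_panel(config: BaseServiceSettings, tenant_id: str) -> Dict[str, Any]:
    client = create_alerts_client(config, "dashboard")
    summary = await client.get_alerts_summary(tenant_id) or {}
    critical = await client.get_critical_alerts(tenant_id, limit=5) or {}
    return {
        "active": summary.get("active_count", 0),
        "critical": summary.get("critical_count", 0),
        "top_urgent": critical.get("alerts", []),
    }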
263
shared/clients/auth_client.py
Executable file
@@ -0,0 +1,263 @@
# shared/clients/auth_client.py
|
||||
"""
|
||||
Auth Service Client for Inter-Service Communication
|
||||
Provides methods to interact with the authentication/onboarding service
|
||||
"""
|
||||
|
||||
from typing import Optional, Dict, Any, List
|
||||
import structlog
|
||||
|
||||
from shared.clients.base_service_client import BaseServiceClient
|
||||
from shared.config.base import BaseServiceSettings
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
|
||||
class AuthServiceClient(BaseServiceClient):
|
||||
"""Client for interacting with the Auth Service"""
|
||||
|
||||
def __init__(self, config: BaseServiceSettings):
|
||||
super().__init__("auth", config)
|
||||
|
||||
def get_service_base_path(self) -> str:
|
||||
"""Return the base path for auth service APIs"""
|
||||
return "/api/v1/auth"
|
||||
|
||||
async def get_user_onboarding_progress(self, user_id: str) -> Optional[Dict[str, Any]]:
|
||||
"""
|
||||
Get user's onboarding progress including step data
|
||||
|
||||
Args:
|
||||
user_id: User ID to fetch progress for
|
||||
|
||||
Returns:
|
||||
Dict with user progress including steps with data, or None if failed
|
||||
"""
|
||||
try:
|
||||
# Use the service endpoint that accepts user_id as parameter
|
||||
result = await self.get(f"/users/{user_id}/onboarding/progress")
|
||||
|
||||
if result:
|
||||
logger.info("Retrieved user onboarding progress",
|
||||
user_id=user_id,
|
||||
current_step=result.get("current_step"))
|
||||
return result
|
||||
else:
|
||||
logger.warning("No onboarding progress found",
|
||||
user_id=user_id)
|
||||
return None
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to get user onboarding progress",
|
||||
user_id=user_id,
|
||||
error=str(e))
|
||||
return None
|
||||
|
||||
async def get_user_step_data(self, user_id: str, step_name: str) -> Optional[Dict[str, Any]]:
|
||||
"""
|
||||
Get data for a specific onboarding step
|
||||
|
||||
Args:
|
||||
user_id: User ID
|
||||
step_name: Name of the step (e.g., "user_registered")
|
||||
|
||||
Returns:
|
||||
Step data dictionary or None if not found
|
||||
"""
|
||||
try:
|
||||
progress = await self.get_user_onboarding_progress(user_id)
|
||||
|
||||
if not progress:
|
||||
logger.warning("No progress data returned",
|
||||
user_id=user_id)
|
||||
return None
|
||||
|
||||
logger.debug("Retrieved progress data",
|
||||
user_id=user_id,
|
||||
steps_count=len(progress.get("steps", [])),
|
||||
current_step=progress.get("current_step"))
|
||||
|
||||
# Find the specific step
|
||||
for step in progress.get("steps", []):
|
||||
if step.get("step_name") == step_name:
|
||||
step_data = step.get("data", {})
|
||||
logger.info("Found step data",
|
||||
user_id=user_id,
|
||||
step_name=step_name,
|
||||
data_keys=list(step_data.keys()) if step_data else [],
|
||||
has_subscription_plan="subscription_plan" in step_data)
|
||||
return step_data
|
||||
|
||||
logger.warning("Step not found in progress",
|
||||
user_id=user_id,
|
||||
step_name=step_name,
|
||||
available_steps=[s.get("step_name") for s in progress.get("steps", [])])
|
||||
return None
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to get step data",
|
||||
user_id=user_id,
|
||||
step_name=step_name,
|
||||
error=str(e))
|
||||
return None
|
||||
|
||||
async def get_subscription_plan_from_registration(self, user_id: str) -> str:
|
||||
"""
|
||||
Get the subscription plan selected during user registration
|
||||
|
||||
Args:
|
||||
user_id: User ID
|
||||
|
||||
Returns:
|
||||
Plan name (e.g., "starter", "professional", "enterprise") or "starter" as default
|
||||
"""
|
||||
try:
|
||||
step_data = await self.get_user_step_data(user_id, "user_registered")
|
||||
|
||||
if step_data and "subscription_plan" in step_data:
|
||||
plan = step_data["subscription_plan"]
|
||||
logger.info("Retrieved subscription plan from registration",
|
||||
user_id=user_id,
|
||||
plan=plan)
|
||||
return plan
|
||||
else:
|
||||
logger.info("No subscription plan in registration data, using default",
|
||||
user_id=user_id,
|
||||
default_plan="starter")
|
||||
return "starter"
|
||||
|
||||
except Exception as e:
|
||||
logger.warning("Failed to retrieve subscription plan, using default",
|
||||
user_id=user_id,
|
||||
error=str(e),
|
||||
default_plan="starter")
|
||||
return "starter"
|
||||
|
||||
async def create_user_by_owner(self, user_data: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""
|
||||
Create a new user account via the auth service (owner/admin only - pilot phase).
|
||||
|
||||
This method calls the auth service endpoint that allows tenant owners
|
||||
to directly create users with passwords during the pilot phase.
|
||||
|
||||
Args:
|
||||
user_data: Dictionary containing:
|
||||
- email: User email (required)
|
||||
- full_name: Full name (required)
|
||||
- password: Password (required)
|
||||
- phone: Phone number (optional)
|
||||
- role: User role (optional, default: "user")
|
||||
- language: Language preference (optional, default: "es")
|
||||
- timezone: Timezone (optional, default: "Europe/Madrid")
|
||||
|
||||
Returns:
|
||||
Dict with created user data including user ID
|
||||
|
||||
Raises:
|
||||
Exception if user creation fails
|
||||
"""
|
||||
try:
|
||||
logger.info(
|
||||
"Creating user via auth service",
|
||||
email=user_data.get("email"),
|
||||
role=user_data.get("role", "user")
|
||||
)
|
||||
|
||||
result = await self.post("/users/create-by-owner", user_data)
|
||||
|
||||
if result and result.get("id"):
|
||||
logger.info(
|
||||
"User created successfully via auth service",
|
||||
user_id=result.get("id"),
|
||||
email=result.get("email")
|
||||
)
|
||||
return result
|
||||
else:
|
||||
logger.error("User creation returned no user ID")
|
||||
raise Exception("User creation failed: No user ID returned")
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
"Failed to create user via auth service",
|
||||
email=user_data.get("email"),
|
||||
error=str(e)
|
||||
)
|
||||
raise
|
||||
|
||||
async def get_user_details(self, user_id: str) -> Optional[Dict[str, Any]]:
|
||||
"""
|
||||
Get detailed user information including payment details
|
||||
|
||||
Args:
|
||||
user_id: User ID to fetch details for
|
||||
|
||||
Returns:
|
||||
Dict with user details including:
|
||||
- id, email, full_name, is_active, is_verified
|
||||
- phone, language, timezone, role
|
||||
- payment_customer_id, default_payment_method_id
|
||||
- created_at, last_login, etc.
|
||||
Returns None if user not found or request fails
|
||||
"""
|
||||
try:
|
||||
logger.info("Fetching user details from auth service",
|
||||
user_id=user_id)
|
||||
|
||||
result = await self.get(f"/users/{user_id}")
|
||||
|
||||
if result and result.get("id"):
|
||||
logger.info("Successfully retrieved user details",
|
||||
user_id=user_id,
|
||||
email=result.get("email"),
|
||||
has_payment_info="payment_customer_id" in result)
|
||||
return result
|
||||
else:
|
||||
logger.warning("No user details found",
|
||||
user_id=user_id)
|
||||
return None
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to get user details from auth service",
|
||||
user_id=user_id,
|
||||
error=str(e))
|
||||
return None
|
||||
|
||||
async def update_user_tenant_id(self, user_id: str, tenant_id: str) -> bool:
|
||||
"""
|
||||
Update the user's tenant_id after tenant registration
|
||||
|
||||
Args:
|
||||
user_id: User ID to update
|
||||
tenant_id: Tenant ID to link to the user
|
||||
|
||||
Returns:
|
||||
True if successful, False otherwise
|
||||
"""
|
||||
try:
|
||||
logger.info("Updating user tenant_id",
|
||||
user_id=user_id,
|
||||
tenant_id=tenant_id)
|
||||
|
||||
result = await self.patch(
|
||||
f"/users/{user_id}/tenant",
|
||||
{"tenant_id": tenant_id}
|
||||
)
|
||||
|
||||
if result:
|
||||
logger.info("Successfully updated user tenant_id",
|
||||
user_id=user_id,
|
||||
tenant_id=tenant_id)
|
||||
return True
|
||||
else:
|
||||
logger.warning("Failed to update user tenant_id",
|
||||
user_id=user_id,
|
||||
tenant_id=tenant_id)
|
||||
return False
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Error updating user tenant_id",
|
||||
user_id=user_id,
|
||||
tenant_id=tenant_id,
|
||||
error=str(e))
|
||||
return False
|
||||
|
||||
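Taken together, these methods cover the registration hand-off: create the owner's account, link it to the new tenant, then read back the plan chosen during onboarding. A minimal sketch of that flow, assuming `auth_client` is any client exposing the methods above (the function name and literal values are illustrative, not part of this commit):

```python
# Hypothetical wiring; auth_client is any client exposing the methods above.
async def register_tenant_owner(auth_client, tenant_id: str) -> str:
    """Create the owner account, link it to the tenant, and read back the plan."""
    user = await auth_client.create_user_by_owner({
        "email": "owner@example.com",
        "full_name": "Example Owner",
        "password": "change-me",  # direct passwords are pilot-phase only
        "role": "admin",
    })
    if not await auth_client.update_user_tenant_id(user["id"], tenant_id):
        raise RuntimeError("User created but tenant link failed")
    # Falls back to "starter" when onboarding recorded no plan
    return await auth_client.get_subscription_plan_from_registration(user["id"])
```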
438
shared/clients/base_service_client.py
Executable file
@@ -0,0 +1,438 @@
# shared/clients/base_service_client.py
"""
Base Service Client for Inter-Service Communication
Provides a reusable foundation for all service-to-service API calls
"""

import time
import asyncio
import httpx
import structlog
from abc import ABC, abstractmethod
from typing import Dict, Any, Optional, List, Union
from urllib.parse import urljoin

from shared.auth.jwt_handler import JWTHandler
from shared.config.base import BaseServiceSettings
from shared.clients.circuit_breaker import CircuitBreaker, CircuitBreakerOpenException

logger = structlog.get_logger()


class ServiceAuthenticator:
    """Handles service-to-service authentication via gateway"""

    def __init__(self, service_name: str, config: BaseServiceSettings):
        self.service_name = service_name
        self.config = config
        self.jwt_handler = JWTHandler(config.JWT_SECRET_KEY)
        self._cached_token = None
        self._token_expires_at = 0
        self._cached_tenant_id = None  # Track tenant context for cached tokens

    async def get_service_token(self, tenant_id: Optional[str] = None) -> str:
        """Get a valid service token, using cache when possible"""
        current_time = int(time.time())

        # Return cached token if still valid (with 5 min buffer) and tenant context matches
        if (self._cached_token and
                self._token_expires_at > current_time + 300 and
                (tenant_id is None or self._cached_tenant_id == tenant_id)):
            return self._cached_token

        # Create new service token using unified JWT handler
        try:
            token = self.jwt_handler.create_service_token(
                service_name=self.service_name,
                tenant_id=tenant_id
            )

            # Extract expiration from token for caching (signature check skipped:
            # we just minted this token ourselves)
            from jose import jwt
            payload = jwt.decode(token, self.jwt_handler.secret_key, algorithms=[self.jwt_handler.algorithm], options={"verify_signature": False})
            token_expires_at = payload.get("exp", current_time + 3600)

            self._cached_token = token
            self._token_expires_at = token_expires_at
            self._cached_tenant_id = tenant_id  # Store tenant context for caching

            logger.debug("Created new service token", service=self.service_name, expires_at=token_expires_at, tenant_id=tenant_id)
            return token

        except Exception as e:
            logger.error(f"Failed to create service token: {e}", service=self.service_name)
            raise ValueError(f"Service token creation failed: {e}")

    def get_request_headers(self, tenant_id: Optional[str] = None) -> Dict[str, str]:
        """Get standard headers for service requests"""
        headers = {
            "X-Service": f"{self.service_name}-service",
            "User-Agent": f"{self.service_name}-service/1.0.0"
        }

        if tenant_id:
            headers["x-tenant-id"] = str(tenant_id)

        return headers

class BaseServiceClient(ABC):
    """
    Base class for all inter-service communication clients
    Provides common functionality for API calls through the gateway
    """

    def __init__(self, service_name: str, config: BaseServiceSettings):
        self.service_name = service_name
        self.config = config
        self.gateway_url = config.GATEWAY_URL
        self.authenticator = ServiceAuthenticator(service_name, config)

        # HTTP client configuration
        self.timeout = config.HTTP_TIMEOUT
        self.retries = config.HTTP_RETRIES
        self.retry_delay = config.HTTP_RETRY_DELAY

        # Circuit breaker for fault tolerance
        self.circuit_breaker = CircuitBreaker(
            service_name=f"{service_name}-client",
            failure_threshold=5,
            timeout=60,
            success_threshold=2
        )

    @abstractmethod
    def get_service_base_path(self) -> str:
        """Return the base path for this service's APIs"""
        pass

    async def _make_request(
        self,
        method: str,
        endpoint: str,
        tenant_id: Optional[str] = None,
        data: Optional[Dict[str, Any]] = None,
        params: Optional[Dict[str, Any]] = None,
        headers: Optional[Dict[str, str]] = None,
        timeout: Optional[Union[int, httpx.Timeout]] = None
    ) -> Optional[Union[Dict[str, Any], List[Dict[str, Any]]]]:
        """
        Make an authenticated request to another service via gateway with circuit breaker protection.

        Args:
            method: HTTP method (GET, POST, PUT, DELETE)
            endpoint: API endpoint (will be prefixed with service base path)
            tenant_id: Optional tenant ID for tenant-scoped requests
            data: Request body data (for POST/PUT)
            params: Query parameters
            headers: Additional headers
            timeout: Request timeout override

        Returns:
            Response data or None if request failed
        """
        try:
            # Wrap request in circuit breaker
            return await self.circuit_breaker.call(
                self._do_request,
                method,
                endpoint,
                tenant_id,
                data,
                params,
                headers,
                timeout
            )
        except CircuitBreakerOpenException as e:
            logger.error(
                "Circuit breaker open - request rejected",
                service=self.service_name,
                endpoint=endpoint,
                error=str(e)
            )
            return None
        except Exception as e:
            logger.error(
                "Unexpected error in request",
                service=self.service_name,
                endpoint=endpoint,
                error=str(e)
            )
            return None

    async def _do_request(
        self,
        method: str,
        endpoint: str,
        tenant_id: Optional[str] = None,
        data: Optional[Dict[str, Any]] = None,
        params: Optional[Dict[str, Any]] = None,
        headers: Optional[Dict[str, str]] = None,
        timeout: Optional[Union[int, httpx.Timeout]] = None
    ) -> Optional[Union[Dict[str, Any], List[Dict[str, Any]]]]:
        """
        Internal method to execute HTTP request with retries.
        Called by _make_request through circuit breaker.
        """
        try:
            # Get service token with tenant context for tenant-scoped requests
            token = await self.authenticator.get_service_token(tenant_id)

            # Build headers
            request_headers = self.authenticator.get_request_headers(tenant_id)
            request_headers["Authorization"] = f"Bearer {token}"
            request_headers["Content-Type"] = "application/json"

            # Merge caller-supplied headers; this also propagates X-Request-ID
            # for distributed tracing when the caller provides one
            if headers:
                request_headers.update(headers)

            # Build URL
            base_path = self.get_service_base_path()
            if tenant_id:
                # For tenant-scoped endpoints
                full_endpoint = f"{base_path}/tenants/{tenant_id}/{endpoint.lstrip('/')}"
            else:
                # For non-tenant endpoints
                full_endpoint = f"{base_path}/{endpoint.lstrip('/')}"

            url = urljoin(self.gateway_url, full_endpoint)

            # Debug logging for URL construction
            logger.debug(
                "Making service request",
                service=self.service_name,
                method=method,
                url=url,
                tenant_id=tenant_id,
                endpoint=endpoint,
                params=params
            )

            # Make request with retries
            for attempt in range(self.retries + 1):
                try:
                    # Handle different timeout configurations
                    if isinstance(timeout, httpx.Timeout):
                        client_timeout = timeout
                    else:
                        client_timeout = timeout or self.timeout

                    async with httpx.AsyncClient(timeout=client_timeout) as client:
                        response = await client.request(
                            method=method,
                            url=url,
                            json=data,
                            params=params,
                            headers=request_headers
                        )

                        if response.status_code in (200, 201):
                            return response.json()
                        elif response.status_code == 204:
                            return {}  # No content success
                        elif response.status_code == 401:
                            # Token might be expired: clear the cache, mint a fresh
                            # token, and refresh the Authorization header before retrying
                            if attempt == 0:
                                self.authenticator._cached_token = None
                                token = await self.authenticator.get_service_token(tenant_id)
                                request_headers["Authorization"] = f"Bearer {token}"
                                logger.warning("Token expired, retrying with new token")
                                continue
                            else:
                                logger.error("Authentication failed after retry")
                                return None
                        elif response.status_code == 404:
                            logger.warning(
                                "Endpoint not found",
                                url=url,
                                service=self.service_name,
                                endpoint=endpoint,
                                constructed_endpoint=full_endpoint,
                                tenant_id=tenant_id
                            )
                            return None
                        else:
                            try:
                                error_json = response.json()
                                error_detail = error_json.get('detail', f"HTTP {response.status_code}")
                            except Exception:
                                error_detail = f"HTTP {response.status_code}: {response.text}"

                            logger.error(f"Request failed: {error_detail}",
                                         url=url, status_code=response.status_code)
                            return None

                except httpx.TimeoutException:
                    if attempt < self.retries:
                        logger.warning(f"Request timeout, retrying ({attempt + 1}/{self.retries})")
                        await asyncio.sleep(self.retry_delay * (2 ** attempt))  # Exponential backoff
                        continue
                    else:
                        logger.error(f"Request timeout after {self.retries} retries", url=url)
                        return None

                except Exception as e:
                    if attempt < self.retries:
                        logger.warning(f"Request failed, retrying ({attempt + 1}/{self.retries}): {e}")
                        await asyncio.sleep(self.retry_delay * (2 ** attempt))
                        continue
                    else:
                        logger.error(f"Request failed after {self.retries} retries: {e}", url=url)
                        return None

        except Exception as e:
            logger.error(f"Unexpected error in _do_request: {e}")
            return None

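The retry loop backs off exponentially via `retry_delay * (2 ** attempt)`. A quick illustration of the resulting schedule (the config values below are examples, not defaults from this commit):

```python
# Backoff schedule for retry_delay * (2 ** attempt); values are examples only.
retry_delay, retries = 0.5, 3
delays = [retry_delay * (2 ** attempt) for attempt in range(retries)]
print(delays)  # [0.5, 1.0, 2.0] -> 3.5s of total extra wait before the final failure
```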
    async def _make_paginated_request(
        self,
        endpoint: str,
        tenant_id: Optional[str] = None,
        params: Optional[Dict[str, Any]] = None,
        page_size: int = 1000,
        max_pages: int = 100,
        timeout: Optional[Union[int, httpx.Timeout]] = None
    ) -> List[Dict[str, Any]]:
        """
        Make paginated GET requests to fetch all records
        Handles both direct list and paginated object responses

        Args:
            endpoint: API endpoint
            tenant_id: Optional tenant ID
            params: Base query parameters
            page_size: Records per page (default 1000)
            max_pages: Maximum pages to fetch (safety limit)
            timeout: Request timeout override

        Returns:
            List of all records from all pages
        """
        all_records = []
        page = 0
        base_params = params or {}

        logger.info(f"Starting paginated request to {endpoint}",
                    tenant_id=tenant_id, page_size=page_size)

        while page < max_pages:
            # Prepare pagination parameters
            page_params = base_params.copy()
            page_params.update({
                "limit": page_size,
                "offset": page * page_size
            })

            logger.debug(f"Fetching page {page + 1} (offset: {page * page_size})",
                         tenant_id=tenant_id)

            # Make request for this page
            result = await self._make_request(
                "GET",
                endpoint,
                tenant_id=tenant_id,
                params=page_params,
                timeout=timeout
            )

            if result is None:
                logger.error(f"Failed to fetch page {page + 1}", tenant_id=tenant_id)
                break

            # Handle different response formats
            if isinstance(result, list):
                # Direct list response (no pagination metadata)
                records = result
                logger.debug(f"Retrieved {len(records)} records from page {page + 1} (direct list)")

                if len(records) == 0:
                    logger.info("No records in response, pagination complete")
                    break
                elif len(records) < page_size:
                    # Got fewer than requested, this is the last page
                    all_records.extend(records)
                    logger.info(f"Final page: retrieved {len(records)} records, total: {len(all_records)}")
                    break
                else:
                    # Got full page, there might be more
                    all_records.extend(records)
                    logger.debug(f"Full page retrieved: {len(records)} records, continuing to next page")

            elif isinstance(result, dict):
                # Paginated response format
                records = result.get('records', result.get('data', []))
                total_available = result.get('total', 0)

                logger.debug(f"Retrieved {len(records)} records from page {page + 1} (paginated response)")

                if not records:
                    logger.info("No more records found in paginated response")
                    break

                all_records.extend(records)

                # Check if we've got all available records
                if len(all_records) >= total_available:
                    logger.info(f"Retrieved all available records: {len(all_records)}/{total_available}")
                    break

            else:
                logger.warning(f"Unexpected response format: {type(result)}")
                break

            page += 1

        if page >= max_pages:
            logger.warning(f"Reached maximum page limit ({max_pages}), stopping pagination")

        logger.info(f"Pagination complete: fetched {len(all_records)} total records",
                    tenant_id=tenant_id, pages_fetched=page)

        return all_records

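A typical call site goes through the `get_paginated` wrapper below. A hedged sketch, assuming a concrete client and a "sales" endpoint (both illustrative):

```python
# Illustrative only: the "sales" endpoint and client variable are assumptions.
async def load_all_completed_sales(client, tenant_id: str):
    # Fetches every page up to the safety limit; returns a flat list of records.
    return await client.get_paginated(
        "sales",
        tenant_id=tenant_id,
        params={"status": "completed"},
        page_size=500,   # smaller pages for memory-sensitive callers
        max_pages=50,
    )
```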
    async def get(self, endpoint: str, tenant_id: Optional[str] = None, params: Optional[Dict[str, Any]] = None) -> Optional[Dict[str, Any]]:
        """Make a GET request"""
        return await self._make_request("GET", endpoint, tenant_id=tenant_id, params=params)

    async def get_paginated(
        self,
        endpoint: str,
        tenant_id: Optional[str] = None,
        params: Optional[Dict[str, Any]] = None,
        page_size: int = 1000,
        max_pages: int = 100,
        timeout: Optional[Union[int, httpx.Timeout]] = None
    ) -> List[Dict[str, Any]]:
        """Make a paginated GET request to fetch all records"""
        return await self._make_paginated_request(
            endpoint,
            tenant_id=tenant_id,
            params=params,
            page_size=page_size,
            max_pages=max_pages,
            timeout=timeout
        )

    async def post(self, endpoint: str, data: Optional[Dict[str, Any]] = None, tenant_id: Optional[str] = None, params: Optional[Dict[str, Any]] = None) -> Optional[Dict[str, Any]]:
        """Make a POST request with optional query parameters"""
        return await self._make_request("POST", endpoint, tenant_id=tenant_id, data=data, params=params)

    async def put(self, endpoint: str, data: Dict[str, Any], tenant_id: Optional[str] = None) -> Optional[Dict[str, Any]]:
        """Make a PUT request"""
        return await self._make_request("PUT", endpoint, tenant_id=tenant_id, data=data)

    async def patch(self, endpoint: str, data: Dict[str, Any], tenant_id: Optional[str] = None) -> Optional[Dict[str, Any]]:
        """Make a PATCH request"""
        return await self._make_request("PATCH", endpoint, tenant_id=tenant_id, data=data)

    async def delete(self, endpoint: str, tenant_id: Optional[str] = None) -> Optional[Dict[str, Any]]:
        """Make a DELETE request"""
        return await self._make_request("DELETE", endpoint, tenant_id=tenant_id)
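Concrete clients only need to name their base path; auth, retries, and circuit breaking are inherited. A minimal sketch of a subclass (the `InventoryServiceClient` name and endpoint are illustrative, not part of this commit):

```python
# Hypothetical subclass to show the extension point; names are illustrative.
from shared.clients.base_service_client import BaseServiceClient
from shared.config.base import BaseServiceSettings


class InventoryServiceClient(BaseServiceClient):
    """Example client: all transport concerns come from BaseServiceClient."""

    def __init__(self, config: BaseServiceSettings, service_name: str = "unknown"):
        super().__init__(service_name, config)

    def get_service_base_path(self) -> str:
        return "/api/v1"

    async def get_stock_levels(self, tenant_id: str):
        # Resolves to /api/v1/tenants/{tenant_id}/inventory/stock via the gateway
        return await self.get("inventory/stock", tenant_id=tenant_id)
```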
215
shared/clients/circuit_breaker.py
Executable file
@@ -0,0 +1,215 @@
"""
Circuit Breaker implementation for inter-service communication
Prevents cascading failures by failing fast when a service is unhealthy
"""

import time
import structlog
from enum import Enum
from typing import Callable, Any, Optional
import asyncio

logger = structlog.get_logger()


class CircuitState(Enum):
    """Circuit breaker states"""
    CLOSED = "closed"          # Normal operation, requests pass through
    OPEN = "open"              # Service is failing, reject requests immediately
    HALF_OPEN = "half_open"    # Testing if service has recovered


class CircuitBreakerOpenException(Exception):
    """Raised when circuit breaker is open and rejects a request"""
    pass


class CircuitBreaker:
    """
    Circuit breaker pattern implementation for preventing cascading failures.

    States:
    - CLOSED: Normal operation, all requests pass through
    - OPEN: Service is failing, reject all requests immediately
    - HALF_OPEN: Testing recovery, allow one request through

    Transitions:
    - CLOSED -> OPEN: After failure_threshold consecutive failures
    - OPEN -> HALF_OPEN: After timeout seconds have passed
    - HALF_OPEN -> CLOSED: If test request succeeds
    - HALF_OPEN -> OPEN: If test request fails
    """

    def __init__(
        self,
        service_name: str,
        failure_threshold: int = 5,
        timeout: int = 60,
        success_threshold: int = 2
    ):
        """
        Initialize circuit breaker.

        Args:
            service_name: Name of the service being protected
            failure_threshold: Number of consecutive failures before opening circuit
            timeout: Seconds to wait before attempting recovery (half-open state)
            success_threshold: Consecutive successes needed to close from half-open
        """
        self.service_name = service_name
        self.failure_threshold = failure_threshold
        self.timeout = timeout
        self.success_threshold = success_threshold

        self.state = CircuitState.CLOSED
        self.failure_count = 0
        self.success_count = 0
        self.last_failure_time: Optional[float] = None
        self._lock = asyncio.Lock()

        logger.info(
            "Circuit breaker initialized",
            service=service_name,
            failure_threshold=failure_threshold,
            timeout=timeout
        )

    async def call(self, func: Callable, *args, **kwargs) -> Any:
        """
        Execute function with circuit breaker protection.

        Args:
            func: Async function to execute
            *args, **kwargs: Arguments to pass to func

        Returns:
            Result from func

        Raises:
            CircuitBreakerOpenException: If circuit is open
            Exception: Any exception raised by func
        """
        async with self._lock:
            # Check if circuit should transition to half-open
            if self.state == CircuitState.OPEN:
                if self._should_attempt_reset():
                    logger.info(
                        "Circuit breaker transitioning to half-open",
                        service=self.service_name
                    )
                    self.state = CircuitState.HALF_OPEN
                    self.success_count = 0
                else:
                    # Circuit is open, reject request
                    raise CircuitBreakerOpenException(
                        f"Circuit breaker is OPEN for {self.service_name}. "
                        f"Service will be retried in {self._time_until_retry():.0f} seconds."
                    )

        # Execute function
        try:
            result = await func(*args, **kwargs)
            await self._on_success()
            return result

        except Exception as e:
            await self._on_failure(e)
            raise

    def _should_attempt_reset(self) -> bool:
        """Check if enough time has passed to attempt recovery"""
        if self.last_failure_time is None:
            return True

        return time.time() - self.last_failure_time >= self.timeout

    def _time_until_retry(self) -> float:
        """Calculate seconds until next retry attempt"""
        if self.last_failure_time is None:
            return 0.0

        elapsed = time.time() - self.last_failure_time
        return max(0.0, self.timeout - elapsed)

    async def _on_success(self):
        """Handle successful request"""
        async with self._lock:
            self.failure_count = 0

            if self.state == CircuitState.HALF_OPEN:
                self.success_count += 1
                logger.debug(
                    "Circuit breaker success in half-open state",
                    service=self.service_name,
                    success_count=self.success_count,
                    success_threshold=self.success_threshold
                )

                if self.success_count >= self.success_threshold:
                    logger.info(
                        "Circuit breaker closing - service recovered",
                        service=self.service_name
                    )
                    self.state = CircuitState.CLOSED
                    self.success_count = 0

    async def _on_failure(self, exception: Exception):
        """Handle failed request"""
        async with self._lock:
            self.failure_count += 1
            self.last_failure_time = time.time()

            if self.state == CircuitState.HALF_OPEN:
                logger.warning(
                    "Circuit breaker opening - recovery attempt failed",
                    service=self.service_name,
                    error=str(exception)
                )
                self.state = CircuitState.OPEN
                self.success_count = 0

            elif self.state == CircuitState.CLOSED:
                logger.warning(
                    "Circuit breaker failure recorded",
                    service=self.service_name,
                    failure_count=self.failure_count,
                    threshold=self.failure_threshold,
                    error=str(exception)
                )

                if self.failure_count >= self.failure_threshold:
                    logger.error(
                        "Circuit breaker opening - failure threshold reached",
                        service=self.service_name,
                        failure_count=self.failure_count
                    )
                    self.state = CircuitState.OPEN

    def get_state(self) -> str:
        """Get current circuit breaker state"""
        return self.state.value

    def is_closed(self) -> bool:
        """Check if circuit is closed (normal operation)"""
        return self.state == CircuitState.CLOSED

    def is_open(self) -> bool:
        """Check if circuit is open (failing fast)"""
        return self.state == CircuitState.OPEN

    def is_half_open(self) -> bool:
        """Check if circuit is half-open (testing recovery)"""
        return self.state == CircuitState.HALF_OPEN

    async def reset(self):
        """Manually reset circuit breaker to closed state"""
        async with self._lock:
            logger.info(
                "Circuit breaker manually reset",
                service=self.service_name,
                previous_state=self.state.value
            )
            self.state = CircuitState.CLOSED
            self.failure_count = 0
            self.success_count = 0
            self.last_failure_time = None
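The breaker wraps any awaitable: after `failure_threshold` consecutive failures it rejects calls for `timeout` seconds, then lets probes through until `success_threshold` successes close it again. A self-contained sketch using the class above (the `flaky_fetch` function is invented for illustration):

```python
import asyncio
from shared.clients.circuit_breaker import CircuitBreaker, CircuitBreakerOpenException


async def flaky_fetch() -> dict:
    # Stand-in for a real HTTP call; illustrative only.
    raise RuntimeError("upstream unavailable")


async def main():
    breaker = CircuitBreaker("demo-service", failure_threshold=2, timeout=5)
    for _ in range(4):
        try:
            await breaker.call(flaky_fetch)
        except CircuitBreakerOpenException as e:
            print("fast-failed:", e)  # third and fourth calls are rejected
        except RuntimeError:
            print("upstream error, state =", breaker.get_state())

asyncio.run(main())
```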
477
shared/clients/distribution_client.py
Executable file
@@ -0,0 +1,477 @@
"""
Distribution Service Client for Inter-Service Communication

This client provides a high-level API for interacting with the Distribution Service,
which manages delivery routes, shipment tracking, and vehicle routing optimization for
enterprise multi-location bakery networks.

Key Capabilities:
- Generate daily distribution plans using VRP (Vehicle Routing Problem) optimization
- Manage delivery routes with driver assignments and route sequencing
- Track shipments from pending → packed → in_transit → delivered
- Update shipment status with proof of delivery (POD) metadata
- Filter routes and shipments by date range and status
- Setup enterprise distribution for demo sessions

Enterprise Context:
- Designed for parent-child tenant hierarchies (central production + retail outlets)
- Routes optimize deliveries from parent (central bakery) to children (outlets)
- Integrates with Procurement Service (internal transfer POs) and Inventory Service (stock transfers)
- Publishes shipment.delivered events for inventory ownership transfer

Usage Example:
    ```python
    from shared.clients import create_distribution_client
    from shared.config.base import get_settings

    config = get_settings()
    client = create_distribution_client(config, service_name="orchestrator")

    # Generate daily distribution plan
    plan = await client.generate_daily_distribution_plan(
        tenant_id=parent_tenant_id,
        target_date=date.today(),
        vehicle_capacity_kg=1000.0
    )

    # Get active delivery routes
    routes = await client.get_delivery_routes(
        tenant_id=parent_tenant_id,
        status="in_progress"
    )

    # Update shipment to delivered
    await client.update_shipment_status(
        tenant_id=parent_tenant_id,
        shipment_id=shipment_id,
        new_status="delivered",
        user_id=driver_id,
        metadata={"signature": "...", "photo_url": "..."}
    )
    ```

Service Architecture:
- Base URL: Configured via DISTRIBUTION_SERVICE_URL environment variable
- Authentication: Uses BaseServiceClient with tenant_id header validation
- Error Handling: Returns None on errors, logs detailed error context
- Async: All methods are async and use httpx for HTTP communication

Related Services:
- Procurement Service: Approved internal transfer POs feed into distribution planning
- Inventory Service: Consumes shipment.delivered events for stock ownership transfer
- Tenant Service: Validates parent-child tenant relationships and location data
- Orchestrator Service: Enterprise dashboard displays delivery route status

For more details, see services/distribution/README.md
"""

import structlog
from typing import Dict, Any, List, Optional
from datetime import date

from shared.clients.base_service_client import BaseServiceClient
from shared.config.base import BaseServiceSettings

logger = structlog.get_logger()


class DistributionServiceClient(BaseServiceClient):
    """Client for communicating with the Distribution Service"""

    def __init__(self, config: BaseServiceSettings, service_name: str = "unknown"):
        super().__init__(service_name, config)
        self.service_base_url = config.DISTRIBUTION_SERVICE_URL

    def get_service_base_path(self) -> str:
        return "/api/v1"

    # ================================================================
    # DAILY DISTRIBUTION PLAN ENDPOINTS
    # ================================================================

    async def generate_daily_distribution_plan(
        self,
        tenant_id: str,
        target_date: date,
        vehicle_capacity_kg: float = 1000.0
    ) -> Optional[Dict[str, Any]]:
        """
        Generate daily distribution plan for internal transfers

        Args:
            tenant_id: Tenant ID (should be parent tenant for enterprise)
            target_date: Date for which to generate distribution plan
            vehicle_capacity_kg: Maximum capacity per vehicle

        Returns:
            Distribution plan details
        """
        try:
            # Pass the bare endpoint: when tenant_id is supplied, the base client
            # already prefixes /api/v1/tenants/{tenant_id}/, so repeating the
            # tenants/{tenant_id}/ segment here would double it
            response = await self.post(
                "distribution/plans/generate",
                data={
                    "target_date": target_date.isoformat(),
                    "vehicle_capacity_kg": vehicle_capacity_kg
                },
                tenant_id=tenant_id
            )

            if response:
                logger.info("Generated daily distribution plan",
                            tenant_id=tenant_id,
                            target_date=target_date.isoformat())
                return response
        except Exception as e:
            logger.error("Error generating distribution plan",
                         tenant_id=tenant_id,
                         target_date=target_date,
                         error=str(e))
            return None

    # ================================================================
    # DELIVERY ROUTES ENDPOINTS
    # ================================================================

    async def get_delivery_routes(
        self,
        tenant_id: str,
        date_from: Optional[date] = None,
        date_to: Optional[date] = None,
        status: Optional[str] = None
    ) -> Optional[List[Dict[str, Any]]]:
        """
        Get delivery routes with optional filtering

        Args:
            tenant_id: Tenant ID
            date_from: Start date for filtering
            date_to: End date for filtering
            status: Status filter

        Returns:
            List of delivery route dictionaries
        """
        try:
            params = {}
            if date_from:
                params["date_from"] = date_from.isoformat()
            if date_to:
                params["date_to"] = date_to.isoformat()
            if status:
                params["status"] = status

            # Use _make_request directly to construct correct URL
            # Gateway route: /api/v1/tenants/{tenant_id}/distribution/{path}
            response = await self._make_request(
                "GET",
                f"tenants/{tenant_id}/distribution/routes",
                params=params
            )

            if response:
                # Handle different response formats
                if isinstance(response, list):
                    # Direct list of routes
                    logger.info("Retrieved delivery routes",
                                tenant_id=tenant_id,
                                count=len(response))
                    return response
                elif isinstance(response, dict):
                    if "routes" in response:
                        # Response wrapped in routes key
                        logger.info("Retrieved delivery routes",
                                    tenant_id=tenant_id,
                                    count=len(response.get("routes", [])))
                        return response.get("routes", [])
                    else:
                        # Return the whole dict if it's a single route
                        logger.info("Retrieved delivery routes",
                                    tenant_id=tenant_id,
                                    count=1)
                        return [response]
            logger.info("No delivery routes found",
                        tenant_id=tenant_id)
            return []
        except Exception as e:
            logger.error("Error getting delivery routes",
                         tenant_id=tenant_id,
                         error=str(e))
            return []

    async def get_delivery_route_detail(
        self,
        tenant_id: str,
        route_id: str
    ) -> Optional[Dict[str, Any]]:
        """
        Get detailed information about a specific delivery route

        Args:
            tenant_id: Tenant ID
            route_id: Route ID

        Returns:
            Delivery route details
        """
        try:
            response = await self.get(
                f"distribution/routes/{route_id}",
                tenant_id=tenant_id
            )

            if response:
                logger.info("Retrieved delivery route detail",
                            tenant_id=tenant_id,
                            route_id=route_id)
                # Ensure we return the route data directly if it's wrapped in a route key
                if isinstance(response, dict) and "route" in response:
                    return response["route"]
                return response
        except Exception as e:
            logger.error("Error getting delivery route detail",
                         tenant_id=tenant_id,
                         route_id=route_id,
                         error=str(e))
            return None

    # ================================================================
    # SHIPMENT ENDPOINTS
    # ================================================================

    async def get_shipments(
        self,
        tenant_id: str,
        date_from: Optional[date] = None,
        date_to: Optional[date] = None,
        status: Optional[str] = None
    ) -> Optional[List[Dict[str, Any]]]:
        """
        Get shipments with optional filtering

        Args:
            tenant_id: Tenant ID
            date_from: Start date for filtering
            date_to: End date for filtering
            status: Status filter

        Returns:
            List of shipment dictionaries
        """
        try:
            params = {}
            if date_from:
                params["date_from"] = date_from.isoformat()
            if date_to:
                params["date_to"] = date_to.isoformat()
            if status:
                params["status"] = status

            # Use _make_request directly to construct correct URL
            # Gateway route: /api/v1/tenants/{tenant_id}/distribution/{path}
            response = await self._make_request(
                "GET",
                f"tenants/{tenant_id}/distribution/shipments",
                params=params
            )

            if response:
                # Handle different response formats
                if isinstance(response, list):
                    # Direct list of shipments
                    logger.info("Retrieved shipments",
                                tenant_id=tenant_id,
                                count=len(response))
                    return response
                elif isinstance(response, dict):
                    if "shipments" in response:
                        # Response wrapped in shipments key
                        logger.info("Retrieved shipments",
                                    tenant_id=tenant_id,
                                    count=len(response.get("shipments", [])))
                        return response.get("shipments", [])
                    else:
                        # Return the whole dict if it's a single shipment
                        logger.info("Retrieved shipments",
                                    tenant_id=tenant_id,
                                    count=1)
                        return [response]
            logger.info("No shipments found",
                        tenant_id=tenant_id)
            return []
        except Exception as e:
            logger.error("Error getting shipments",
                         tenant_id=tenant_id,
                         error=str(e))
            return []

    async def get_shipment_detail(
        self,
        tenant_id: str,
        shipment_id: str
    ) -> Optional[Dict[str, Any]]:
        """
        Get detailed information about a specific shipment

        Args:
            tenant_id: Tenant ID
            shipment_id: Shipment ID

        Returns:
            Shipment details
        """
        try:
            response = await self.get(
                f"distribution/shipments/{shipment_id}",
                tenant_id=tenant_id
            )

            if response:
                logger.info("Retrieved shipment detail",
                            tenant_id=tenant_id,
                            shipment_id=shipment_id)
                # Ensure we return the shipment data directly if it's wrapped in a shipment key
                if isinstance(response, dict) and "shipment" in response:
                    return response["shipment"]
                return response
        except Exception as e:
            logger.error("Error getting shipment detail",
                         tenant_id=tenant_id,
                         shipment_id=shipment_id,
                         error=str(e))
            return None

    async def update_shipment_status(
        self,
        tenant_id: str,
        shipment_id: str,
        new_status: str,
        user_id: str,
        metadata: Optional[Dict[str, Any]] = None
    ) -> Optional[Dict[str, Any]]:
        """
        Update shipment status

        Args:
            tenant_id: Tenant ID
            shipment_id: Shipment ID
            new_status: New status
            user_id: User ID performing update
            metadata: Additional metadata for the update

        Returns:
            Updated shipment details
        """
        try:
            payload = {
                "status": new_status,
                "updated_by_user_id": user_id,
                "metadata": metadata or {}
            }

            response = await self.put(
                f"distribution/shipments/{shipment_id}/status",
                data=payload,
                tenant_id=tenant_id
            )

            if response:
                logger.info("Updated shipment status",
                            tenant_id=tenant_id,
                            shipment_id=shipment_id,
                            new_status=new_status)
                return response
        except Exception as e:
            logger.error("Error updating shipment status",
                         tenant_id=tenant_id,
                         shipment_id=shipment_id,
                         new_status=new_status,
                         error=str(e))
            return None

    # ================================================================
    # INTERNAL DEMO ENDPOINTS
    # ================================================================

    # Legacy setup_enterprise_distribution_demo method removed
    # Distribution now uses standard /internal/demo/clone endpoint via DataCloner

    async def get_shipments_for_date(
        self,
        tenant_id: str,
        target_date: date
    ) -> Optional[List[Dict[str, Any]]]:
        """
        Get all shipments for a specific date

        Args:
            tenant_id: Tenant ID
            target_date: Target date

        Returns:
            List of shipments for the date
        """
        try:
            # Use _make_request directly to construct correct URL
            # Gateway route: /api/v1/tenants/{tenant_id}/distribution/{path}
            response = await self._make_request(
                "GET",
                f"tenants/{tenant_id}/distribution/shipments",
                params={
                    "date_from": target_date.isoformat(),
                    "date_to": target_date.isoformat()
                }
            )

            if response:
                # Handle different response formats
                if isinstance(response, list):
                    # Direct list of shipments
                    logger.info("Retrieved shipments for date",
                                tenant_id=tenant_id,
                                target_date=target_date.isoformat(),
                                shipment_count=len(response))
                    return response
                elif isinstance(response, dict):
                    if "shipments" in response:
                        # Response wrapped in shipments key
                        logger.info("Retrieved shipments for date",
                                    tenant_id=tenant_id,
                                    target_date=target_date.isoformat(),
                                    shipment_count=len(response.get("shipments", [])))
                        return response.get("shipments", [])
                    else:
                        # Return the whole dict if it's a single shipment
                        logger.info("Retrieved shipments for date",
                                    tenant_id=tenant_id,
                                    target_date=target_date.isoformat(),
                                    shipment_count=1)
                        return [response]
            logger.info("No shipments found for date",
                        tenant_id=tenant_id,
                        target_date=target_date.isoformat())
            return []
        except Exception as e:
            logger.error("Error getting shipments for date",
                         tenant_id=tenant_id,
                         target_date=target_date,
                         error=str(e))
            return []

    # ================================================================
    # HEALTH CHECK
    # ================================================================

    async def health_check(self) -> bool:
        """Check if distribution service is healthy"""
        try:
            # Use base health check method
            response = await self.get("health")
            return response is not None
        except Exception as e:
            logger.error("Distribution service health check failed", error=str(e))
            return False


# Factory function for dependency injection
def create_distribution_client(config: BaseServiceSettings, service_name: str = "unknown") -> DistributionServiceClient:
    """Create distribution service client instance"""
    return DistributionServiceClient(config, service_name)
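The list-vs-dict response normalization repeats across get_delivery_routes, get_shipments, and get_shipments_for_date. A small helper could centralize it; a possible refactor sketch (the `_unwrap_list` name is a suggestion, not part of this commit):

```python
# Suggested helper (hypothetical): collapses the repeated response-shape handling.
from typing import Any, Dict, List, Optional, Union


def _unwrap_list(response: Optional[Union[list, dict]], key: str) -> List[Dict[str, Any]]:
    """Normalize a service response to a list of records."""
    if not response:
        return []
    if isinstance(response, list):
        return response                   # direct list
    if isinstance(response, dict):
        if key in response:
            return response.get(key, []) # wrapped in e.g. "routes" / "shipments"
        return [response]                # single record
    return []
```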
611
shared/clients/external_client.py
Executable file
@@ -0,0 +1,611 @@
# shared/clients/external_client.py
"""
External Service Client
Handles all API calls to the external service (weather and traffic data)
"""

import httpx
import structlog
from typing import Dict, Any, Optional, List

from .base_service_client import BaseServiceClient
from shared.config.base import BaseServiceSettings

logger = structlog.get_logger()


class ExternalServiceClient(BaseServiceClient):
    """Client for communicating with the external service"""

    def __init__(self, config: BaseServiceSettings, calling_service_name: str = "unknown"):
        super().__init__(calling_service_name, config)
        self.service_url = config.EXTERNAL_SERVICE_URL

    def get_service_base_path(self) -> str:
        return "/api/v1"

    # ================================================================
    # WEATHER DATA
    # ================================================================

    async def get_weather_historical(
        self,
        tenant_id: str,
        start_date: str,
        end_date: str,
        latitude: Optional[float] = None,
        longitude: Optional[float] = None
    ) -> Optional[List[Dict[str, Any]]]:
        """
        Get historical weather data using the v2.0 optimized city-based endpoint.
        Uses pre-loaded data from the database with Redis caching for <100ms response times
        """
        # Prepare query parameters
        params = {
            "latitude": latitude or 40.4168,   # Default Madrid coordinates
            "longitude": longitude or -3.7038,
            "start_date": start_date,          # ISO format datetime
            "end_date": end_date               # ISO format datetime
        }

        logger.info(f"Weather request (v2.0 optimized): {params}", tenant_id=tenant_id)

        # Use GET request to the optimized endpoint with a short timeout (data is cached)
        result = await self._make_request(
            "GET",
            "external/operations/historical-weather-optimized",
            tenant_id=tenant_id,
            params=params,
            timeout=10.0
        )

        if result:
            logger.info(f"Successfully fetched {len(result)} weather records from v2.0 endpoint")
            return result
        else:
            logger.warning("No weather data returned from v2.0 endpoint")
            return []

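A typical training-pipeline call, assuming ISO datetime strings for the date window (the function name, date range, and Barcelona coordinates are illustrative):

```python
# Illustrative call site; external_client comes from your service wiring.
async def fetch_q1_weather(external_client, tenant_id: str):
    # Dates are ISO datetimes; coordinates default to Madrid when omitted.
    return await external_client.get_weather_historical(
        tenant_id=tenant_id,
        start_date="2024-01-01T00:00:00",
        end_date="2024-03-31T23:59:59",
        latitude=41.3874,   # Barcelona, as an example
        longitude=2.1686,
    )
```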
    async def get_current_weather(
        self,
        tenant_id: str,
        latitude: Optional[float] = None,
        longitude: Optional[float] = None
    ) -> Optional[Dict[str, Any]]:
        """
        Get current weather for a location (real-time data).
        Uses the v2.0 endpoint
        """
        params = {
            "latitude": latitude or 40.4168,
            "longitude": longitude or -3.7038
        }

        logger.info(f"Current weather request (v2.0): {params}", tenant_id=tenant_id)

        result = await self._make_request(
            "GET",
            "external/operations/weather/current",
            tenant_id=tenant_id,
            params=params,
            timeout=10.0
        )

        if result:
            logger.info("Successfully fetched current weather")
            return result
        else:
            logger.warning("No current weather data available")
            return None

    async def get_weather_forecast(
        self,
        tenant_id: str,
        days: int = 7,
        latitude: Optional[float] = None,
        longitude: Optional[float] = None
    ) -> Optional[List[Dict[str, Any]]]:
        """
        Get weather forecast for a location (from AEMET).
        Uses the v2.0 endpoint
        """
        params = {
            "latitude": latitude or 40.4168,
            "longitude": longitude or -3.7038,
            "days": days
        }

        logger.info(f"Weather forecast request (v2.0): {params}", tenant_id=tenant_id)

        result = await self._make_request(
            "GET",
            "external/operations/weather/forecast",
            tenant_id=tenant_id,
            params=params,
            timeout=10.0
        )

        if result:
            logger.info(f"Successfully fetched weather forecast for {days} days")
            return result
        else:
            logger.warning("No forecast data available")
            return []

    # ================================================================
    # TRAFFIC DATA
    # ================================================================

    async def get_traffic_data(
        self,
        tenant_id: str,
        start_date: str,
        end_date: str,
        latitude: Optional[float] = None,
        longitude: Optional[float] = None
    ) -> Optional[List[Dict[str, Any]]]:
        """
        Get historical traffic data using the v2.0 optimized city-based endpoint.
        Uses pre-loaded data from the database with Redis caching for <100ms response times
        """
        # Prepare query parameters
        params = {
            "latitude": latitude or 40.4168,   # Default Madrid coordinates
            "longitude": longitude or -3.7038,
            "start_date": start_date,          # ISO format datetime
            "end_date": end_date               # ISO format datetime
        }

        logger.info(f"Traffic request (v2.0 optimized): {params}", tenant_id=tenant_id)

        # Use GET request to the optimized endpoint with a short timeout (data is cached)
        result = await self._make_request(
            "GET",
            "external/operations/historical-traffic-optimized",
            tenant_id=tenant_id,
            params=params,
            timeout=10.0
        )

        if result:
            logger.info(f"Successfully fetched {len(result)} traffic records from v2.0 endpoint")
            return result
        else:
            logger.warning("No traffic data returned from v2.0 endpoint")
            return []

    async def get_stored_traffic_data_for_training(
        self,
        tenant_id: str,
        start_date: str,
        end_date: str,
        latitude: Optional[float] = None,
        longitude: Optional[float] = None
    ) -> Optional[List[Dict[str, Any]]]:
        """
        Get stored traffic data for model training/re-training.
        In v2.0 this uses the same optimized endpoint as get_traffic_data,
        since all data is pre-loaded and cached
        """
        logger.info("Training traffic data request - delegating to optimized endpoint", tenant_id=tenant_id)

        # Delegate to the same optimized endpoint
        return await self.get_traffic_data(
            tenant_id=tenant_id,
            start_date=start_date,
            end_date=end_date,
            latitude=latitude,
            longitude=longitude
        )

    async def get_current_traffic(
        self,
        tenant_id: str,
        latitude: Optional[float] = None,
        longitude: Optional[float] = None
    ) -> Optional[Dict[str, Any]]:
        """
        Get current traffic conditions for a location (real-time data).
        Uses the v2.0 endpoint
        """
        params = {
            "latitude": latitude or 40.4168,
            "longitude": longitude or -3.7038
        }

        logger.info(f"Current traffic request (v2.0): {params}", tenant_id=tenant_id)

        result = await self._make_request(
            "GET",
            "external/operations/traffic/current",
            tenant_id=tenant_id,
            params=params,
            timeout=10.0
        )

        if result:
            logger.info("Successfully fetched current traffic")
            return result
        else:
            logger.warning("No current traffic data available")
            return None

    # ================================================================
    # CALENDAR DATA (School Calendars and Hyperlocal Information)
    # ================================================================

    async def get_tenant_location_context(
        self,
        tenant_id: str
    ) -> Optional[Dict[str, Any]]:
        """
        Get tenant location context including school calendar assignment
        """
        logger.info("Fetching tenant location context", tenant_id=tenant_id)

        result = await self._make_request(
            "GET",
            "external/location-context",
            tenant_id=tenant_id,
            timeout=5.0
        )

        if result:
            logger.info("Successfully fetched tenant location context", tenant_id=tenant_id)
            return result
        else:
            logger.info("No location context found for tenant", tenant_id=tenant_id)
            return None

    async def create_tenant_location_context(
        self,
        tenant_id: str,
        city_id: str,
        school_calendar_id: Optional[str] = None,
        neighborhood: Optional[str] = None,
        local_events: Optional[List[Dict[str, Any]]] = None,
        notes: Optional[str] = None
    ) -> Optional[Dict[str, Any]]:
        """
        Create or update location context for a tenant.

        This establishes the city association for a tenant and optionally assigns
        a school calendar. Typically called during tenant registration to set up
        location-based context for ML features.

        Args:
            tenant_id: Tenant UUID
            city_id: Normalized city ID (e.g., "madrid", "barcelona")
            school_calendar_id: Optional school calendar UUID to assign
            neighborhood: Optional neighborhood name
            local_events: Optional list of local events with impact data
            notes: Optional notes about the location context

        Returns:
            Dict with created location context including nested calendar details,
            or None if creation failed
        """
        payload = {"city_id": city_id}

        if school_calendar_id:
            payload["school_calendar_id"] = school_calendar_id
        if neighborhood:
            payload["neighborhood"] = neighborhood
        if local_events:
            payload["local_events"] = local_events
        if notes:
            payload["notes"] = notes

        logger.info(
            "Creating tenant location context",
            tenant_id=tenant_id,
            city_id=city_id,
            has_calendar=bool(school_calendar_id)
        )

        result = await self._make_request(
            "POST",
            "external/location-context",
            tenant_id=tenant_id,
            data=payload,
            timeout=10.0
        )

        if result:
            logger.info(
                "Successfully created tenant location context",
                tenant_id=tenant_id,
                city_id=city_id
            )
            return result
        else:
            logger.warning(
                "Failed to create tenant location context",
                tenant_id=tenant_id,
                city_id=city_id
            )
            return None

    async def suggest_calendar_for_tenant(
        self,
        tenant_id: str
    ) -> Optional[Dict[str, Any]]:
        """
        Get smart calendar suggestion for a tenant based on POI data and location.

        Analyzes tenant's location context, nearby schools from POI detection,
        and available calendars to provide an intelligent suggestion with
        confidence score and reasoning.

        Args:
            tenant_id: Tenant UUID

        Returns:
            Dict with:
            - suggested_calendar_id: Suggested calendar UUID
            - calendar_name: Name of suggested calendar
            - confidence: Float 0.0-1.0
            - confidence_percentage: Percentage format
            - reasoning: List of reasoning steps
            - fallback_calendars: Alternative suggestions
            - should_auto_assign: Boolean recommendation
            - admin_message: Formatted message for display
            - school_analysis: Analysis of nearby schools
            Or None if request failed
        """
        logger.info("Requesting calendar suggestion", tenant_id=tenant_id)

        result = await self._make_request(
            "POST",
            "external/location-context/suggest-calendar",
            tenant_id=tenant_id,
            timeout=10.0
        )

        if result:
            confidence = result.get("confidence_percentage", 0)
            suggested = result.get("calendar_name", "None")
            logger.info(
                "Calendar suggestion received",
                tenant_id=tenant_id,
                suggested_calendar=suggested,
                confidence=confidence
            )
            return result
        else:
            logger.warning(
                "Failed to get calendar suggestion",
                tenant_id=tenant_id
            )
            return None

    async def get_school_calendar(
        self,
        calendar_id: str,
        tenant_id: str
    ) -> Optional[Dict[str, Any]]:
        """
        Get school calendar details by ID
        """
        logger.info("Fetching school calendar", calendar_id=calendar_id, tenant_id=tenant_id)

        result = await self._make_request(
            "GET",
            f"external/operations/school-calendars/{calendar_id}",
            tenant_id=tenant_id,
            timeout=5.0
        )

        if result:
            logger.info("Successfully fetched school calendar", calendar_id=calendar_id)
            return result
        else:
            logger.warning("School calendar not found", calendar_id=calendar_id)
            return None

    async def check_is_school_holiday(
        self,
        calendar_id: str,
        check_date: str,
        tenant_id: str
    ) -> Optional[Dict[str, Any]]:
        """
        Check if a specific date is a school holiday

        Args:
            calendar_id: School calendar UUID
            check_date: Date to check in ISO format (YYYY-MM-DD)
            tenant_id: Tenant ID for auth

        Returns:
            Dict with is_holiday, holiday_name, etc.
        """
        params = {"check_date": check_date}

        logger.debug(
            "Checking school holiday status",
            calendar_id=calendar_id,
            date=check_date,
            tenant_id=tenant_id
        )

        result = await self._make_request(
            "GET",
            f"external/operations/school-calendars/{calendar_id}/is-holiday",
            tenant_id=tenant_id,
            params=params,
            timeout=5.0
        )

        return result

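Forecasting features usually want a boolean flag per date over a horizon. A hedged sketch of building one from this endpoint (the helper name is invented; the `is_holiday` field follows the docstring above, and a None response is treated as "not a holiday"):

```python
from datetime import date, timedelta


async def school_holiday_flags(client, calendar_id: str, tenant_id: str,
                               start: date, days: int) -> dict:
    # Build {iso_date: is_holiday} for a forecasting horizon.
    flags = {}
    for offset in range(days):
        d = (start + timedelta(days=offset)).isoformat()
        result = await client.check_is_school_holiday(calendar_id, d, tenant_id)
        flags[d] = bool(result and result.get("is_holiday"))
    return flags
```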
    async def get_city_school_calendars(
        self,
        city_id: str,
        tenant_id: str,
        school_type: Optional[str] = None,
        academic_year: Optional[str] = None
    ) -> Optional[Dict[str, Any]]:
        """
        Get all school calendars for a city with optional filters

        Args:
            city_id: City ID (e.g., "madrid")
            tenant_id: Tenant ID for auth
            school_type: Optional filter by school type
            academic_year: Optional filter by academic year

        Returns:
            Dict with calendars list and total count
        """
        params = {}
        if school_type:
            params["school_type"] = school_type
        if academic_year:
            params["academic_year"] = academic_year

        logger.info(
            "Fetching school calendars for city",
            city_id=city_id,
            tenant_id=tenant_id,
            filters=params
        )

        result = await self._make_request(
            "GET",
            f"external/operations/cities/{city_id}/school-calendars",
            tenant_id=tenant_id,
            params=params if params else None,
            timeout=5.0
        )

        if result:
            logger.info(
                "Successfully fetched school calendars",
                city_id=city_id,
                total=result.get("total", 0)
            )
            return result
        else:
            logger.warning("No school calendars found for city", city_id=city_id)
            return None

    # ================================================================
    # POI (POINT OF INTEREST) DATA
    # ================================================================

async def detect_poi_for_tenant(
|
||||
self,
|
||||
tenant_id: str,
|
||||
latitude: float,
|
||||
longitude: float,
|
||||
force_refresh: bool = False
|
||||
) -> Optional[Dict[str, Any]]:
|
||||
"""
|
||||
Detect POIs for a tenant's location and generate ML features for forecasting.
|
||||
|
||||
With the new tenant-based architecture:
|
||||
- Gateway receives at: /api/v1/tenants/{tenant_id}/external/poi-context/detect
|
||||
- Gateway proxies to external service at: /api/v1/tenants/{tenant_id}/poi-context/detect
|
||||
- This client calls: poi-context/detect (base client automatically constructs with tenant)
|
||||
|
||||
This triggers POI detection using Overpass API and calculates ML features
|
||||
for demand forecasting.
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant ID
|
||||
latitude: Latitude of the bakery location
|
||||
longitude: Longitude of the bakery location
|
||||
force_refresh: Whether to force refresh even if POI context exists
|
||||
|
||||
Returns:
|
||||
Dict with POI detection results including:
|
||||
- ml_features: Dict of POI features for ML models (e.g., poi_retail_total_count)
|
||||
- poi_detection_results: Full detection results
|
||||
- location: Latitude/longitude
|
||||
- total_pois_detected: Count of POIs
|
||||
"""
|
||||
logger.info(
|
||||
"Detecting POIs for tenant",
|
||||
tenant_id=tenant_id,
|
||||
location=(latitude, longitude),
|
||||
force_refresh=force_refresh
|
||||
)
|
||||
|
||||
params = {
|
||||
"latitude": latitude,
|
||||
"longitude": longitude,
|
||||
"force_refresh": force_refresh
|
||||
}
|
||||
|
||||
# Updated endpoint path to follow tenant-based pattern: external/poi-context/detect
|
||||
result = await self._make_request(
|
||||
"POST",
|
||||
"external/poi-context/detect", # Path will become /api/v1/tenants/{tenant_id}/external/poi-context/detect by base client
|
||||
tenant_id=tenant_id, # Pass tenant_id to include in headers and path construction
|
||||
params=params,
|
||||
timeout=60.0 # POI detection can take longer
|
||||
)
|
||||
|
||||
if result:
|
||||
poi_context = result.get("poi_context", {})
|
||||
ml_features = poi_context.get("ml_features", {})
|
||||
|
||||
logger.info(
|
||||
"POI detection completed successfully",
|
||||
tenant_id=tenant_id,
|
||||
total_pois=poi_context.get("total_pois_detected", 0),
|
||||
ml_features_count=len(ml_features),
|
||||
source=result.get("source", "unknown")
|
||||
)
|
||||
return result
|
||||
else:
|
||||
logger.warning("POI detection failed for tenant", tenant_id=tenant_id)
|
||||
return None
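
    # Usage sketch for POI detection (illustrative only; the coordinates are
    # hypothetical and `client` is assumed to be an initialized instance of
    # this class inside an async context):
    #
    #     detection = await client.detect_poi_for_tenant(
    #         tenant_id=tenant_id,
    #         latitude=40.4168,    # hypothetical: central Madrid
    #         longitude=-3.7038,
    #         force_refresh=False,
    #     )
    #     if detection:
    #         features = detection.get("poi_context", {}).get("ml_features", {})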

    async def get_poi_context(
        self,
        tenant_id: str
    ) -> Optional[Dict[str, Any]]:
        """
        Get POI context for a tenant, including ML features for forecasting.

        With the tenant-based architecture:
        - Gateway receives: /api/v1/tenants/{tenant_id}/external/poi-context
        - Gateway proxies to the external service at: /api/v1/tenants/{tenant_id}/poi-context
        - This client calls: poi-context (the base client prepends the tenant prefix)

        This retrieves stored POI detection results and calculated ML features
        that should be included in demand forecasting predictions.

        Args:
            tenant_id: Tenant ID

        Returns:
            Dict with POI context including:
            - ml_features: Dict of POI features for ML models (e.g., poi_retail_total_count)
            - poi_detection_results: Full detection results
            - location: Latitude/longitude
            - total_pois_detected: Count of POIs
        """
        logger.info("Fetching POI context for forecasting", tenant_id=tenant_id)

        # Endpoint path follows the tenant-based pattern: external/poi-context
        result = await self._make_request(
            "GET",
            "external/poi-context",  # Base client expands this to /api/v1/tenants/{tenant_id}/external/poi-context
            tenant_id=tenant_id,  # Included in headers and path construction
            timeout=5.0
        )

        if result:
            logger.info(
                "Successfully fetched POI context",
                tenant_id=tenant_id,
                total_pois=result.get("total_pois_detected", 0),
                ml_features_count=len(result.get("ml_features", {}))
            )
            return result
        else:
            logger.info("No POI context found for tenant", tenant_id=tenant_id)
            return None
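
    # Sketch of feeding stored POI features into a forecast request (a plausible
    # pattern given the docstring above, not a documented contract; the
    # `base_features` dict is hypothetical):
    #
    #     poi_context = await client.get_poi_context(tenant_id)
    #     features = dict(base_features)
    #     if poi_context:
    #         features.update(poi_context.get("ml_features", {}))
    #     # `features` can then be passed to a prediction call that accepts
    #     # custom features, e.g. the forecast client's realtime endpoint.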

510
shared/clients/forecast_client.py
Executable file
@@ -0,0 +1,510 @@
# shared/clients/forecast_client.py
"""
Forecast Service Client for Inter-Service Communication

This client provides a high-level API for interacting with the Forecasting Service,
which generates demand predictions using the Prophet ML algorithm, validates forecast
accuracy, and provides enterprise network demand aggregation for multi-location bakeries.

Key Capabilities:
- Forecast Generation: Single product, multi-day, batch forecasting
- Real-Time Predictions: On-demand predictions with custom features
- Forecast Validation: Compare predictions vs actual sales, track accuracy
- Analytics: Prediction performance metrics, historical accuracy trends
- Enterprise Aggregation: Network-wide demand forecasting for parent-child hierarchies
- Caching: Redis-backed caching for high-performance prediction serving

Backend Architecture:
- ATOMIC: /forecasting/forecasts (CRUD operations on forecast records)
- BUSINESS: /forecasting/operations/* (forecast generation, validation)
- ANALYTICS: /forecasting/analytics/* (performance metrics, accuracy trends)
- ENTERPRISE: /forecasting/enterprise/* (network demand aggregation)

Enterprise Features (NEW):
- Network demand aggregation across all child outlets for centralized production planning
- Child contribution tracking (each outlet's % of total network demand)
- Redis caching with 1-hour TTL for enterprise forecasts
- Subscription gating (requires Enterprise tier)

Usage Example:
    ```python
    from shared.clients import get_forecast_client
    from shared.config.base import get_settings
    from datetime import date, timedelta

    config = get_settings()
    client = get_forecast_client(config, calling_service_name="production")

    # Generate 7-day forecast for a product
    forecast = await client.generate_multi_day_forecast(
        tenant_id=tenant_id,
        inventory_product_id=product_id,
        forecast_date=date.today(),
        forecast_days=7,
        include_recommendations=True
    )

    # Batch forecast for multiple products
    batch_forecast = await client.generate_batch_forecast(
        tenant_id=tenant_id,
        inventory_product_ids=[product_id_1, product_id_2],
        forecast_date=date.today(),
        forecast_days=7
    )

    # Validate forecasts against actual sales
    validation = await client.validate_forecasts(
        tenant_id=tenant_id,
        date=date.today() - timedelta(days=1)
    )

    # Get predictions for a specific date (from cache or DB)
    predictions = await client.get_predictions_for_date(
        tenant_id=tenant_id,
        target_date=date.today()
    )
    ```

Service Architecture:
- Base URL: Configured via the FORECASTING_SERVICE_URL environment variable
- Authentication: Uses BaseServiceClient with tenant_id header validation
- Error Handling: Returns None on errors, logs detailed error context
- Async: All methods are async and use httpx for HTTP communication
- Caching: 24-hour TTL for standard forecasts, 1-hour TTL for enterprise aggregations

ML Model Details:
- Algorithm: Facebook Prophet (time series forecasting)
- Features: 20+ temporal, weather, traffic, holiday, POI features
- Accuracy: 15-25% MAPE (Mean Absolute Percentage Error)
- Training: Weekly retraining via orchestrator automation
- Confidence Intervals: 95% confidence bounds (yhat_lower, yhat_upper)

Related Services:
- Production Service: Uses forecasts for production planning
- Procurement Service: Uses forecasts for ingredient ordering
- Orchestrator Service: Triggers daily forecast generation, displays network forecasts on the enterprise dashboard
- Tenant Service: Validates hierarchy for enterprise aggregation
- Distribution Service: Network forecasts inform capacity planning

For more details, see services/forecasting/README.md
"""

from typing import Dict, Any, Optional, List
from datetime import date
import structlog
from .base_service_client import BaseServiceClient
from shared.config.base import BaseServiceSettings

logger = structlog.get_logger()


class ForecastServiceClient(BaseServiceClient):
    """Client for communicating with the forecasting service"""

    def __init__(self, config: BaseServiceSettings, calling_service_name: str = "unknown"):
        super().__init__(calling_service_name, config)

    def get_service_base_path(self) -> str:
        return "/api/v1"

    # ================================================================
    # ATOMIC: Forecast CRUD Operations
    # ================================================================

    async def get_forecast(self, tenant_id: str, forecast_id: str) -> Optional[Dict[str, Any]]:
        """Get forecast details by ID"""
        return await self.get(f"forecasting/forecasts/{forecast_id}", tenant_id=tenant_id)

    async def list_forecasts(
        self,
        tenant_id: str,
        inventory_product_id: Optional[str] = None,
        start_date: Optional[date] = None,
        end_date: Optional[date] = None,
        limit: int = 50,
        offset: int = 0
    ) -> Optional[List[Dict[str, Any]]]:
        """List forecasts for a tenant with optional filters"""
        params = {"limit": limit, "offset": offset}
        if inventory_product_id:
            params["inventory_product_id"] = inventory_product_id
        if start_date:
            params["start_date"] = start_date.isoformat()
        if end_date:
            params["end_date"] = end_date.isoformat()

        return await self.get("forecasting/forecasts", tenant_id=tenant_id, params=params)

    async def delete_forecast(self, tenant_id: str, forecast_id: str) -> Optional[Dict[str, Any]]:
        """Delete a forecast"""
        return await self.delete(f"forecasting/forecasts/{forecast_id}", tenant_id=tenant_id)

    # ================================================================
    # BUSINESS: Forecasting Operations
    # ================================================================

    async def generate_single_forecast(
        self,
        tenant_id: str,
        inventory_product_id: str,
        forecast_date: date,
        include_recommendations: bool = False
    ) -> Optional[Dict[str, Any]]:
        """Generate a single product forecast"""
        data = {
            "inventory_product_id": inventory_product_id,
            "forecast_date": forecast_date.isoformat(),
            "include_recommendations": include_recommendations
        }
        return await self.post("forecasting/operations/single", data=data, tenant_id=tenant_id)

    async def generate_multi_day_forecast(
        self,
        tenant_id: str,
        inventory_product_id: str,
        forecast_date: date,
        forecast_days: int = 7,
        include_recommendations: bool = False
    ) -> Optional[Dict[str, Any]]:
        """Generate multiple daily forecasts for the specified period"""
        data = {
            "inventory_product_id": inventory_product_id,
            "forecast_date": forecast_date.isoformat(),
            "forecast_days": forecast_days,
            "include_recommendations": include_recommendations
        }
        return await self.post("forecasting/operations/multi-day", data=data, tenant_id=tenant_id)

    async def generate_batch_forecast(
        self,
        tenant_id: str,
        inventory_product_ids: List[str],
        forecast_date: date,
        forecast_days: int = 1
    ) -> Optional[Dict[str, Any]]:
        """Generate forecasts for multiple products in batch"""
        data = {
            "inventory_product_ids": inventory_product_ids,
            "forecast_date": forecast_date.isoformat(),
            "forecast_days": forecast_days
        }
        return await self.post("forecasting/operations/batch", data=data, tenant_id=tenant_id)

    async def generate_realtime_prediction(
        self,
        tenant_id: str,
        inventory_product_id: str,
        model_id: str,
        features: Dict[str, Any],
        model_path: Optional[str] = None,
        confidence_level: float = 0.8
    ) -> Optional[Dict[str, Any]]:
        """Generate a real-time prediction"""
        data = {
            "inventory_product_id": inventory_product_id,
            "model_id": model_id,
            "features": features,
            "confidence_level": confidence_level
        }
        if model_path:
            data["model_path"] = model_path

        return await self.post("forecasting/operations/realtime", data=data, tenant_id=tenant_id)

    async def validate_predictions(
        self,
        tenant_id: str,
        start_date: date,
        end_date: date
    ) -> Optional[Dict[str, Any]]:
        """Validate predictions against actual sales data"""
        params = {
            "start_date": start_date.isoformat(),
            "end_date": end_date.isoformat()
        }
        return await self.post("forecasting/operations/validate-predictions", params=params, tenant_id=tenant_id)

    async def validate_forecasts(
        self,
        tenant_id: str,
        date: date
    ) -> Optional[Dict[str, Any]]:
        """
        Validate forecasts for a specific date against actual sales.
        Calculates MAPE, RMSE, MAE and identifies products with poor accuracy.

        Args:
            tenant_id: Tenant UUID
            date: Date to validate (validates this single day)

        Returns:
            Dict with overall metrics and a list of poor-accuracy products
        """
        # Call the validation endpoint. Note: the endpoint validates yesterday's
        # forecasts server-side; the `date` argument is currently not forwarded.
        result = await self.post(
            "forecasting/validation/validate-yesterday",
            params={"orchestration_run_id": None},
            tenant_id=tenant_id
        )

        if not result:
            return None

        # Transform the response into the format expected by callers
        overall_metrics = result.get("overall_metrics", {})
        poor_accuracy_products = result.get("poor_accuracy_products", [])

        return {
            "overall_mape": overall_metrics.get("mape", 0),
            "overall_rmse": overall_metrics.get("rmse", 0),
            "overall_mae": overall_metrics.get("mae", 0),
            "overall_r2_score": overall_metrics.get("r2_score", 0),
            "overall_accuracy_percentage": overall_metrics.get("accuracy_percentage", 0),
            "products_validated": result.get("forecasts_with_actuals", 0),
            "poor_accuracy_products": poor_accuracy_products,
            "validation_run_id": result.get("validation_run_id"),
            "forecasts_evaluated": result.get("forecasts_evaluated", 0),
            "forecasts_with_actuals": result.get("forecasts_with_actuals", 0),
            "forecasts_without_actuals": result.get("forecasts_without_actuals", 0)
        }
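
    # Sketch of consuming the normalized validation result (keys are those
    # constructed above; the 20% MAPE threshold is an arbitrary example):
    #
    #     report = await client.validate_forecasts(tenant_id=tenant_id, date=yesterday)
    #     if report and report["overall_mape"] > 20:
    #         for product in report["poor_accuracy_products"]:
    #             ...  # e.g. flag the product for model retraining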

    async def get_forecast_statistics(
        self,
        tenant_id: str,
        start_date: Optional[date] = None,
        end_date: Optional[date] = None
    ) -> Optional[Dict[str, Any]]:
        """Get forecast statistics"""
        params = {}
        if start_date:
            params["start_date"] = start_date.isoformat()
        if end_date:
            params["end_date"] = end_date.isoformat()

        return await self.get("forecasting/operations/statistics", tenant_id=tenant_id, params=params)

    async def clear_prediction_cache(self, tenant_id: str) -> Optional[Dict[str, Any]]:
        """Clear the prediction cache"""
        return await self.delete("forecasting/operations/cache", tenant_id=tenant_id)

    # ================================================================
    # ANALYTICS: Forecasting Analytics
    # ================================================================

    async def get_predictions_performance(
        self,
        tenant_id: str,
        start_date: Optional[date] = None,
        end_date: Optional[date] = None
    ) -> Optional[Dict[str, Any]]:
        """Get predictions performance analytics"""
        params = {}
        if start_date:
            params["start_date"] = start_date.isoformat()
        if end_date:
            params["end_date"] = end_date.isoformat()

        return await self.get("forecasting/analytics/predictions-performance", tenant_id=tenant_id, params=params)

    # ================================================================
    # ML INSIGHTS: Dynamic Rules Generation
    # ================================================================

    async def trigger_rules_generation(
        self,
        tenant_id: str,
        product_ids: Optional[List[str]] = None,
        lookback_days: int = 90,
        min_samples: int = 10
    ) -> Optional[Dict[str, Any]]:
        """
        Trigger dynamic business rules learning for demand forecasting.

        Args:
            tenant_id: Tenant UUID
            product_ids: Specific product IDs to analyze. If None, analyzes all products
            lookback_days: Days of historical data to analyze (30-365)
            min_samples: Minimum samples required for rule learning (5-100)

        Returns:
            Dict with rules generation results including insights posted
        """
        data = {
            "product_ids": product_ids,
            "lookback_days": lookback_days,
            "min_samples": min_samples
        }
        return await self.post("forecasting/ml/insights/generate-rules", data=data, tenant_id=tenant_id)

    async def trigger_demand_insights_internal(
        self,
        tenant_id: str
    ) -> Optional[Dict[str, Any]]:
        """
        Trigger demand forecasting insights for a tenant (internal service use only).

        This method calls the internal endpoint, which is protected by the
        x-internal-service header. Used by the demo-session service after cloning
        to generate AI insights from seeded data.

        Args:
            tenant_id: Tenant ID to trigger insights for

        Returns:
            Dict with trigger results, or None if the call failed
        """
        try:
            result = await self._make_request(
                method="POST",
                endpoint="forecasting/internal/ml/generate-demand-insights",
                tenant_id=tenant_id,
                data={"tenant_id": tenant_id},
                headers={"x-internal-service": "demo-session"}
            )

            if result:
                logger.info(
                    "Demand insights triggered successfully via internal endpoint",
                    tenant_id=tenant_id,
                    insights_posted=result.get("insights_posted", 0)
                )
            else:
                logger.warning(
                    "Demand insights internal endpoint returned no result",
                    tenant_id=tenant_id
                )

            return result

        except Exception as e:
            logger.error(
                "Failed to trigger demand insights",
                tenant_id=tenant_id,
                error=str(e)
            )
            return None

    # ================================================================
    # Legacy/Compatibility Methods (deprecated)
    # ================================================================

    async def generate_forecasts(
        self,
        tenant_id: str,
        forecast_days: int = 7,
        inventory_product_ids: Optional[List[str]] = None
    ) -> Optional[Dict[str, Any]]:
        """
        COMPATIBILITY: Orchestrator-friendly method to generate forecasts.

        This method is called by the orchestrator service and generates batch
        forecasts for either the specified products or all products.

        Args:
            tenant_id: Tenant UUID
            forecast_days: Number of days to forecast (default 7)
            inventory_product_ids: Optional list of product IDs. If None, forecasts all products.

        Returns:
            Dict with forecast results
        """
        from datetime import datetime

        # If no product IDs are specified, let the backend handle it
        if not inventory_product_ids:
            # Call the batch operation endpoint to forecast all products;
            # the forecasting service fetches the full product list internally
            data = {
                "batch_name": f"orchestrator-batch-{datetime.now().strftime('%Y%m%d')}",
                "inventory_product_ids": [],  # Empty list triggers fetching all products
                "forecast_days": forecast_days
            }
            return await self.post("forecasting/operations/batch", data=data, tenant_id=tenant_id)

        # Otherwise use the standard batch forecast
        return await self.generate_batch_forecast(
            tenant_id=tenant_id,
            inventory_product_ids=inventory_product_ids,
            forecast_date=datetime.now().date(),
            forecast_days=forecast_days
        )
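
    # Usage sketch for the compatibility method (assumes an async context and
    # an initialized client; the product IDs are hypothetical):
    #
    #     # Forecast the whole catalog for the next week, as the orchestrator does
    #     all_products = await client.generate_forecasts(tenant_id=tenant_id, forecast_days=7)
    #
    #     # Or restrict the batch to known products
    #     subset = await client.generate_forecasts(
    #         tenant_id=tenant_id,
    #         forecast_days=3,
    #         inventory_product_ids=[product_id_1, product_id_2],
    #     )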

    async def get_aggregated_forecast(
        self,
        parent_tenant_id: str,
        start_date: date,
        end_date: date,
        product_id: Optional[str] = None
    ) -> Dict[str, Any]:
        """
        Get an aggregated forecast for an enterprise tenant and all of its children.

        This method calls the enterprise forecasting aggregation endpoint, which
        combines demand forecasts across the parent tenant and all child tenants
        in the network. Used for centralized production planning.

        Args:
            parent_tenant_id: The parent tenant (central bakery) UUID
            start_date: Start date for the forecast range
            end_date: End date for the forecast range
            product_id: Optional product ID to filter forecasts

        Returns:
            Aggregated forecast data including:
            - total_demand: Sum of all child demands
            - child_contributions: Per-child demand breakdown
            - forecast_date_range: Date range for the forecast
            - cached: Whether data was served from the Redis cache
        """
        params = {
            "start_date": start_date.isoformat(),
            "end_date": end_date.isoformat()
        }
        if product_id:
            params["product_id"] = product_id

        # Use _make_request directly: base_service_client adds a /tenants/{tenant_id}/
        # prefix when tenant_id is passed, and the gateway route is already
        # /api/v1/tenants/{tenant_id}/forecasting/enterprise/{path}. Building the
        # full path here avoids double prefixing.
        return await self._make_request(
            "GET",
            f"tenants/{parent_tenant_id}/forecasting/enterprise/aggregated",
            params=params
        )
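
    # Usage sketch for enterprise aggregation (hedged: requires the Enterprise
    # tier per the module docstring; the parent tenant ID is hypothetical):
    #
    #     from datetime import date, timedelta
    #
    #     network = await client.get_aggregated_forecast(
    #         parent_tenant_id=parent_tenant_id,
    #         start_date=date.today(),
    #         end_date=date.today() + timedelta(days=7),
    #     )
    #     total = network.get("total_demand")
    #     by_outlet = network.get("child_contributions", {})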

    async def create_forecast(
        self,
        tenant_id: str,
        model_id: str,
        start_date: str,
        end_date: str,
        product_ids: Optional[List[str]] = None,
        include_confidence_intervals: bool = True,
        **kwargs
    ) -> Optional[Dict[str, Any]]:
        """
        DEPRECATED: Use generate_single_forecast or generate_batch_forecast instead.
        Legacy method kept for backward compatibility.
        """
        # Map to the new batch forecast operation
        if product_ids:
            return await self.generate_batch_forecast(
                tenant_id=tenant_id,
                inventory_product_ids=product_ids,
                forecast_date=date.fromisoformat(start_date),
                forecast_days=1
            )
        return None


# Backward compatibility alias
def create_forecast_client(config: BaseServiceSettings, service_name: str = "unknown") -> ForecastServiceClient:
    """Create a forecast service client (backward compatibility)"""
    return ForecastServiceClient(config, service_name)

871
shared/clients/inventory_client.py
Executable file
@@ -0,0 +1,871 @@
# shared/clients/inventory_client.py
"""
Inventory Service Client - Inter-service communication
Handles communication with the inventory service on behalf of all other services
"""

import structlog
from typing import Dict, Any, List, Optional, Union
from uuid import UUID

from shared.clients.base_service_client import BaseServiceClient
from shared.config.base import BaseServiceSettings

logger = structlog.get_logger()


class InventoryServiceClient(BaseServiceClient):
    """Client for communicating with the inventory service via the gateway"""

    def __init__(self, config: BaseServiceSettings, calling_service_name: str = "unknown"):
        super().__init__(calling_service_name, config)

    def get_service_base_path(self) -> str:
        """Return the base path for inventory service APIs"""
        return "/api/v1"

    # ================================================================
    # INGREDIENT MANAGEMENT
    # ================================================================

    async def get_ingredient_by_id(self, ingredient_id: UUID, tenant_id: str) -> Optional[Dict[str, Any]]:
        """Get ingredient details by ID"""
        try:
            result = await self.get(f"inventory/ingredients/{ingredient_id}", tenant_id=tenant_id)
            if result:
                logger.info("Retrieved ingredient from inventory service",
                            ingredient_id=ingredient_id, tenant_id=tenant_id)
            return result
        except Exception as e:
            logger.error("Error fetching ingredient by ID",
                         error=str(e), ingredient_id=ingredient_id, tenant_id=tenant_id)
            return None

    async def search_ingredients(
        self,
        tenant_id: str,
        search: Optional[str] = None,
        category: Optional[str] = None,
        is_active: Optional[bool] = None,
        skip: int = 0,
        limit: int = 100
    ) -> List[Dict[str, Any]]:
        """Search ingredients with filters"""
        try:
            params = {
                "skip": skip,
                "limit": limit
            }

            if search:
                params["search"] = search
            if category:
                params["category"] = category
            if is_active is not None:
                params["is_active"] = is_active

            result = await self.get("inventory/ingredients", tenant_id=tenant_id, params=params)
            ingredients = result if isinstance(result, list) else []

            logger.info("Searched ingredients in inventory service",
                        search_term=search, count=len(ingredients), tenant_id=tenant_id)
            return ingredients

        except Exception as e:
            logger.error("Error searching ingredients",
                         error=str(e), search=search, tenant_id=tenant_id)
            return []

    async def get_all_ingredients(self, tenant_id: str, is_active: Optional[bool] = True) -> List[Dict[str, Any]]:
        """Get all ingredients for a tenant (paginated)"""
        try:
            params = {}
            if is_active is not None:
                params["is_active"] = is_active

            ingredients = await self.get_paginated("inventory/ingredients", tenant_id=tenant_id, params=params)

            logger.info("Retrieved all ingredients from inventory service",
                        count=len(ingredients), tenant_id=tenant_id)
            return ingredients

        except Exception as e:
            logger.error("Error fetching all ingredients",
                         error=str(e), tenant_id=tenant_id)
            return []

    async def count_ingredients(self, tenant_id: str, is_active: Optional[bool] = True) -> int:
        """Get the count of ingredients for a tenant"""
        try:
            params = {}
            if is_active is not None:
                params["is_active"] = is_active

            result = await self.get("inventory/ingredients/count", tenant_id=tenant_id, params=params)
            count = result.get("ingredient_count", 0) if isinstance(result, dict) else 0

            logger.info("Retrieved ingredient count from inventory service",
                        count=count, tenant_id=tenant_id)
            return count

        except Exception as e:
            logger.error("Error fetching ingredient count",
                         error=str(e), tenant_id=tenant_id)
            return 0

    async def create_ingredient(self, ingredient_data: Dict[str, Any], tenant_id: str) -> Optional[Dict[str, Any]]:
        """Create a new ingredient"""
        try:
            result = await self.post("inventory/ingredients", data=ingredient_data, tenant_id=tenant_id)
            if result:
                logger.info("Created ingredient in inventory service",
                            ingredient_name=ingredient_data.get('name'), tenant_id=tenant_id)
            return result
        except Exception as e:
            logger.error("Error creating ingredient",
                         error=str(e), ingredient_data=ingredient_data, tenant_id=tenant_id)
            return None

    async def update_ingredient(
        self,
        ingredient_id: UUID,
        ingredient_data: Dict[str, Any],
        tenant_id: str
    ) -> Optional[Dict[str, Any]]:
        """Update an existing ingredient"""
        try:
            result = await self.put(f"inventory/ingredients/{ingredient_id}", data=ingredient_data, tenant_id=tenant_id)
            if result:
                logger.info("Updated ingredient in inventory service",
                            ingredient_id=ingredient_id, tenant_id=tenant_id)
            return result
        except Exception as e:
            logger.error("Error updating ingredient",
                         error=str(e), ingredient_id=ingredient_id, tenant_id=tenant_id)
            return None

    async def delete_ingredient(self, ingredient_id: UUID, tenant_id: str) -> bool:
        """Delete (deactivate) an ingredient"""
        try:
            result = await self.delete(f"inventory/ingredients/{ingredient_id}", tenant_id=tenant_id)
            success = result is not None
            if success:
                logger.info("Deleted ingredient in inventory service",
                            ingredient_id=ingredient_id, tenant_id=tenant_id)
            return success
        except Exception as e:
            logger.error("Error deleting ingredient",
                         error=str(e), ingredient_id=ingredient_id, tenant_id=tenant_id)
            return False

    async def get_ingredient_stock(
        self,
        ingredient_id: UUID,
        tenant_id: str,
        include_unavailable: bool = False
    ) -> List[Dict[str, Any]]:
        """Get stock entries for an ingredient"""
        try:
            params = {}
            if include_unavailable:
                params["include_unavailable"] = include_unavailable

            result = await self.get(f"inventory/ingredients/{ingredient_id}/stock", tenant_id=tenant_id, params=params)
            stock_entries = result if isinstance(result, list) else []

            logger.info("Retrieved ingredient stock from inventory service",
                        ingredient_id=ingredient_id, stock_count=len(stock_entries), tenant_id=tenant_id)
            return stock_entries

        except Exception as e:
            logger.error("Error fetching ingredient stock",
                         error=str(e), ingredient_id=ingredient_id, tenant_id=tenant_id)
            return []

    # ================================================================
    # STOCK MANAGEMENT
    # ================================================================

    async def get_stock_levels(self, tenant_id: str, ingredient_ids: Optional[List[UUID]] = None) -> List[Dict[str, Any]]:
        """Get current stock levels"""
        try:
            params = {}
            if ingredient_ids:
                params["ingredient_ids"] = [str(id) for id in ingredient_ids]

            result = await self.get("inventory/stock", tenant_id=tenant_id, params=params)
            stock_levels = result if isinstance(result, list) else []

            logger.info("Retrieved stock levels from inventory service",
                        count=len(stock_levels), tenant_id=tenant_id)
            return stock_levels

        except Exception as e:
            logger.error("Error fetching stock levels",
                         error=str(e), tenant_id=tenant_id)
            return []

    async def get_low_stock_alerts(self, tenant_id: str) -> List[Dict[str, Any]]:
        """Get low stock alerts"""
        try:
            result = await self.get("inventory/alerts", tenant_id=tenant_id, params={"type": "low_stock"})
            alerts = result if isinstance(result, list) else []

            logger.info("Retrieved low stock alerts from inventory service",
                        count=len(alerts), tenant_id=tenant_id)
            return alerts

        except Exception as e:
            logger.error("Error fetching low stock alerts",
                         error=str(e), tenant_id=tenant_id)
            return []

    async def consume_stock(
        self,
        consumption_data: Dict[str, Any],
        tenant_id: str
    ) -> Optional[Dict[str, Any]]:
        """Record stock consumption"""
        try:
            result = await self.post("inventory/operations/consume-stock", data=consumption_data, tenant_id=tenant_id)
            if result:
                logger.info("Recorded stock consumption", tenant_id=tenant_id)
            return result
        except Exception as e:
            logger.error("Error recording stock consumption",
                         error=str(e), tenant_id=tenant_id)
            return None

    async def receive_stock(
        self,
        receipt_data: Dict[str, Any],
        tenant_id: str
    ) -> Optional[Dict[str, Any]]:
        """Record a stock receipt"""
        try:
            result = await self.post("inventory/operations/receive-stock", data=receipt_data, tenant_id=tenant_id)
            if result:
                logger.info("Recorded stock receipt", tenant_id=tenant_id)
            return result
        except Exception as e:
            logger.error("Error recording stock receipt",
                         error=str(e), tenant_id=tenant_id)
            return None

    # ================================================================
    # PRODUCT CLASSIFICATION (for onboarding)
    # ================================================================

    async def classify_product(
        self,
        product_name: str,
        sales_volume: Optional[float],
        tenant_id: str
    ) -> Optional[Dict[str, Any]]:
        """Classify a single product for inventory creation"""
        try:
            classification_data = {
                "product_name": product_name,
                "sales_volume": sales_volume
            }

            result = await self.post("inventory/operations/classify-product", data=classification_data, tenant_id=tenant_id)
            if result:
                logger.info("Classified product",
                            product=product_name,
                            classification=result.get('product_type'),
                            confidence=result.get('confidence_score'),
                            tenant_id=tenant_id)
            return result

        except Exception as e:
            logger.error("Error classifying product",
                         error=str(e), product=product_name, tenant_id=tenant_id)
            return None

    async def classify_products_batch(
        self,
        products: List[Dict[str, Any]],
        tenant_id: str
    ) -> Optional[Dict[str, Any]]:
        """Classify multiple products for onboarding automation"""
        try:
            classification_data = {
                "products": products
            }

            result = await self.post("inventory/operations/classify-products-batch", data=classification_data, tenant_id=tenant_id)
            if result:
                suggestions = result.get('suggestions', [])
                business_model = result.get('business_model_analysis', {}).get('model', 'unknown')

                logger.info("Batch classification complete",
                            total_products=len(suggestions),
                            business_model=business_model,
                            tenant_id=tenant_id)
            return result

        except Exception as e:
            logger.error("Error in batch classification",
                         error=str(e), products_count=len(products), tenant_id=tenant_id)
            return None

    async def resolve_or_create_products_batch(
        self,
        products: List[Dict[str, Any]],
        tenant_id: str
    ) -> Optional[Dict[str, Any]]:
        """Resolve or create multiple products in a single batch operation"""
        try:
            batch_data = {
                "products": products
            }

            result = await self.post("inventory/operations/resolve-or-create-products-batch",
                                     data=batch_data, tenant_id=tenant_id)
            if result:
                created = result.get('created_count', 0)
                resolved = result.get('resolved_count', 0)
                failed = result.get('failed_count', 0)

                logger.info("Batch product resolution complete",
                            created=created,
                            resolved=resolved,
                            failed=failed,
                            total=len(products),
                            tenant_id=tenant_id)
            return result

        except Exception as e:
            logger.error("Error in batch product resolution",
                         error=str(e), products_count=len(products), tenant_id=tenant_id)
            return None

    # ================================================================
    # DASHBOARD AND ANALYTICS
    # ================================================================

    async def get_inventory_dashboard(self, tenant_id: str) -> Optional[Dict[str, Any]]:
        """Get inventory dashboard data"""
        try:
            result = await self.get("inventory/dashboard/overview", tenant_id=tenant_id)
            if result:
                logger.info("Retrieved inventory dashboard data", tenant_id=tenant_id)
            return result
        except Exception as e:
            logger.error("Error fetching inventory dashboard",
                         error=str(e), tenant_id=tenant_id)
            return None

    async def get_inventory_summary(self, tenant_id: str) -> Optional[Dict[str, Any]]:
        """Get inventory summary statistics"""
        try:
            result = await self.get("inventory/dashboard/summary", tenant_id=tenant_id)
            if result:
                logger.info("Retrieved inventory summary", tenant_id=tenant_id)
            return result
        except Exception as e:
            logger.error("Error fetching inventory summary",
                         error=str(e), tenant_id=tenant_id)
            return None

    # ================================================================
    # PRODUCT TRANSFORMATION
    # ================================================================

    async def create_transformation(
        self,
        transformation_data: Dict[str, Any],
        tenant_id: str
    ) -> Optional[Dict[str, Any]]:
        """Create a product transformation (e.g., par-baked to fully baked)"""
        try:
            result = await self.post("inventory/transformations", data=transformation_data, tenant_id=tenant_id)
            if result:
                logger.info("Created product transformation",
                            transformation_reference=result.get('transformation_reference'),
                            source_stage=transformation_data.get('source_stage'),
                            target_stage=transformation_data.get('target_stage'),
                            tenant_id=tenant_id)
            return result
        except Exception as e:
            logger.error("Error creating transformation",
                         error=str(e), transformation_data=transformation_data, tenant_id=tenant_id)
            return None

    async def create_par_bake_transformation(
        self,
        source_ingredient_id: Union[str, UUID],
        target_ingredient_id: Union[str, UUID],
        quantity: float,
        tenant_id: str,
        target_batch_number: Optional[str] = None,
        expiration_hours: int = 24,
        notes: Optional[str] = None
    ) -> Optional[Dict[str, Any]]:
        """Convenience method for the par-baked to fresh transformation"""
        try:
            params = {
                "source_ingredient_id": str(source_ingredient_id),
                "target_ingredient_id": str(target_ingredient_id),
                "quantity": quantity,
                "expiration_hours": expiration_hours
            }

            if target_batch_number:
                params["target_batch_number"] = target_batch_number
            if notes:
                params["notes"] = notes

            result = await self.post("inventory/transformations/par-bake-to-fresh", params=params, tenant_id=tenant_id)
            if result:
                logger.info("Created par-bake transformation",
                            transformation_id=result.get('transformation_id'),
                            quantity=quantity, tenant_id=tenant_id)
            return result
        except Exception as e:
            logger.error("Error creating par-bake transformation",
                         error=str(e), source_ingredient_id=source_ingredient_id,
                         target_ingredient_id=target_ingredient_id, tenant_id=tenant_id)
            return None

    async def get_transformations(
        self,
        tenant_id: str,
        ingredient_id: Optional[Union[str, UUID]] = None,
        source_stage: Optional[str] = None,
        target_stage: Optional[str] = None,
        days_back: Optional[int] = None,
        skip: int = 0,
        limit: int = 100
    ) -> List[Dict[str, Any]]:
        """Get product transformations with filtering"""
        try:
            params = {
                "skip": skip,
                "limit": limit
            }

            if ingredient_id:
                params["ingredient_id"] = str(ingredient_id)
            if source_stage:
                params["source_stage"] = source_stage
            if target_stage:
                params["target_stage"] = target_stage
            if days_back:
                params["days_back"] = days_back

            result = await self.get("inventory/transformations", tenant_id=tenant_id, params=params)
            transformations = result if isinstance(result, list) else []

            logger.info("Retrieved transformations from inventory service",
                        count=len(transformations), tenant_id=tenant_id)
            return transformations

        except Exception as e:
            logger.error("Error fetching transformations",
                         error=str(e), tenant_id=tenant_id)
            return []

    async def get_transformation_by_id(
        self,
        transformation_id: Union[str, UUID],
        tenant_id: str
    ) -> Optional[Dict[str, Any]]:
        """Get a specific transformation by ID"""
        try:
            result = await self.get(f"inventory/transformations/{transformation_id}", tenant_id=tenant_id)
            if result:
                logger.info("Retrieved transformation by ID",
                            transformation_id=transformation_id, tenant_id=tenant_id)
            return result
        except Exception as e:
            logger.error("Error fetching transformation by ID",
                         error=str(e), transformation_id=transformation_id, tenant_id=tenant_id)
            return None

    async def get_transformation_summary(
        self,
        tenant_id: str,
        days_back: int = 30
    ) -> Optional[Dict[str, Any]]:
        """Get a transformation summary for the dashboard"""
        try:
            params = {"days_back": days_back}
            result = await self.get("inventory/dashboard/transformations-summary", tenant_id=tenant_id, params=params)
            if result:
                logger.info("Retrieved transformation summary",
                            days_back=days_back, tenant_id=tenant_id)
            return result
        except Exception as e:
            logger.error("Error fetching transformation summary",
                         error=str(e), tenant_id=tenant_id)
            return None

    # ================================================================
    # BATCH OPERATIONS (NEW - for Orchestrator optimization)
    # ================================================================

    async def get_ingredients_batch(
        self,
        tenant_id: str,
        ingredient_ids: List[UUID]
    ) -> Dict[str, Any]:
        """
        Fetch multiple ingredients in a single request.

        This method reduces N API calls to 1, significantly improving
        performance when fetching data for multiple ingredients.

        Args:
            tenant_id: Tenant ID
            ingredient_ids: List of ingredient IDs to fetch

        Returns:
            Dict with 'ingredients', 'found_count', and 'missing_ids'
        """
        try:
            if not ingredient_ids:
                return {
                    'ingredients': [],
                    'found_count': 0,
                    'missing_ids': []
                }

            # Convert UUIDs to strings for JSON serialization
            ids_str = [str(id) for id in ingredient_ids]

            result = await self.post(
                "inventory/operations/ingredients/batch",
                data={"ingredient_ids": ids_str},
                tenant_id=tenant_id
            )

            if result:
                logger.info(
                    "Retrieved ingredients in batch",
                    requested=len(ingredient_ids),
                    found=result.get('found_count', 0),
                    tenant_id=tenant_id
                )

            return result or {'ingredients': [], 'found_count': 0, 'missing_ids': ids_str}

        except Exception as e:
            logger.error(
                "Error fetching ingredients in batch",
                error=str(e),
                count=len(ingredient_ids),
                tenant_id=tenant_id
            )
            return {'ingredients': [], 'found_count': 0, 'missing_ids': [str(id) for id in ingredient_ids]}
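
    # Usage sketch contrasting N single fetches with one batch call (the IDs
    # are hypothetical; `client` is an initialized instance in an async context):
    #
    #     # Instead of:  [await client.get_ingredient_by_id(i, tenant_id) for i in ids]
    #     batch = await client.get_ingredients_batch(tenant_id=tenant_id, ingredient_ids=ids)
    #     for ingredient in batch["ingredients"]:
    #         ...  # one round-trip instead of len(ids)
    #     if batch["missing_ids"]:
    #         logger.warning("Some ingredients were not found", missing=batch["missing_ids"])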

    async def get_stock_levels_batch(
        self,
        tenant_id: str,
        ingredient_ids: List[UUID]
    ) -> Dict[str, float]:
        """
        Fetch stock levels for multiple ingredients in a single request.

        Args:
            tenant_id: Tenant ID
            ingredient_ids: List of ingredient IDs

        Returns:
            Dict mapping ingredient_id (str) to stock level (float)
        """
        try:
            if not ingredient_ids:
                return {}

            # Convert UUIDs to strings for JSON serialization
            ids_str = [str(id) for id in ingredient_ids]

            result = await self.post(
                "inventory/operations/stock-levels/batch",
                data={"ingredient_ids": ids_str},
                tenant_id=tenant_id
            )

            stock_levels = result.get('stock_levels', {}) if result else {}

            logger.info(
                "Retrieved stock levels in batch",
                requested=len(ingredient_ids),
                found=len(stock_levels),
                tenant_id=tenant_id
            )

            return stock_levels

        except Exception as e:
            logger.error(
                "Error fetching stock levels in batch",
                error=str(e),
                count=len(ingredient_ids),
                tenant_id=tenant_id
            )
            return {}

    # ================================================================
    # ML INSIGHTS: Safety Stock Optimization
    # ================================================================

    async def trigger_safety_stock_optimization(
        self,
        tenant_id: str,
        product_ids: Optional[List[str]] = None,
        lookback_days: int = 90,
        min_history_days: int = 30
    ) -> Optional[Dict[str, Any]]:
        """
        Trigger safety stock optimization for inventory products.

        Args:
            tenant_id: Tenant UUID
            product_ids: Specific product IDs to optimize. If None, optimizes all products
            lookback_days: Days of historical demand to analyze (30-365)
            min_history_days: Minimum days of history required (7-180)

        Returns:
            Dict with optimization results including insights posted
        """
        try:
            data = {
                "product_ids": product_ids,
                "lookback_days": lookback_days,
                "min_history_days": min_history_days
            }
            result = await self.post("inventory/ml/insights/optimize-safety-stock", data=data, tenant_id=tenant_id)
            if result:
                logger.info("Triggered safety stock optimization",
                            products_optimized=result.get('products_optimized', 0),
                            insights_posted=result.get('total_insights_posted', 0),
                            tenant_id=tenant_id)
            return result
        except Exception as e:
            logger.error("Error triggering safety stock optimization",
                         error=str(e), tenant_id=tenant_id)
            return None

    # ================================================================
    # DASHBOARD METHODS
    # ================================================================

    async def get_inventory_summary_batch(
        self,
        tenant_ids: List[str]
    ) -> Dict[str, Any]:
        """
        Get inventory summaries for multiple tenants in a single request.

        Phase 2 optimization: eliminates N+1 query patterns for enterprise dashboards.

        Args:
            tenant_ids: List of tenant IDs to fetch

        Returns:
            Dict mapping tenant_id -> inventory summary
        """
        try:
            if not tenant_ids:
                return {}

            if len(tenant_ids) > 100:
                logger.warning("Batch request exceeds max tenant limit", requested=len(tenant_ids))
                tenant_ids = tenant_ids[:100]

            result = await self.post(
                "inventory/batch/inventory-summary",
                data={"tenant_ids": tenant_ids},
                tenant_id=tenant_ids[0]  # Use the first tenant for auth context
            )

            summaries = result if isinstance(result, dict) else {}

            logger.info(
                "Batch retrieved inventory summaries",
                requested=len(tenant_ids),
                found=len(summaries)
            )

            return summaries

        except Exception as e:
            logger.error(
                "Error batch fetching inventory summaries",
                error=str(e),
                tenant_count=len(tenant_ids)
            )
            return {}
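
    # Usage sketch for an enterprise dashboard (hedged: how the child tenant
    # IDs are obtained is outside this client; `child_tenant_ids` is assumed):
    #
    #     summaries = await client.get_inventory_summary_batch(child_tenant_ids)
    #     for child_id, summary in summaries.items():
    #         ...  # render one dashboard row per outlet, with no N+1 calls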

    async def get_stock_status(
        self,
        tenant_id: str
    ) -> Optional[Dict[str, Any]]:
        """
        Get inventory stock status for dashboard insights

        Args:
            tenant_id: Tenant ID

        Returns:
            Dict with stock counts and status metrics
        """
        try:
            return await self.get(
                "/inventory/dashboard/stock-status",
                tenant_id=tenant_id
            )
        except Exception as e:
            logger.error("Error fetching stock status", error=str(e), tenant_id=tenant_id)
            return None

    async def get_sustainability_widget(
        self,
        tenant_id: str
    ) -> Optional[Dict[str, Any]]:
        """
        Get sustainability metrics for the dashboard

        Args:
            tenant_id: Tenant ID

        Returns:
            Dict with sustainability metrics (waste, CO2, etc.)
        """
        try:
            return await self.get(
                "/sustainability/widget",
                tenant_id=tenant_id
            )
        except Exception as e:
            logger.error("Error fetching sustainability widget", error=str(e), tenant_id=tenant_id)
            return None

    # ================================================================
    # UTILITY METHODS
    # ================================================================

    async def health_check(self) -> bool:
        """Check if the inventory service is healthy"""
        try:
            result = await self.get("../health")  # Health endpoint is not tenant-scoped
            return result is not None
        except Exception as e:
            logger.error("Inventory service health check failed", error=str(e))
            return False

    async def trigger_inventory_alerts_internal(
        self,
        tenant_id: str
    ) -> Optional[Dict[str, Any]]:
        """
        Trigger inventory alerts for a tenant (internal service use only).

        This method calls the internal endpoint, which is protected by the
        x-internal-service header. The endpoint triggers alerts specifically
        for the given tenant.

        Args:
            tenant_id: Tenant ID to trigger alerts for

        Returns:
            Dict with trigger results, or None if the call failed
        """
        try:
            # Call the internal endpoint via the gateway using the tenant-scoped URL pattern
            # Endpoint: /api/v1/tenants/{tenant_id}/inventory/internal/alerts/trigger
            result = await self._make_request(
                method="POST",
                endpoint="inventory/internal/alerts/trigger",
                tenant_id=tenant_id,
                data={},
                headers={"x-internal-service": "demo-session"}
            )

            if result:
                logger.info(
                    "Inventory alerts triggered successfully via internal endpoint",
                    tenant_id=tenant_id,
                    alerts_generated=result.get("alerts_generated", 0)
                )
            else:
                logger.warning(
                    "Inventory alerts internal endpoint returned no result",
                    tenant_id=tenant_id
                )

            return result

        except Exception as e:
            logger.error(
                "Error triggering inventory alerts via internal endpoint",
                tenant_id=tenant_id,
                error=str(e)
            )
            return None

    # ================================================================
    # INTERNAL AI INSIGHTS METHODS
    # ================================================================

    async def trigger_safety_stock_insights_internal(
        self,
        tenant_id: str
    ) -> Optional[Dict[str, Any]]:
        """
        Trigger safety stock optimization insights for a tenant (internal service use only).

        This method calls the internal endpoint, which is protected by the
        x-internal-service header.

        Args:
            tenant_id: Tenant ID to trigger insights for

        Returns:
            Dict with trigger results, or None if the call failed
        """
        try:
            result = await self._make_request(
                method="POST",
                endpoint="inventory/internal/ml/generate-safety-stock-insights",
                tenant_id=tenant_id,
                data={"tenant_id": tenant_id},
                headers={"x-internal-service": "demo-session"}
            )

            if result:
                logger.info(
                    "Safety stock insights triggered successfully via internal endpoint",
                    tenant_id=tenant_id,
                    insights_posted=result.get("insights_posted", 0)
                )
            else:
                logger.warning(
                    "Safety stock insights internal endpoint returned no result",
                    tenant_id=tenant_id
                )

            return result

        except Exception as e:
            logger.error(
                "Error triggering safety stock insights via internal endpoint",
                tenant_id=tenant_id,
                error=str(e)
            )
            return None


# Factory function for dependency injection
def create_inventory_client(config: BaseServiceSettings, service_name: str = "unknown") -> InventoryServiceClient:
    """Create an inventory service client instance"""
    return InventoryServiceClient(config, calling_service_name=service_name)


# Convenience function for quick access (requires config to be passed)
async def get_inventory_client(config: BaseServiceSettings) -> InventoryServiceClient:
    """Get an inventory service client instance"""
    return create_inventory_client(config)

418
shared/clients/minio_client.py
Normal file
@@ -0,0 +1,418 @@
"""
MinIO Client Library
Shared client for MinIO object storage operations with TLS support
"""

import os
import io
import ssl
import time
import urllib3
from typing import Optional, Dict, Any, Union
from pathlib import Path
from functools import wraps

from minio import Minio
from minio.error import S3Error
import structlog

# Configure logger
logger = structlog.get_logger()


def with_retry(max_retries: int = 3, base_delay: float = 1.0, max_delay: float = 30.0):
    """Decorator for retrying operations with exponential backoff

    Args:
        max_retries: Maximum number of retry attempts
        base_delay: Initial delay between retries in seconds
        max_delay: Maximum delay between retries in seconds
    """
    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            last_exception = None
            for attempt in range(max_retries + 1):
                try:
                    return func(*args, **kwargs)
                except (S3Error, urllib3.exceptions.HTTPError, ConnectionError, TimeoutError) as e:
                    last_exception = e
                    if attempt < max_retries:
                        # Exponential backoff, capped at max_delay
                        delay = min(base_delay * (2 ** attempt), max_delay)
                        logger.warning(
                            f"MinIO operation failed, retrying in {delay:.1f}s",
                            attempt=attempt + 1,
                            max_retries=max_retries,
                            error=str(e)
                        )
                        time.sleep(delay)
                    else:
                        logger.error(
                            "MinIO operation failed after all retries",
                            attempts=max_retries + 1,
                            error=str(e)
                        )
                        raise last_exception
        return wrapper
    return decorator
|
||||
|
||||
|
||||
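# Usage sketch for the decorator above (illustrative, not part of the module):
# any synchronous callable that raises S3Error or connection errors can be
# wrapped. Bucket and object names here are placeholders.
#
#     @with_retry(max_retries=2, base_delay=0.5)
#     def fetch_manifest(client: Minio) -> bytes:
#         response = client.get_object("artifacts", "manifest.json")
#         return response.read()
#
# With max_retries=2 the call is attempted up to 3 times, sleeping 0.5s and
# then 1.0s between attempts before the last exception is re-raised.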
class MinIOClient:
    """Client for MinIO object storage operations with TLS support"""

    def __init__(self):
        """Initialize MinIO client with configuration"""
        self._client = None
        self._initialize_client()

    def _initialize_client(self) -> None:
        """Initialize MinIO client from environment variables with SSL/TLS support"""
        try:
            # Get configuration from environment
            endpoint = os.getenv("MINIO_ENDPOINT", "minio.bakery-ia.svc.cluster.local:9000")
            access_key = os.getenv("MINIO_ACCESS_KEY", os.getenv("MINIO_ROOT_USER", "admin"))
            secret_key = os.getenv("MINIO_SECRET_KEY", os.getenv("MINIO_ROOT_PASSWORD", "secure-password"))
            use_ssl = os.getenv("MINIO_USE_SSL", "true").lower() == "true"

            # TLS certificate paths (optional - for cert verification)
            ca_cert_path = os.getenv("MINIO_CA_CERT_PATH", "/etc/ssl/certs/minio-ca.crt")
            # SSL verification is disabled by default for internal cluster with self-signed certs
            # Set MINIO_VERIFY_SSL=true and provide CA cert path for production with proper certs
            verify_ssl = os.getenv("MINIO_VERIFY_SSL", "false").lower() == "true"

            # Try to get settings from service configuration if available
            try:
                from app.core.config import settings
                if hasattr(settings, 'MINIO_ENDPOINT'):
                    endpoint = settings.MINIO_ENDPOINT
                    access_key = settings.MINIO_ACCESS_KEY
                    secret_key = settings.MINIO_SECRET_KEY
                    use_ssl = settings.MINIO_USE_SSL
            except ImportError:
                # Fallback to environment variables (for shared client usage)
                pass

            # Configure HTTP client with TLS settings
            http_client = None
            if use_ssl:
                # Create custom HTTP client for TLS
                if verify_ssl and os.path.exists(ca_cert_path):
                    # Verify certificates against CA
                    http_client = urllib3.PoolManager(
                        timeout=urllib3.Timeout(connect=10.0, read=60.0),
                        maxsize=10,
                        cert_reqs='CERT_REQUIRED',
                        ca_certs=ca_cert_path,
                        retries=urllib3.Retry(
                            total=5,
                            backoff_factor=0.2,
                            status_forcelist=[500, 502, 503, 504]
                        )
                    )
                    logger.info("MinIO TLS with certificate verification enabled",
                                ca_cert_path=ca_cert_path)
                else:
                    # TLS without certificate verification (for self-signed certs in internal cluster)
                    # Still encrypted, just skips cert validation
                    http_client = urllib3.PoolManager(
                        timeout=urllib3.Timeout(connect=10.0, read=60.0),
                        maxsize=10,
                        cert_reqs='CERT_NONE',
                        retries=urllib3.Retry(
                            total=5,
                            backoff_factor=0.2,
                            status_forcelist=[500, 502, 503, 504]
                        )
                    )
                    # Suppress insecure request warnings for internal cluster
                    urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
                    logger.info("MinIO TLS enabled without certificate verification (internal cluster)")

            # Initialize client with SSL/TLS
            self._client = Minio(
                endpoint,
                access_key=access_key,
                secret_key=secret_key,
                secure=use_ssl,
                http_client=http_client
            )

            logger.info("MinIO client initialized successfully",
                        endpoint=endpoint,
                        use_ssl=use_ssl,
                        verify_ssl=verify_ssl if use_ssl else False)

        except Exception as e:
            logger.error("Failed to initialize MinIO client", error=str(e))
            raise
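    # Configuration sketch (illustrative, not part of the module): pointing the
    # client at a local dev server over plain HTTP. Endpoint and credentials
    # are placeholder values. Note that the singleton at the bottom of this
    # file connects at import time, so set the environment first.
    #
    #     import os
    #     os.environ["MINIO_ENDPOINT"] = "localhost:9000"
    #     os.environ["MINIO_ACCESS_KEY"] = "minioadmin"
    #     os.environ["MINIO_SECRET_KEY"] = "minioadmin"
    #     os.environ["MINIO_USE_SSL"] = "false"
    #
    #     from shared.clients.minio_client import MinIOClient
    #     client = MinIOClient()
    #     print(client.health_check())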
    def reconnect(self) -> bool:
        """Reconnect to MinIO server

        Useful when connection is lost or credentials have changed.

        Returns:
            True if reconnection succeeded, False otherwise
        """
        try:
            logger.info("Attempting to reconnect to MinIO...")
            self._initialize_client()
            return True
        except Exception as e:
            logger.error("Failed to reconnect to MinIO", error=str(e))
            return False

    @with_retry(max_retries=3, base_delay=1.0)
    def bucket_exists(self, bucket_name: str) -> bool:
        """Check if bucket exists - handles limited permissions gracefully"""
        try:
            # First try the standard method
            return self._client.bucket_exists(bucket_name)
        except S3Error as e:
            # If we get AccessDenied, try an alternative method for limited-permission users
            if e.code == "AccessDenied":
                logger.debug("Access denied for bucket_exists, trying alternative method",
                             bucket_name=bucket_name)
                try:
                    # Try to list objects - this works with the ListBucket permission.
                    # If the bucket doesn't exist, this raises NoSuchBucket;
                    # if it exists but the user has no permission, it raises AccessDenied.
                    list(self._client.list_objects(bucket_name, recursive=False))
                    logger.debug("Bucket exists (verified via list_objects)", bucket_name=bucket_name)
                    return True
                except S3Error as list_error:
                    if list_error.code == "NoSuchBucket":
                        logger.debug("Bucket does not exist", bucket_name=bucket_name)
                        return False
                    else:
                        logger.error("Failed to check bucket existence (alternative method)",
                                     bucket_name=bucket_name,
                                     error=str(list_error))
                        return False
            else:
                logger.error("Failed to check bucket existence",
                             bucket_name=bucket_name,
                             error=str(e))
                return False
    def create_bucket(self, bucket_name: str, region: str = "us-east-1") -> bool:
        """Create a new bucket if it doesn't exist"""
        try:
            if not self.bucket_exists(bucket_name):
                self._client.make_bucket(bucket_name, region)
                logger.info("Created MinIO bucket", bucket_name=bucket_name)
                return True
            return False
        except S3Error as e:
            logger.error("Failed to create bucket",
                         bucket_name=bucket_name,
                         error=str(e))
            return False

    @with_retry(max_retries=3, base_delay=1.0)
    def put_object(
        self,
        bucket_name: str,
        object_name: str,
        data: Union[bytes, io.BytesIO, str, Path],
        length: Optional[int] = None,
        content_type: str = "application/octet-stream",
        metadata: Optional[Dict[str, str]] = None
    ) -> bool:
        """Upload an object to MinIO

        Args:
            bucket_name: Target bucket name
            object_name: Object key/path in the bucket
            data: Data to upload (bytes, BytesIO, string, or Path)
            length: Optional data length (calculated automatically if not provided)
            content_type: MIME type of the object
            metadata: Optional metadata dictionary

        Returns:
            True if upload succeeded, False otherwise
        """
        try:
            # Ensure bucket exists
            self.create_bucket(bucket_name)

            # Convert data to bytes if needed
            if isinstance(data, str):
                data = data.encode('utf-8')
            elif isinstance(data, Path):
                with open(data, 'rb') as f:
                    data = f.read()
            elif isinstance(data, io.BytesIO):
                data = data.getvalue()

            # Calculate length if not provided
            data_length = length if length is not None else len(data)

            # MinIO SDK requires BytesIO stream and explicit length
            data_stream = io.BytesIO(data)

            # Upload object with proper stream and length
            self._client.put_object(
                bucket_name,
                object_name,
                data_stream,
                length=data_length,
                content_type=content_type,
                metadata=metadata
            )

            logger.info("Uploaded object to MinIO",
                        bucket_name=bucket_name,
                        object_name=object_name,
                        size=data_length)

            return True

        except S3Error as e:
            logger.error("Failed to upload object",
                         bucket_name=bucket_name,
                         object_name=object_name,
                         error=str(e))
            return False
    @with_retry(max_retries=3, base_delay=1.0)
    def get_object(self, bucket_name: str, object_name: str) -> Optional[bytes]:
        """Download an object from MinIO"""
        try:
            # Get object data
            response = self._client.get_object(bucket_name, object_name)
            data = response.read()

            logger.info("Downloaded object from MinIO",
                        bucket_name=bucket_name,
                        object_name=object_name,
                        size=len(data))

            return data

        except S3Error as e:
            logger.error("Failed to download object",
                         bucket_name=bucket_name,
                         object_name=object_name,
                         error=str(e))
            return None

    def object_exists(self, bucket_name: str, object_name: str) -> bool:
        """Check if object exists"""
        try:
            self._client.stat_object(bucket_name, object_name)
            return True
        except S3Error:
            return False
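    # Round-trip sketch (illustrative, not part of the module): upload a small
    # payload, confirm it landed, and read it back. Bucket and key names are
    # placeholders; `minio_client` is the singleton defined at file bottom.
    #
    #     ok = minio_client.put_object("reports", "daily/2024-01-01.json",
    #                                  b'{"sales": 120}',
    #                                  content_type="application/json")
    #     if ok and minio_client.object_exists("reports", "daily/2024-01-01.json"):
    #         payload = minio_client.get_object("reports", "daily/2024-01-01.json")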
    def list_objects(self, bucket_name: str, prefix: str = "") -> list:
        """List objects in bucket with optional prefix"""
        try:
            objects = self._client.list_objects(bucket_name, prefix=prefix, recursive=True)
            return [obj.object_name for obj in objects]
        except S3Error as e:
            logger.error("Failed to list objects",
                         bucket_name=bucket_name,
                         prefix=prefix,
                         error=str(e))
            return []

    def delete_object(self, bucket_name: str, object_name: str) -> bool:
        """Delete an object from MinIO"""
        try:
            self._client.remove_object(bucket_name, object_name)
            logger.info("Deleted object from MinIO",
                        bucket_name=bucket_name,
                        object_name=object_name)
            return True
        except S3Error as e:
            logger.error("Failed to delete object",
                         bucket_name=bucket_name,
                         object_name=object_name,
                         error=str(e))
            return False

    def get_presigned_url(
        self,
        bucket_name: str,
        object_name: str,
        expires: int = 3600
    ) -> Optional[str]:
        """Generate presigned URL for object access"""
        try:
            # minio>=7 expects a timedelta for expiry rather than raw seconds
            url = self._client.presigned_get_object(
                bucket_name,
                object_name,
                expires=timedelta(seconds=expires)
            )
            return url
        except S3Error as e:
            logger.error("Failed to generate presigned URL",
                         bucket_name=bucket_name,
                         object_name=object_name,
                         error=str(e))
            return None

    def copy_object(
        self,
        source_bucket: str,
        source_object: str,
        dest_bucket: str,
        dest_object: str
    ) -> bool:
        """Copy object within MinIO"""
        try:
            # Ensure destination bucket exists
            self.create_bucket(dest_bucket)

            # minio>=7 expects a CopySource rather than a "bucket/object" string
            self._client.copy_object(dest_bucket, dest_object,
                                     CopySource(source_bucket, source_object))

            logger.info("Copied object in MinIO",
                        source_bucket=source_bucket,
                        source_object=source_object,
                        dest_bucket=dest_bucket,
                        dest_object=dest_object)

            return True
        except S3Error as e:
            logger.error("Failed to copy object",
                         source_bucket=source_bucket,
                         source_object=source_object,
                         dest_bucket=dest_bucket,
                         dest_object=dest_object,
                         error=str(e))
            return False

    def get_object_metadata(self, bucket_name: str, object_name: str) -> Optional[Dict[str, Any]]:
        """Get object metadata"""
        try:
            stat = self._client.stat_object(bucket_name, object_name)
            return {
                "size": stat.size,
                "last_modified": stat.last_modified,
                "content_type": stat.content_type,
                "metadata": stat.metadata or {}
            }
        except S3Error as e:
            logger.error("Failed to get object metadata",
                         bucket_name=bucket_name,
                         object_name=object_name,
                         error=str(e))
            return None

    def health_check(self) -> bool:
        """Check MinIO service health"""
        try:
            # Simple bucket list to check connectivity
            self._client.list_buckets()
            return True
        except Exception as e:
            logger.error("MinIO health check failed", error=str(e))
            return False


# Singleton instance for convenience
minio_client = MinIOClient()
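A short sketch of the presigned-URL path above, assuming the module-level singleton is importable; bucket and key names are placeholders:

from shared.clients.minio_client import minio_client

# Hand out a time-limited download link instead of proxying the bytes.
url = minio_client.get_presigned_url("reports", "daily/2024-01-01.json", expires=900)
if url:
    print(f"Share this link (valid for 15 minutes): {url}")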
205
shared/clients/nominatim_client.py
Executable file
@@ -0,0 +1,205 @@
"""
Nominatim Client for geocoding and address search
"""

import structlog
import httpx
from typing import Optional, List, Dict, Any
from shared.config.base import BaseServiceSettings

logger = structlog.get_logger()


class NominatimClient:
    """
    Client for Nominatim geocoding service.

    Provides address search and geocoding capabilities for the bakery onboarding flow.
    """

    def __init__(self, config: BaseServiceSettings):
        self.config = config
        self.nominatim_url = getattr(
            config,
            "NOMINATIM_SERVICE_URL",
            "http://nominatim-service:8080"
        )
        self.timeout = 30

    async def search_address(
        self,
        query: str,
        country_codes: str = "es",
        limit: int = 5,
        addressdetails: bool = True
    ) -> List[Dict[str, Any]]:
        """
        Search for addresses matching a query.

        Args:
            query: Address search query (e.g., "Calle Mayor 1, Madrid")
            country_codes: Limit search to country codes (default: "es" for Spain)
            limit: Maximum number of results (default: 5)
            addressdetails: Include detailed address breakdown (default: True)

        Returns:
            List of geocoded results with lat, lon, and address details

        Example:
            results = await nominatim.search_address("Calle Mayor 1, Madrid")
            if results:
                lat = results[0]["lat"]
                lon = results[0]["lon"]
                display_name = results[0]["display_name"]
        """
        try:
            async with httpx.AsyncClient(timeout=self.timeout) as client:
                response = await client.get(
                    f"{self.nominatim_url}/search",
                    params={
                        "q": query,
                        "format": "json",
                        "countrycodes": country_codes,
                        "addressdetails": 1 if addressdetails else 0,
                        "limit": limit
                    }
                )

                if response.status_code == 200:
                    results = response.json()
                    logger.info(
                        "Address search completed",
                        query=query,
                        results_count=len(results)
                    )
                    return results
                else:
                    logger.error(
                        "Nominatim search failed",
                        query=query,
                        status_code=response.status_code,
                        response=response.text
                    )
                    return []

        except httpx.TimeoutException:
            logger.error("Nominatim search timeout", query=query)
            return []
        except Exception as e:
            logger.error("Nominatim search error", query=query, error=str(e))
            return []

    async def geocode_address(
        self,
        street: str,
        city: str,
        postal_code: Optional[str] = None,
        country: str = "Spain"
    ) -> Optional[Dict[str, Any]]:
        """
        Geocode a structured address to coordinates.

        Args:
            street: Street name and number
            city: City name
            postal_code: Optional postal code
            country: Country name (default: "Spain")

        Returns:
            Dict with lat, lon, and display_name, or None if not found

        Example:
            location = await nominatim.geocode_address(
                street="Calle Mayor 1",
                city="Madrid",
                postal_code="28013"
            )
            if location:
                lat, lon = location["lat"], location["lon"]
        """
        # Build structured query
        query_parts = [street, city]
        if postal_code:
            query_parts.append(postal_code)
        query_parts.append(country)

        query = ", ".join(query_parts)

        results = await self.search_address(query, limit=1)
        if results:
            return results[0]
        return None

    async def reverse_geocode(
        self,
        latitude: float,
        longitude: float
    ) -> Optional[Dict[str, Any]]:
        """
        Reverse geocode coordinates to an address.

        Args:
            latitude: Latitude coordinate
            longitude: Longitude coordinate

        Returns:
            Dict with address details, or None if not found

        Example:
            address = await nominatim.reverse_geocode(40.4168, -3.7038)
            if address:
                city = address["address"]["city"]
                street = address["address"]["road"]
        """
        try:
            async with httpx.AsyncClient(timeout=self.timeout) as client:
                response = await client.get(
                    f"{self.nominatim_url}/reverse",
                    params={
                        "lat": latitude,
                        "lon": longitude,
                        "format": "json",
                        "addressdetails": 1
                    }
                )

                if response.status_code == 200:
                    result = response.json()
                    logger.info(
                        "Reverse geocoding completed",
                        lat=latitude,
                        lon=longitude
                    )
                    return result
                else:
                    logger.error(
                        "Nominatim reverse geocoding failed",
                        lat=latitude,
                        lon=longitude,
                        status_code=response.status_code
                    )
                    return None

        except Exception as e:
            logger.error(
                "Reverse geocoding error",
                lat=latitude,
                lon=longitude,
                error=str(e)
            )
            return None

    async def health_check(self) -> bool:
        """
        Check if Nominatim service is healthy.

        Returns:
            True if service is responding, False otherwise
        """
        try:
            async with httpx.AsyncClient(timeout=5) as client:
                response = await client.get(f"{self.nominatim_url}/status")
                return response.status_code == 200
        except Exception as e:
            logger.warning("Nominatim health check failed", error=str(e))
            return False
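A minimal end-to-end sketch of the client above. The settings class here is a bare stand-in for a real BaseServiceSettings (the attribute name is taken from the getattr fallback in __init__; duck typing suffices for illustration):

import asyncio

class _Settings:  # illustrative stand-in for BaseServiceSettings
    NOMINATIM_SERVICE_URL = "http://nominatim-service:8080"

async def main() -> None:
    nominatim = NominatimClient(_Settings())
    location = await nominatim.geocode_address(street="Calle Mayor 1", city="Madrid")
    if location:
        print(location["lat"], location["lon"])

asyncio.run(main())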
186
shared/clients/notification_client.py
Executable file
@@ -0,0 +1,186 @@
# shared/clients/notification_client.py
"""
Notification Service Client for Inter-Service Communication
Provides access to notification and email sending from other services
"""

import structlog
from typing import Dict, Any, Optional
from shared.clients.base_service_client import BaseServiceClient
from shared.config.base import BaseServiceSettings

logger = structlog.get_logger()


class NotificationServiceClient(BaseServiceClient):
    """Client for communicating with the Notification Service"""

    def __init__(self, config: BaseServiceSettings, calling_service_name: str = "unknown"):
        super().__init__(calling_service_name, config)

    def get_service_base_path(self) -> str:
        return "/api/v1"

    # ================================================================
    # NOTIFICATION ENDPOINTS
    # ================================================================

    async def send_notification(
        self,
        tenant_id: str,
        notification_type: str,
        message: str,
        recipient_email: Optional[str] = None,
        subject: Optional[str] = None,
        html_content: Optional[str] = None,
        priority: str = "normal",
        metadata: Optional[Dict[str, Any]] = None
    ) -> Optional[Dict[str, Any]]:
        """
        Send a notification

        Args:
            tenant_id: Tenant ID (UUID as string)
            notification_type: Type of notification (email, sms, push, in_app)
            message: Notification message
            recipient_email: Recipient email address (for email notifications)
            subject: Email subject (for email notifications)
            html_content: HTML content for email (optional)
            priority: Priority level (low, normal, high, urgent)
            metadata: Additional metadata

        Returns:
            Dictionary with notification details
        """
        try:
            notification_data = {
                "type": notification_type,
                "message": message,
                "priority": priority,
                "recipient_email": recipient_email,
                "subject": subject,
                "html_content": html_content,
                "metadata": metadata or {}
            }

            result = await self.post("notifications/send", data=notification_data, tenant_id=tenant_id)
            if result:
                logger.info("Notification sent successfully",
                            tenant_id=tenant_id,
                            notification_type=notification_type)
            return result
        except Exception as e:
            logger.error("Error sending notification",
                         error=str(e),
                         tenant_id=tenant_id,
                         notification_type=notification_type)
            return None

    async def send_email(
        self,
        tenant_id: str,
        to_email: str,
        subject: str,
        message: str,
        html_content: Optional[str] = None,
        priority: str = "normal"
    ) -> Optional[Dict[str, Any]]:
        """
        Send an email notification (convenience method)

        Args:
            tenant_id: Tenant ID (UUID as string)
            to_email: Recipient email address
            subject: Email subject
            message: Email message (plain text)
            html_content: HTML version of email (optional)
            priority: Priority level (low, normal, high, urgent)

        Returns:
            Dictionary with notification details
        """
        return await self.send_notification(
            tenant_id=tenant_id,
            notification_type="email",
            message=message,
            recipient_email=to_email,
            subject=subject,
            html_content=html_content,
            priority=priority
        )

    async def send_workflow_summary(
        self,
        tenant_id: str,
        notification_data: Dict[str, Any]
    ) -> Optional[Dict[str, Any]]:
        """
        Send workflow summary notification

        Args:
            tenant_id: Tenant ID
            notification_data: Summary data to include in notification

        Returns:
            Dictionary with notification result
        """
        try:
            # Prepare workflow summary notification
            subject = f"Daily Workflow Summary - {notification_data.get('orchestration_run_id', 'N/A')}"

            message_parts = [
                f"Daily workflow completed for tenant {tenant_id}",
                f"Orchestration Run ID: {notification_data.get('orchestration_run_id', 'N/A')}",
                f"Forecasts created: {notification_data.get('forecasts_created', 0)}",
                f"Production batches created: {notification_data.get('batches_created', 0)}",
                f"Procurement requirements created: {notification_data.get('requirements_created', 0)}",
                f"Purchase orders created: {notification_data.get('pos_created', 0)}"
            ]

            message = "\n".join(message_parts)

            notification_payload = {
                "type": "email",
                "message": message,
                "priority": "normal",
                "subject": subject,
                "metadata": {
                    "orchestration_run_id": notification_data.get('orchestration_run_id'),
                    "forecast_id": notification_data.get('forecast_id'),
                    "production_schedule_id": notification_data.get('production_schedule_id'),
                    "procurement_plan_id": notification_data.get('procurement_plan_id'),
                    "summary_type": "workflow_completion"
                }
            }

            result = await self.post("notifications/send", data=notification_payload, tenant_id=tenant_id)
            if result:
                logger.info("Workflow summary notification sent successfully",
                            tenant_id=tenant_id,
                            orchestration_run_id=notification_data.get('orchestration_run_id'))
            return result
        except Exception as e:
            logger.error("Error sending workflow summary notification",
                         error=str(e),
                         tenant_id=tenant_id)
            return None

    # ================================================================
    # UTILITY METHODS
    # ================================================================

    async def health_check(self) -> bool:
        """Check if notification service is healthy"""
        try:
            result = await self.get("../health")  # Health endpoint is not tenant-scoped
            return result is not None
        except Exception as e:
            logger.error("Notification service health check failed", error=str(e))
            return False


# Factory function for dependency injection
def create_notification_client(config: BaseServiceSettings) -> NotificationServiceClient:
    """Create notification service client instance"""
    return NotificationServiceClient(config)
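A usage sketch for the convenience method above; `settings` is assumed to be a configured BaseServiceSettings and the recipient address is a placeholder:

from shared.clients.notification_client import create_notification_client

async def notify_owner(settings, tenant_id: str) -> None:
    client = create_notification_client(settings)
    # Returns None on failure; the client logs the error itself.
    await client.send_email(
        tenant_id=tenant_id,
        to_email="owner@example.com",
        subject="Stock alert",
        message="Flour is below the reorder point.",
        priority="high",
    )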
251
shared/clients/orders_client.py
Executable file
@@ -0,0 +1,251 @@
# shared/clients/orders_client.py
"""
Orders Service Client for Inter-Service Communication
Provides access to orders and procurement planning from other services
"""

import structlog
from typing import Dict, Any, Optional, List
from shared.clients.base_service_client import BaseServiceClient
from shared.config.base import BaseServiceSettings

logger = structlog.get_logger()


class OrdersServiceClient(BaseServiceClient):
    """Client for communicating with the Orders Service"""

    def __init__(self, config: BaseServiceSettings):
        super().__init__("orders", config)

    def get_service_base_path(self) -> str:
        return "/api/v1"

    # ================================================================
    # PROCUREMENT PLANNING
    # ================================================================

    async def get_demand_requirements(self, tenant_id: str, date: str) -> Optional[Dict[str, Any]]:
        """Get demand requirements for production planning"""
        try:
            params = {"date": date}
            result = await self.get("orders/demand-requirements", tenant_id=tenant_id, params=params)
            if result:
                logger.info("Retrieved demand requirements from orders service",
                            date=date, tenant_id=tenant_id)
            return result
        except Exception as e:
            logger.error("Error getting demand requirements",
                         error=str(e), date=date, tenant_id=tenant_id)
            return None

    async def get_procurement_requirements(self, tenant_id: str, horizon: Optional[str] = None) -> Optional[Dict[str, Any]]:
        """Get procurement requirements for purchasing planning"""
        try:
            params = {}
            if horizon:
                params["horizon"] = horizon

            result = await self.get("orders/procurement-requirements", tenant_id=tenant_id, params=params)
            if result:
                logger.info("Retrieved procurement requirements from orders service",
                            horizon=horizon, tenant_id=tenant_id)
            return result
        except Exception as e:
            logger.error("Error getting procurement requirements",
                         error=str(e), tenant_id=tenant_id)
            return None

    async def get_weekly_ingredient_needs(self, tenant_id: str) -> Optional[Dict[str, Any]]:
        """Get weekly ingredient ordering needs for the dashboard"""
        try:
            result = await self.get("orders/dashboard/weekly-ingredient-needs", tenant_id=tenant_id)
            if result:
                logger.info("Retrieved weekly ingredient needs from orders service",
                            tenant_id=tenant_id)
            return result
        except Exception as e:
            logger.error("Error getting weekly ingredient needs",
                         error=str(e), tenant_id=tenant_id)
            return None

    # ================================================================
    # CUSTOMER ORDERS
    # ================================================================

    async def get_customer_orders(self, tenant_id: str, params: Optional[Dict[str, Any]] = None) -> Optional[Dict[str, Any]]:
        """Get customer orders with optional filtering"""
        try:
            result = await self.get("orders/list", tenant_id=tenant_id, params=params)
            if result:
                orders_count = len(result.get('orders', [])) if isinstance(result, dict) else len(result) if isinstance(result, list) else 0
                logger.info("Retrieved customer orders from orders service",
                            orders_count=orders_count, tenant_id=tenant_id)
            return result
        except Exception as e:
            logger.error("Error getting customer orders",
                         error=str(e), tenant_id=tenant_id)
            return None

    async def create_customer_order(self, tenant_id: str, order_data: Dict[str, Any]) -> Optional[Dict[str, Any]]:
        """Create a new customer order"""
        try:
            result = await self.post("orders/list", data=order_data, tenant_id=tenant_id)
            if result:
                logger.info("Created customer order",
                            order_id=result.get('id'), tenant_id=tenant_id)
            return result
        except Exception as e:
            logger.error("Error creating customer order",
                         error=str(e), tenant_id=tenant_id)
            return None

    async def update_customer_order(self, tenant_id: str, order_id: str, order_data: Dict[str, Any]) -> Optional[Dict[str, Any]]:
        """Update an existing customer order"""
        try:
            result = await self.put(f"orders/list/{order_id}", data=order_data, tenant_id=tenant_id)
            if result:
                logger.info("Updated customer order",
                            order_id=order_id, tenant_id=tenant_id)
            return result
        except Exception as e:
            logger.error("Error updating customer order",
                         error=str(e), order_id=order_id, tenant_id=tenant_id)
            return None

    # ================================================================
    # CENTRAL BAKERY ORDERS
    # ================================================================

    async def get_daily_finalized_orders(self, tenant_id: str, date: Optional[str] = None) -> Optional[Dict[str, Any]]:
        """Get daily finalized orders for the central bakery"""
        try:
            params = {}
            if date:
                params["date"] = date

            result = await self.get("orders/daily-finalized", tenant_id=tenant_id, params=params)
            if result:
                logger.info("Retrieved daily finalized orders from orders service",
                            date=date, tenant_id=tenant_id)
            return result
        except Exception as e:
            logger.error("Error getting daily finalized orders",
                         error=str(e), tenant_id=tenant_id)
            return None

    async def get_weekly_order_summaries(self, tenant_id: str) -> Optional[Dict[str, Any]]:
        """Get weekly order summaries for the central bakery dashboard"""
        try:
            result = await self.get("orders/dashboard/weekly-summaries", tenant_id=tenant_id)
            if result:
                logger.info("Retrieved weekly order summaries from orders service",
                            tenant_id=tenant_id)
            return result
        except Exception as e:
            logger.error("Error getting weekly order summaries",
                         error=str(e), tenant_id=tenant_id)
            return None

    # ================================================================
    # DASHBOARD AND ANALYTICS
    # ================================================================

    async def get_dashboard_summary(self, tenant_id: str) -> Optional[Dict[str, Any]]:
        """Get orders dashboard summary data"""
        try:
            result = await self.get("orders/dashboard/summary", tenant_id=tenant_id)
            if result:
                logger.info("Retrieved orders dashboard summary",
                            tenant_id=tenant_id)
            return result
        except Exception as e:
            logger.error("Error getting orders dashboard summary",
                         error=str(e), tenant_id=tenant_id)
            return None

    async def get_order_trends(self, tenant_id: str, start_date: str, end_date: str) -> Optional[Dict[str, Any]]:
        """Get order trends analysis"""
        try:
            params = {
                "start_date": start_date,
                "end_date": end_date
            }
            result = await self.get("orders/analytics/trends", tenant_id=tenant_id, params=params)
            if result:
                logger.info("Retrieved order trends from orders service",
                            start_date=start_date, end_date=end_date, tenant_id=tenant_id)
            return result
        except Exception as e:
            logger.error("Error getting order trends",
                         error=str(e), tenant_id=tenant_id)
            return None

    # ================================================================
    # ALERTS AND NOTIFICATIONS
    # ================================================================

    async def get_central_bakery_alerts(self, tenant_id: str) -> Optional[List[Dict[str, Any]]]:
        """Get central bakery specific alerts"""
        try:
            result = await self.get("orders/alerts", tenant_id=tenant_id)
            alerts = result.get('alerts', []) if result else []
            logger.info("Retrieved central bakery alerts from orders service",
                        alerts_count=len(alerts), tenant_id=tenant_id)
            return alerts
        except Exception as e:
            logger.error("Error getting central bakery alerts",
                         error=str(e), tenant_id=tenant_id)
            return []

    async def acknowledge_alert(self, tenant_id: str, alert_id: str) -> Optional[Dict[str, Any]]:
        """Acknowledge an order-related alert"""
        try:
            result = await self.post(f"orders/alerts/{alert_id}/acknowledge", data={}, tenant_id=tenant_id)
            if result:
                logger.info("Acknowledged order alert",
                            alert_id=alert_id, tenant_id=tenant_id)
            return result
        except Exception as e:
            logger.error("Error acknowledging order alert",
                         error=str(e), alert_id=alert_id, tenant_id=tenant_id)
            return None

    # ================================================================
    # UTILITY METHODS
    # ================================================================

    async def download_orders_pdf(self, tenant_id: str, order_ids: List[str], format_type: str = "supplier_communication") -> Optional[bytes]:
        """Download orders as PDF for supplier communication"""
        try:
            data = {
                "order_ids": order_ids,
                "format": format_type,
                "include_delivery_schedule": True
            }
            # Note: This would need special handling for binary data
            result = await self.post("orders/operations/download-pdf", data=data, tenant_id=tenant_id)
            if result:
                logger.info("Generated orders PDF",
                            orders_count=len(order_ids), tenant_id=tenant_id)
            return result
        except Exception as e:
            logger.error("Error generating orders PDF",
                         error=str(e), tenant_id=tenant_id)
            return None

    async def health_check(self) -> bool:
        """Check if orders service is healthy"""
        try:
            result = await self.get("../health")  # Health endpoint is not tenant-scoped
            return result is not None
        except Exception as e:
            logger.error("Orders service health check failed", error=str(e))
            return False


# Factory function for dependency injection
def create_orders_client(config: BaseServiceSettings) -> OrdersServiceClient:
    """Create orders service client instance"""
    return OrdersServiceClient(config)
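A usage sketch of the dashboard and alert methods above; `settings` is assumed to be a configured BaseServiceSettings:

from shared.clients.orders_client import create_orders_client

async def morning_briefing(settings, tenant_id: str) -> None:
    orders = create_orders_client(settings)
    summary = await orders.get_dashboard_summary(tenant_id)
    alerts = await orders.get_central_bakery_alerts(tenant_id)
    # Both calls degrade gracefully (None / [] on failure), so no try/except here.
    if summary:
        print(summary)
    for alert in alerts:
        print(alert)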
140
shared/clients/payment_client.py
Executable file
@@ -0,0 +1,140 @@
"""
Payment Client Interface and Implementation
This module provides an abstraction layer for payment providers to make the system payment-agnostic
"""

import abc
from typing import Dict, Any, Optional
from dataclasses import dataclass
from datetime import datetime


@dataclass
class PaymentCustomer:
    id: str
    email: str
    name: str
    created_at: datetime


@dataclass
class PaymentMethod:
    id: str
    type: str
    brand: Optional[str] = None
    last4: Optional[str] = None
    exp_month: Optional[int] = None
    exp_year: Optional[int] = None


@dataclass
class Subscription:
    id: str
    customer_id: str
    plan_id: str
    status: str  # active, canceled, past_due, etc.
    current_period_start: datetime
    current_period_end: datetime
    created_at: datetime
    billing_cycle_anchor: Optional[datetime] = None
    cancel_at_period_end: Optional[bool] = None
    # 3DS Authentication fields
    payment_intent_id: Optional[str] = None
    payment_intent_status: Optional[str] = None
    payment_intent_client_secret: Optional[str] = None
    requires_action: Optional[bool] = None
    trial_end: Optional[datetime] = None
    billing_interval: Optional[str] = None


@dataclass
class Invoice:
    id: str
    customer_id: str
    subscription_id: str
    amount: float
    currency: str
    status: str  # draft, open, paid, void, etc.
    created_at: datetime
    due_date: Optional[datetime] = None
    description: Optional[str] = None
    invoice_pdf: Optional[str] = None  # URL to PDF invoice
    hosted_invoice_url: Optional[str] = None  # URL to hosted invoice page


class PaymentProvider(abc.ABC):
    """
    Abstract base class for payment providers.
    All payment providers should implement this interface.
    """

    @abc.abstractmethod
    async def create_customer(self, customer_data: Dict[str, Any]) -> PaymentCustomer:
        """
        Create a customer in the payment provider system
        """
        pass

    @abc.abstractmethod
    async def create_subscription(self, customer_id: str, plan_id: str, payment_method_id: str, trial_period_days: Optional[int] = None) -> Subscription:
        """
        Create a subscription for a customer
        """
        pass

    @abc.abstractmethod
    async def update_payment_method(self, customer_id: str, payment_method_id: str) -> PaymentMethod:
        """
        Update the payment method for a customer
        """
        pass

    @abc.abstractmethod
    async def cancel_subscription(
        self,
        subscription_id: str,
        cancel_at_period_end: bool = True
    ) -> Subscription:
        """
        Cancel a subscription

        Args:
            subscription_id: Subscription ID to cancel
            cancel_at_period_end: If True, cancel at end of billing period. Default True.
        """
        pass

    @abc.abstractmethod
    async def get_invoices(self, customer_id: str) -> list[Invoice]:
        """
        Get invoices for a customer
        """
        pass

    @abc.abstractmethod
    async def get_subscription(self, subscription_id: str) -> Subscription:
        """
        Get subscription details
        """
        pass

    @abc.abstractmethod
    async def get_customer(self, customer_id: str) -> PaymentCustomer:
        """
        Get customer details
        """
        pass

    @abc.abstractmethod
    async def create_setup_intent(self) -> Dict[str, Any]:
        """
        Create a setup intent for saving payment methods
        """
        pass

    @abc.abstractmethod
    async def create_payment_intent(self, amount: float, currency: str, customer_id: str, payment_method_id: str) -> Dict[str, Any]:
        """
        Create a payment intent for one-time payments
        """
        pass
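A minimal in-memory fake implementing the interface above, purely illustrative (e.g. for tests); every identifier it returns is a placeholder, and it assumes the dataclasses and PaymentProvider from this module are importable:

from datetime import datetime, timezone
from typing import Any, Dict

class InMemoryPaymentProvider(PaymentProvider):
    """Illustrative in-memory fake; not a real payment gateway."""

    def __init__(self) -> None:
        self._customers: Dict[str, PaymentCustomer] = {}
        self._subscriptions: Dict[str, Subscription] = {}

    async def create_customer(self, customer_data: Dict[str, Any]) -> PaymentCustomer:
        customer = PaymentCustomer(
            id=f"cus_{len(self._customers) + 1}",
            email=customer_data["email"],
            name=customer_data.get("name", ""),
            created_at=datetime.now(timezone.utc),
        )
        self._customers[customer.id] = customer
        return customer

    async def create_subscription(self, customer_id, plan_id, payment_method_id, trial_period_days=None):
        now = datetime.now(timezone.utc)
        sub = Subscription(
            id=f"sub_{len(self._subscriptions) + 1}",
            customer_id=customer_id, plan_id=plan_id, status="active",
            current_period_start=now, current_period_end=now, created_at=now,
        )
        self._subscriptions[sub.id] = sub
        return sub

    async def cancel_subscription(self, subscription_id, cancel_at_period_end=True):
        sub = self._subscriptions[subscription_id]
        # Immediate cancel flips status; period-end cancel only sets the flag.
        sub.status = "active" if cancel_at_period_end else "canceled"
        sub.cancel_at_period_end = cancel_at_period_end
        return sub

    async def update_payment_method(self, customer_id, payment_method_id):
        return PaymentMethod(id=payment_method_id, type="card")

    async def get_invoices(self, customer_id):
        return []

    async def get_subscription(self, subscription_id):
        return self._subscriptions[subscription_id]

    async def get_customer(self, customer_id):
        return self._customers[customer_id]

    async def create_setup_intent(self):
        return {"id": "seti_1", "client_secret": "secret_test"}

    async def create_payment_intent(self, amount, currency, customer_id, payment_method_id):
        return {"id": "pi_1", "status": "succeeded", "amount": amount, "currency": currency}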
160
shared/clients/payment_provider.py
Normal file
@@ -0,0 +1,160 @@
"""
Payment Provider Interface
Abstract base class for payment provider implementations
Allows easy swapping of payment SDKs (Stripe, PayPal, etc.)
"""

from abc import ABC, abstractmethod
from typing import Dict, Any, Optional


class PaymentProvider(ABC):
    """
    Abstract Payment Provider Interface
    Defines all required methods for payment processing
    """

    @abstractmethod
    async def create_customer(
        self,
        email: str,
        name: Optional[str] = None,
        metadata: Optional[Dict[str, Any]] = None
    ) -> Dict[str, Any]:
        """Create a customer in the payment provider"""
        pass

    @abstractmethod
    async def attach_payment_method(
        self,
        payment_method_id: str,
        customer_id: str
    ) -> Dict[str, Any]:
        """Attach a payment method to a customer"""
        pass

    @abstractmethod
    async def set_default_payment_method(
        self,
        customer_id: str,
        payment_method_id: str
    ) -> Dict[str, Any]:
        """Set the default payment method for a customer"""
        pass

    @abstractmethod
    async def create_setup_intent_for_verification(
        self,
        customer_id: str,
        payment_method_id: str,
        metadata: Optional[Dict[str, Any]] = None
    ) -> Dict[str, Any]:
        """Create a SetupIntent for payment method verification (3DS support)"""
        pass

    @abstractmethod
    async def verify_setup_intent_status(
        self,
        setup_intent_id: str
    ) -> Dict[str, Any]:
        """Verify the status of a SetupIntent"""
        pass

    @abstractmethod
    async def create_subscription_with_verified_payment(
        self,
        customer_id: str,
        price_id: str,
        payment_method_id: str,
        trial_period_days: Optional[int] = None,
        billing_cycle_anchor: Optional[Any] = None,
        metadata: Optional[Dict[str, Any]] = None
    ) -> Dict[str, Any]:
        """Create a subscription with a verified payment method

        Args:
            billing_cycle_anchor: Can be int (Unix timestamp), "now", or "unchanged"
        """
        pass

    @abstractmethod
    async def create_setup_intent(self) -> Dict[str, Any]:
        """Create a basic SetupIntent"""
        pass

    @abstractmethod
    async def get_setup_intent(
        self,
        setup_intent_id: str
    ) -> Any:
        """Get SetupIntent details"""
        pass

    @abstractmethod
    async def create_payment_intent(
        self,
        amount: float,
        currency: str,
        customer_id: str,
        payment_method_id: str
    ) -> Dict[str, Any]:
        """Create a PaymentIntent for one-time payments"""
        pass

    @abstractmethod
    async def complete_subscription_after_setup_intent(
        self,
        setup_intent_id: str
    ) -> Dict[str, Any]:
        """Complete subscription creation after SetupIntent verification"""
        pass

    @abstractmethod
    async def cancel_subscription(
        self,
        subscription_id: str
    ) -> Dict[str, Any]:
        """Cancel a subscription"""
        pass

    @abstractmethod
    async def update_payment_method(
        self,
        customer_id: str,
        payment_method_id: str
    ) -> Dict[str, Any]:
        """Update customer's payment method"""
        pass

    @abstractmethod
    async def update_subscription(
        self,
        subscription_id: str,
        new_price_id: str
    ) -> Dict[str, Any]:
        """Update subscription price"""
        pass

    @abstractmethod
    async def get_subscription(
        self,
        subscription_id: str
    ) -> Dict[str, Any]:
        """Get subscription details"""
        pass

    @abstractmethod
    async def get_customer_payment_method(
        self,
        customer_id: str
    ) -> Dict[str, Any]:
        """Get customer's payment method"""
        pass

    @abstractmethod
    async def get_invoices(
        self,
        customer_id: str
    ) -> Dict[str, Any]:
        """Get customer invoices"""
        pass
678
shared/clients/procurement_client.py
Executable file
@@ -0,0 +1,678 @@
|
||||
"""
|
||||
Procurement Service Client for Inter-Service Communication
|
||||
Provides API client for procurement operations and internal transfers
|
||||
"""
|
||||
|
||||
import structlog
|
||||
from typing import Dict, Any, List, Optional
|
||||
from datetime import date
|
||||
from shared.clients.base_service_client import BaseServiceClient
|
||||
from shared.config.base import BaseServiceSettings
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
|
||||
class ProcurementServiceClient(BaseServiceClient):
|
||||
"""Client for communicating with the Procurement Service"""
|
||||
|
||||
def __init__(self, config: BaseServiceSettings, service_name: str = "unknown"):
|
||||
super().__init__(service_name, config)
|
||||
self.service_base_url = config.PROCUREMENT_SERVICE_URL
|
||||
|
||||
def get_service_base_path(self) -> str:
|
||||
return "/api/v1"
|
||||
|
||||
# ================================================================
|
||||
# PURCHASE ORDER ENDPOINTS
|
||||
# ================================================================
|
||||
|
||||
async def create_purchase_order(
|
||||
self,
|
||||
tenant_id: str,
|
||||
order_data: Dict[str, Any]
|
||||
) -> Optional[Dict[str, Any]]:
|
||||
"""
|
||||
Create a new purchase order
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant ID
|
||||
order_data: Purchase order data
|
||||
|
||||
Returns:
|
||||
Created purchase order
|
||||
"""
|
||||
try:
|
||||
response = await self.post(
|
||||
"procurement/purchase-orders",
|
||||
data=order_data,
|
||||
tenant_id=tenant_id
|
||||
)
|
||||
|
||||
if response:
|
||||
logger.info("Created purchase order",
|
||||
tenant_id=tenant_id,
|
||||
po_number=response.get("po_number"))
|
||||
return response
|
||||
except Exception as e:
|
||||
logger.error("Error creating purchase order",
|
||||
tenant_id=tenant_id,
|
||||
error=str(e))
|
||||
return None
|
||||
|
||||
async def get_purchase_order(
|
||||
self,
|
||||
tenant_id: str,
|
||||
po_id: str
|
||||
) -> Optional[Dict[str, Any]]:
|
||||
"""
|
||||
Get a specific purchase order
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant ID
|
||||
po_id: Purchase order ID
|
||||
|
||||
Returns:
|
||||
Purchase order details
|
||||
"""
|
||||
try:
|
||||
response = await self.get(
|
||||
f"procurement/purchase-orders/{po_id}",
|
||||
tenant_id=tenant_id
|
||||
)
|
||||
|
||||
if response:
|
||||
logger.info("Retrieved purchase order",
|
||||
tenant_id=tenant_id,
|
||||
po_id=po_id)
|
||||
return response
|
||||
except Exception as e:
|
||||
logger.error("Error getting purchase order",
|
||||
tenant_id=tenant_id,
|
||||
po_id=po_id,
|
||||
error=str(e))
|
||||
return None
|
||||
|
||||
async def update_purchase_order_status(
|
||||
self,
|
||||
tenant_id: str,
|
||||
po_id: str,
|
||||
new_status: str,
|
||||
user_id: str
|
||||
) -> Optional[Dict[str, Any]]:
|
||||
"""
|
||||
Update purchase order status
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant ID
|
||||
po_id: Purchase order ID
|
||||
new_status: New status
|
||||
user_id: User ID performing update
|
||||
|
||||
Returns:
|
||||
Updated purchase order
|
||||
"""
|
||||
try:
|
||||
response = await self.put(
|
||||
f"procurement/purchase-orders/{po_id}/status",
|
||||
data={
|
||||
"status": new_status,
|
||||
"updated_by_user_id": user_id
|
||||
},
|
||||
tenant_id=tenant_id
|
||||
)
|
||||
|
||||
if response:
|
||||
logger.info("Updated purchase order status",
|
||||
tenant_id=tenant_id,
|
||||
po_id=po_id,
|
||||
new_status=new_status)
|
||||
return response
|
||||
except Exception as e:
|
||||
logger.error("Error updating purchase order status",
|
||||
tenant_id=tenant_id,
|
||||
po_id=po_id,
|
||||
new_status=new_status,
|
||||
error=str(e))
|
||||
return None
|
||||
|
||||
async def get_pending_purchase_orders(
|
||||
self,
|
||||
tenant_id: str,
|
||||
limit: int = 50,
|
||||
enrich_supplier: bool = True
|
||||
) -> Optional[List[Dict[str, Any]]]:
|
||||
"""
|
||||
Get pending purchase orders
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant ID
|
||||
limit: Maximum number of results
|
||||
enrich_supplier: Whether to include supplier details (default: True)
|
||||
Set to False for faster queries when supplier data will be fetched separately
|
||||
|
||||
Returns:
|
||||
List of pending purchase orders
|
||||
"""
|
||||
try:
|
||||
response = await self.get(
|
||||
"procurement/purchase-orders",
|
||||
params={
|
||||
"status": "pending_approval",
|
||||
"limit": limit,
|
||||
"enrich_supplier": enrich_supplier
|
||||
},
|
||||
tenant_id=tenant_id
|
||||
)
|
||||
|
||||
if response:
|
||||
logger.info("Retrieved pending purchase orders",
|
||||
tenant_id=tenant_id,
|
||||
count=len(response),
|
||||
enriched=enrich_supplier)
|
||||
return response if response else []
|
||||
except Exception as e:
|
||||
logger.error("Error getting pending purchase orders",
|
||||
tenant_id=tenant_id,
|
||||
error=str(e))
|
||||
return []
|
||||
|
||||
async def get_purchase_orders_by_supplier(
|
||||
self,
|
||||
tenant_id: str,
|
||||
supplier_id: str,
|
||||
date_from: Optional[date] = None,
|
||||
date_to: Optional[date] = None,
|
||||
status: Optional[str] = None,
|
||||
limit: int = 100
|
||||
) -> Optional[List[Dict[str, Any]]]:
|
||||
"""
|
||||
Get purchase orders for a specific supplier
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant ID
|
||||
supplier_id: Supplier ID to filter by
|
||||
date_from: Start date for filtering
|
||||
date_to: End date for filtering
|
||||
status: Status filter (e.g., 'approved', 'delivered')
|
||||
limit: Maximum number of results
|
||||
|
||||
Returns:
|
||||
List of purchase orders with items
|
||||
"""
|
||||
try:
|
||||
params = {
|
||||
"supplier_id": supplier_id,
|
||||
"limit": limit
|
||||
}
|
||||
if date_from:
|
||||
params["date_from"] = date_from.isoformat()
|
||||
if date_to:
|
||||
params["date_to"] = date_to.isoformat()
|
||||
if status:
|
||||
params["status"] = status
|
||||
|
||||
response = await self.get(
|
||||
"procurement/purchase-orders",
|
||||
params=params,
|
||||
tenant_id=tenant_id
|
||||
)
|
||||
|
||||
if response:
|
||||
logger.info("Retrieved purchase orders by supplier",
|
||||
tenant_id=tenant_id,
|
||||
supplier_id=supplier_id,
|
||||
count=len(response))
|
||||
return response if response else []
|
||||
except Exception as e:
|
||||
logger.error("Error getting purchase orders by supplier",
|
||||
tenant_id=tenant_id,
|
||||
supplier_id=supplier_id,
|
||||
error=str(e))
|
||||
return []
|
||||
|
||||
# ================================================================
|
||||
# INTERNAL TRANSFER ENDPOINTS (NEW FOR ENTERPRISE TIER)
|
||||
# ================================================================
|
||||
|
||||
async def create_internal_purchase_order(
|
||||
self,
|
||||
parent_tenant_id: str,
|
||||
child_tenant_id: str,
|
||||
items: List[Dict[str, Any]],
|
||||
delivery_date: date,
|
||||
notes: Optional[str] = None
|
||||
) -> Optional[Dict[str, Any]]:
|
||||
"""
|
||||
Create an internal purchase order from parent to child tenant
|
||||
|
||||
Args:
|
||||
parent_tenant_id: Parent tenant ID (supplier)
|
||||
child_tenant_id: Child tenant ID (buyer)
|
||||
items: List of items with product_id, quantity, unit_of_measure
|
||||
delivery_date: When child needs delivery
|
||||
notes: Optional notes for the transfer
|
||||
|
||||
Returns:
|
||||
Created internal purchase order
|
||||
"""
|
||||
try:
|
||||
response = await self.post(
|
||||
"procurement/internal-transfers",
|
||||
data={
|
||||
"destination_tenant_id": child_tenant_id,
|
||||
"items": items,
|
||||
"delivery_date": delivery_date.isoformat(),
|
||||
"notes": notes
|
||||
},
|
||||
tenant_id=parent_tenant_id
|
||||
)
|
||||
|
||||
if response:
|
||||
logger.info("Created internal purchase order",
|
||||
parent_tenant_id=parent_tenant_id,
|
||||
child_tenant_id=child_tenant_id,
|
||||
po_number=response.get("po_number"))
|
||||
return response
|
||||
except Exception as e:
|
||||
logger.error("Error creating internal purchase order",
|
||||
parent_tenant_id=parent_tenant_id,
|
||||
child_tenant_id=child_tenant_id,
|
||||
error=str(e))
|
||||
return None
|
||||
|
||||
async def get_approved_internal_purchase_orders(
|
||||
self,
|
||||
parent_tenant_id: str,
|
||||
target_date: Optional[date] = None,
|
||||
status: Optional[str] = "approved"
|
||||
) -> Optional[List[Dict[str, Any]]]:
|
||||
"""
|
||||
Get approved internal purchase orders for parent tenant
|
||||
|
||||
Args:
|
||||
parent_tenant_id: Parent tenant ID
|
||||
target_date: Optional target date to filter
|
||||
status: Status filter (default: approved)
|
||||
|
||||
Returns:
|
||||
List of approved internal purchase orders
|
||||
"""
|
||||
try:
|
||||
params = {"status": status}
|
||||
if target_date:
|
||||
params["target_date"] = target_date.isoformat()
|
||||
|
||||
response = await self.get(
|
||||
"procurement/internal-transfers",
|
||||
params=params,
|
||||
tenant_id=parent_tenant_id
|
||||
)
|
||||
|
||||
if response:
|
||||
logger.info("Retrieved internal purchase orders",
|
||||
parent_tenant_id=parent_tenant_id,
|
||||
count=len(response))
|
||||
return response if response else []
|
||||
except Exception as e:
|
||||
logger.error("Error getting internal purchase orders",
|
||||
parent_tenant_id=parent_tenant_id,
|
||||
error=str(e))
|
||||
return []
|
||||
|
||||
async def approve_internal_purchase_order(
|
||||
self,
|
||||
parent_tenant_id: str,
|
||||
po_id: str,
|
||||
approved_by_user_id: str
|
||||
) -> Optional[Dict[str, Any]]:
|
||||
"""
|
||||
Approve an internal purchase order
|
||||
|
||||
Args:
|
||||
parent_tenant_id: Parent tenant ID
|
||||
po_id: Purchase order ID to approve
|
||||
approved_by_user_id: User ID performing approval
|
||||
|
||||
Returns:
|
||||
Updated purchase order
|
||||
"""
|
||||
try:
|
||||
response = await self.post(
|
||||
f"procurement/internal-transfers/{po_id}/approve",
|
||||
data={
|
||||
"approved_by_user_id": approved_by_user_id
|
||||
},
|
||||
tenant_id=parent_tenant_id
|
||||
)
|
||||
|
||||
if response:
|
||||
logger.info("Approved internal purchase order",
|
||||
parent_tenant_id=parent_tenant_id,
|
||||
po_id=po_id)
|
||||
return response
|
||||
except Exception as e:
|
||||
logger.error("Error approving internal purchase order",
|
||||
parent_tenant_id=parent_tenant_id,
|
||||
po_id=po_id,
|
||||
error=str(e))
|
||||
return None
|
||||
|
||||
async def get_internal_transfer_history(
|
||||
self,
|
||||
tenant_id: str,
|
||||
parent_tenant_id: Optional[str] = None,
|
||||
child_tenant_id: Optional[str] = None,
|
||||
start_date: Optional[date] = None,
|
||||
end_date: Optional[date] = None
|
||||
) -> Optional[List[Dict[str, Any]]]:
|
||||
"""
|
||||
Get internal transfer history with optional filtering
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant ID (either parent or child)
|
||||
parent_tenant_id: Filter by specific parent tenant
|
||||
child_tenant_id: Filter by specific child tenant
|
||||
start_date: Filter by start date
|
||||
end_date: Filter by end date
|
||||
|
||||
Returns:
|
||||
List of internal transfer records
|
||||
"""
|
||||
try:
|
||||
params = {}
|
||||
if parent_tenant_id:
|
||||
params["parent_tenant_id"] = parent_tenant_id
|
||||
if child_tenant_id:
|
||||
params["child_tenant_id"] = child_tenant_id
|
||||
if start_date:
|
||||
params["start_date"] = start_date.isoformat()
|
||||
if end_date:
|
||||
params["end_date"] = end_date.isoformat()
|
||||
|
||||
response = await self.get(
|
||||
"procurement/internal-transfers/history",
|
||||
params=params,
|
||||
tenant_id=tenant_id
|
||||
)
|
||||
|
||||
if response:
|
||||
logger.info("Retrieved internal transfer history",
|
||||
tenant_id=tenant_id,
|
||||
count=len(response))
|
||||
return response if response else []
|
||||
except Exception as e:
|
||||
logger.error("Error getting internal transfer history",
|
||||
tenant_id=tenant_id,
|
||||
error=str(e))
|
||||
return []
|
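# --- Illustrative usage sketch (not part of the original file). Tenant IDs
# --- are placeholders; date filters are serialized to ISO strings before
# --- being sent as query parameters.
#
#   from datetime import date
#
#   async def _example_transfer_history(client: "ProcurementServiceClient") -> None:
#       history = await client.get_internal_transfer_history(
#           tenant_id="<parent-tenant-uuid>",
#           start_date=date(2024, 1, 1),
#           end_date=date(2024, 1, 31),
#       )
#       for transfer in history or []:
#           print(transfer.get("id"), transfer.get("status"))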
||||
|
||||
# ================================================================
|
||||
# PROCUREMENT PLAN ENDPOINTS
|
||||
# ================================================================
|
||||
|
||||
async def get_procurement_plan(
|
||||
self,
|
||||
tenant_id: str,
|
||||
plan_id: str
|
||||
) -> Optional[Dict[str, Any]]:
|
||||
"""
|
||||
Get a specific procurement plan
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant ID
|
||||
plan_id: Procurement plan ID
|
||||
|
||||
Returns:
|
||||
Procurement plan details
|
||||
"""
|
||||
try:
|
||||
response = await self.get(
|
||||
f"procurement/plans/{plan_id}",
|
||||
tenant_id=tenant_id
|
||||
)
|
||||
|
||||
if response:
|
||||
logger.info("Retrieved procurement plan",
|
||||
tenant_id=tenant_id,
|
||||
plan_id=plan_id)
|
||||
return response
|
||||
except Exception as e:
|
||||
logger.error("Error getting procurement plan",
|
||||
tenant_id=tenant_id,
|
||||
plan_id=plan_id,
|
||||
error=str(e))
|
||||
return None
|
||||
|
||||
async def get_procurement_plans(
|
||||
self,
|
||||
tenant_id: str,
|
||||
date_from: Optional[date] = None,
|
||||
date_to: Optional[date] = None,
|
||||
status: Optional[str] = None
|
||||
) -> Optional[List[Dict[str, Any]]]:
|
||||
"""
|
||||
Get procurement plans with optional filtering
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant ID
|
||||
date_from: Start date for filtering
|
||||
date_to: End date for filtering
|
||||
status: Status filter
|
||||
|
||||
Returns:
|
||||
List of procurement plan dictionaries
|
||||
"""
|
||||
try:
|
||||
params = {}
|
||||
if date_from:
|
||||
params["date_from"] = date_from.isoformat()
|
||||
if date_to:
|
||||
params["date_to"] = date_to.isoformat()
|
||||
if status:
|
||||
params["status"] = status
|
||||
|
||||
response = await self.get(
|
||||
"procurement/plans",
|
||||
params=params,
|
||||
tenant_id=tenant_id
|
||||
)
|
||||
|
||||
if response:
|
||||
logger.info("Retrieved procurement plans",
|
||||
tenant_id=tenant_id,
|
||||
count=len(response))
|
||||
return response if response else []
|
||||
except Exception as e:
|
||||
logger.error("Error getting procurement plans",
|
||||
tenant_id=tenant_id,
|
||||
error=str(e))
|
||||
return []
|
||||
|
||||
# ================================================================
|
||||
# SUPPLIER ENDPOINTS
|
||||
# ================================================================
|
||||
|
||||
async def get_suppliers(
|
||||
self,
|
||||
tenant_id: str
|
||||
) -> Optional[List[Dict[str, Any]]]:
|
||||
"""
|
||||
Get suppliers for a tenant
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant ID
|
||||
|
||||
Returns:
|
||||
List of supplier dictionaries
|
||||
"""
|
||||
try:
|
||||
response = await self.get(
|
||||
"procurement/suppliers",
|
||||
tenant_id=tenant_id
|
||||
)
|
||||
|
||||
if response:
|
||||
logger.info("Retrieved suppliers",
|
||||
tenant_id=tenant_id,
|
||||
count=len(response))
|
||||
return response if response else []
|
||||
except Exception as e:
|
||||
logger.error("Error getting suppliers",
|
||||
tenant_id=tenant_id,
|
||||
error=str(e))
|
||||
return []
|
||||
|
||||
async def get_supplier(
|
||||
self,
|
||||
tenant_id: str,
|
||||
supplier_id: str
|
||||
) -> Optional[Dict[str, Any]]:
|
||||
"""
|
||||
Get specific supplier details
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant ID
|
||||
supplier_id: Supplier ID
|
||||
|
||||
Returns:
|
||||
Supplier details
|
||||
"""
|
||||
try:
|
||||
# Use suppliers service to get supplier details
|
||||
from shared.clients.suppliers_client import SuppliersServiceClient
|
||||
suppliers_client = SuppliersServiceClient(self.config)
|
||||
response = await suppliers_client.get_supplier_by_id(tenant_id, supplier_id)
|
||||
|
||||
if response:
|
||||
logger.info("Retrieved supplier details",
|
||||
tenant_id=tenant_id,
|
||||
supplier_id=supplier_id)
|
||||
return response
|
||||
except Exception as e:
|
||||
logger.error("Error getting supplier details",
|
||||
tenant_id=tenant_id,
|
||||
supplier_id=supplier_id,
|
||||
error=str(e))
|
||||
return None
|
||||
|
||||
# ================================================================
|
||||
# UTILITIES
|
||||
# ================================================================
|
||||
|
||||
async def health_check(self) -> bool:
|
||||
"""Check if procurement service is healthy"""
|
||||
try:
|
||||
# Issue a simple GET against the service health endpoint
|
||||
response = await self.get("health")
|
||||
return response is not None
|
||||
except Exception as e:
|
||||
logger.error("Procurement service health check failed", error=str(e))
|
||||
return False
|
||||
|
||||
# ================================================================
|
||||
# INTERNAL TRIGGER METHODS
|
||||
# ================================================================
|
||||
|
||||
async def trigger_delivery_tracking_internal(
|
||||
self,
|
||||
tenant_id: str
|
||||
) -> Optional[Dict[str, Any]]:
|
||||
"""
|
||||
Trigger delivery tracking for a tenant (internal service use only).
|
||||
|
||||
This method calls the internal endpoint which is protected by x-internal-service header.
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant ID to trigger delivery tracking for
|
||||
|
||||
Returns:
|
||||
Dict with trigger results or None if failed
|
||||
"""
|
||||
try:
|
||||
# Call internal endpoint via gateway using tenant-scoped URL pattern
|
||||
# Endpoint: /api/v1/tenants/{tenant_id}/procurement/internal/delivery-tracking/trigger
|
||||
result = await self._make_request(
|
||||
method="POST",
|
||||
endpoint="procurement/internal/delivery-tracking/trigger",
|
||||
tenant_id=tenant_id,
|
||||
data={},
|
||||
headers={"x-internal-service": "demo-session"}
|
||||
)
|
||||
|
||||
if result:
|
||||
logger.info(
|
||||
"Delivery tracking triggered successfully via internal endpoint",
|
||||
tenant_id=tenant_id,
|
||||
alerts_generated=result.get("alerts_generated", 0)
|
||||
)
|
||||
else:
|
||||
logger.warning(
|
||||
"Delivery tracking internal endpoint returned no result",
|
||||
tenant_id=tenant_id
|
||||
)
|
||||
|
||||
return result
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
"Error triggering delivery tracking via internal endpoint",
|
||||
tenant_id=tenant_id,
|
||||
error=str(e)
|
||||
)
|
||||
return None
|
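# --- Illustrative sketch of the internal trigger call path (not part of the
# --- original file). The x-internal-service header is what distinguishes this
# --- call from user-facing traffic; the caller name is an assumption.
#
#   async def _example_nightly_job(client: "ProcurementServiceClient") -> None:
#       result = await client.trigger_delivery_tracking_internal(
#           tenant_id="<tenant-uuid>",
#       )
#       if result is None:
#           # None signals either a failed request or an empty response;
#           # callers should treat it as "no alerts generated".
#           return
#       print("alerts generated:", result.get("alerts_generated", 0))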
||||
|
||||
# ================================================================
|
||||
# INTERNAL AI INSIGHTS METHODS
|
||||
# ================================================================
|
||||
|
||||
async def trigger_price_insights_internal(
|
||||
self,
|
||||
tenant_id: str
|
||||
) -> Optional[Dict[str, Any]]:
|
||||
"""
|
||||
Trigger price forecasting insights for a tenant (internal service use only).
|
||||
|
||||
This method calls the internal endpoint which is protected by x-internal-service header.
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant ID to trigger insights for
|
||||
|
||||
Returns:
|
||||
Dict with trigger results or None if failed
|
||||
"""
|
||||
try:
|
||||
result = await self._make_request(
|
||||
method="POST",
|
||||
endpoint="procurement/internal/ml/generate-price-insights",
|
||||
tenant_id=tenant_id,
|
||||
data={"tenant_id": tenant_id},
|
||||
headers={"x-internal-service": "demo-session"}
|
||||
)
|
||||
|
||||
if result:
|
||||
logger.info(
|
||||
"Price insights triggered successfully via internal endpoint",
|
||||
tenant_id=tenant_id,
|
||||
insights_posted=result.get("insights_posted", 0)
|
||||
)
|
||||
else:
|
||||
logger.warning(
|
||||
"Price insights internal endpoint returned no result",
|
||||
tenant_id=tenant_id
|
||||
)
|
||||
|
||||
return result
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
"Error triggering price insights via internal endpoint",
|
||||
tenant_id=tenant_id,
|
||||
error=str(e)
|
||||
)
|
||||
return None
|
||||
|
||||
|
||||
# Factory function for dependency injection
|
||||
def create_procurement_client(config: BaseServiceSettings, service_name: str = "unknown") -> ProcurementServiceClient:
|
||||
"""Create procurement service client instance"""
|
||||
return ProcurementServiceClient(config, service_name)
|
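# Illustrative wiring sketch (not part of the original file): how a calling
# service might construct the client via the factory and verify connectivity
# at startup. `settings` is assumed to be that service's BaseServiceSettings
# instance, and the service name below is illustrative.
#
#   async def _example_startup_check(settings: BaseServiceSettings) -> None:
#       client = create_procurement_client(settings, service_name="orchestrator")
#       if not await client.health_check():
#           raise RuntimeError("procurement service unreachable")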
||||
729
shared/clients/production_client.py
Executable file
@@ -0,0 +1,729 @@
|
||||
# shared/clients/production_client.py
|
||||
"""
|
||||
Production Service Client for Inter-Service Communication
|
||||
Provides access to production planning and batch management from other services
|
||||
"""
|
||||
|
||||
import structlog
|
||||
from typing import Dict, Any, Optional, List
|
||||
from uuid import UUID
|
||||
from shared.clients.base_service_client import BaseServiceClient
|
||||
from shared.config.base import BaseServiceSettings
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
|
||||
class ProductionServiceClient(BaseServiceClient):
|
||||
"""Client for communicating with the Production Service"""
|
||||
|
||||
def __init__(self, config: BaseServiceSettings, calling_service_name: str = "unknown"):
|
||||
super().__init__(calling_service_name, config)
|
||||
|
||||
def get_service_base_path(self) -> str:
|
||||
return "/api/v1"
|
||||
|
||||
# ================================================================
|
||||
# PRODUCTION PLANNING
|
||||
# ================================================================
|
||||
|
||||
async def generate_schedule(
|
||||
self,
|
||||
tenant_id: str,
|
||||
forecast_data: Dict[str, Any],
|
||||
inventory_data: Optional[Dict[str, Any]] = None,
|
||||
recipes_data: Optional[Dict[str, Any]] = None,
|
||||
target_date: Optional[str] = None,
|
||||
planning_horizon_days: int = 1
|
||||
) -> Optional[Dict[str, Any]]:
|
||||
"""
|
||||
Generate production schedule (called by Orchestrator).
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant ID
|
||||
forecast_data: Forecast data from forecasting service
|
||||
inventory_data: Optional inventory snapshot (NEW - to avoid duplicate fetching)
|
||||
recipes_data: Optional recipes snapshot (NEW - to avoid duplicate fetching)
|
||||
target_date: Optional target date
|
||||
planning_horizon_days: Number of days to plan
|
||||
|
||||
Returns:
|
||||
Dict with schedule_id, batches_created, etc.
|
||||
"""
|
||||
try:
|
||||
request_data = {
|
||||
"forecast_data": forecast_data,
|
||||
"target_date": target_date,
|
||||
"planning_horizon_days": planning_horizon_days
|
||||
}
|
||||
|
||||
# NEW: Include cached data if provided
|
||||
if inventory_data:
|
||||
request_data["inventory_data"] = inventory_data
|
||||
if recipes_data:
|
||||
request_data["recipes_data"] = recipes_data
|
||||
|
||||
result = await self.post(
|
||||
"production/operations/generate-schedule",
|
||||
data=request_data,
|
||||
tenant_id=tenant_id
|
||||
)
|
||||
|
||||
if result:
|
||||
logger.info(
|
||||
"Generated production schedule",
|
||||
schedule_id=result.get('schedule_id'),
|
||||
batches_created=result.get('batches_created', 0),
|
||||
tenant_id=tenant_id
|
||||
)
|
||||
|
||||
return result
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
"Error generating production schedule",
|
||||
error=str(e),
|
||||
tenant_id=tenant_id
|
||||
)
|
||||
return None
|
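# --- Illustrative call sketch (not part of the original file). The shape of
# --- forecast_data shown here is an assumption; the real contract is defined
# --- by the forecasting service.
#
#   async def _example_generate(client: "ProductionServiceClient") -> None:
#       result = await client.generate_schedule(
#           tenant_id="<tenant-uuid>",
#           forecast_data={"items": [{"product_id": "<id>", "quantity": 120}]},
#           target_date="2024-06-01",
#           planning_horizon_days=3,
#       )
#       # Passing inventory_data/recipes_data snapshots skips the duplicate
#       # fetches the production service would otherwise perform.
#       if result:
#           print(result.get("schedule_id"), result.get("batches_created", 0))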
||||
|
||||
async def get_production_requirements(self, tenant_id: str, date: Optional[str] = None) -> Optional[Dict[str, Any]]:
|
||||
"""Get production requirements for procurement planning"""
|
||||
try:
|
||||
params = {}
|
||||
if date:
|
||||
params["date"] = date
|
||||
|
||||
result = await self.get("production/requirements", tenant_id=tenant_id, params=params)
|
||||
if result:
|
||||
logger.info("Retrieved production requirements from production service",
|
||||
date=date, tenant_id=tenant_id)
|
||||
return result
|
||||
except Exception as e:
|
||||
logger.error("Error getting production requirements",
|
||||
error=str(e), tenant_id=tenant_id)
|
||||
return None
|
||||
|
||||
async def get_daily_requirements(self, tenant_id: str, date: Optional[str] = None) -> Optional[Dict[str, Any]]:
|
||||
"""Get daily production requirements"""
|
||||
try:
|
||||
params = {}
|
||||
if date:
|
||||
params["date"] = date
|
||||
|
||||
result = await self.get("production/daily-requirements", tenant_id=tenant_id, params=params)
|
||||
if result:
|
||||
logger.info("Retrieved daily production requirements from production service",
|
||||
date=date, tenant_id=tenant_id)
|
||||
return result
|
||||
except Exception as e:
|
||||
logger.error("Error getting daily production requirements",
|
||||
error=str(e), tenant_id=tenant_id)
|
||||
return None
|
||||
|
||||
async def get_production_schedule(self, tenant_id: str, start_date: Optional[str] = None, end_date: Optional[str] = None) -> Optional[Dict[str, Any]]:
|
||||
"""Get production schedule for a date range"""
|
||||
try:
|
||||
params = {}
|
||||
if start_date:
|
||||
params["start_date"] = start_date
|
||||
if end_date:
|
||||
params["end_date"] = end_date
|
||||
|
||||
result = await self.get("production/schedules", tenant_id=tenant_id, params=params)
|
||||
if result:
|
||||
logger.info("Retrieved production schedule from production service",
|
||||
start_date=start_date, end_date=end_date, tenant_id=tenant_id)
|
||||
return result
|
||||
except Exception as e:
|
||||
logger.error("Error getting production schedule",
|
||||
error=str(e), tenant_id=tenant_id)
|
||||
return None
|
||||
|
||||
# ================================================================
|
||||
# BATCH MANAGEMENT
|
||||
# ================================================================
|
||||
|
||||
async def get_active_batches(self, tenant_id: str) -> Optional[List[Dict[str, Any]]]:
|
||||
"""Get currently active production batches"""
|
||||
try:
|
||||
result = await self.get("production/batches/active", tenant_id=tenant_id)
|
||||
batches = result.get('batches', []) if result else []
|
||||
logger.info("Retrieved active production batches from production service",
|
||||
batches_count=len(batches), tenant_id=tenant_id)
|
||||
return batches
|
||||
except Exception as e:
|
||||
logger.error("Error getting active production batches",
|
||||
error=str(e), tenant_id=tenant_id)
|
||||
return []
|
||||
|
||||
async def create_production_batch(self, tenant_id: str, batch_data: Dict[str, Any]) -> Optional[Dict[str, Any]]:
|
||||
"""Create a new production batch"""
|
||||
try:
|
||||
result = await self.post("production/batches", data=batch_data, tenant_id=tenant_id)
|
||||
if result:
|
||||
logger.info("Created production batch",
|
||||
batch_id=result.get('id'),
|
||||
product_id=batch_data.get('product_id'),
|
||||
tenant_id=tenant_id)
|
||||
return result
|
||||
except Exception as e:
|
||||
logger.error("Error creating production batch",
|
||||
error=str(e), tenant_id=tenant_id)
|
||||
return None
|
||||
|
||||
async def update_batch_status(self, tenant_id: str, batch_id: str, status: str, actual_quantity: Optional[float] = None) -> Optional[Dict[str, Any]]:
|
||||
"""Update production batch status"""
|
||||
try:
|
||||
data = {"status": status}
|
||||
if actual_quantity is not None:
|
||||
data["actual_quantity"] = actual_quantity
|
||||
|
||||
result = await self.put(f"production/batches/{batch_id}/status", data=data, tenant_id=tenant_id)
|
||||
if result:
|
||||
logger.info("Updated production batch status",
|
||||
batch_id=batch_id, status=status, tenant_id=tenant_id)
|
||||
return result
|
||||
except Exception as e:
|
||||
logger.error("Error updating production batch status",
|
||||
error=str(e), batch_id=batch_id, tenant_id=tenant_id)
|
||||
return None
|
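# --- Illustrative batch lifecycle sketch (not part of the original file).
# --- The batch_data keys and the "COMPLETED" status value are assumptions
# --- beyond what the methods above log.
#
#   async def _example_batch_lifecycle(client: "ProductionServiceClient") -> None:
#       batch = await client.create_production_batch(
#           tenant_id="<tenant-uuid>",
#           batch_data={"product_id": "<product-id>", "planned_quantity": 50},
#       )
#       if batch:
#           # Record the actual output alongside the status transition.
#           await client.update_batch_status(
#               tenant_id="<tenant-uuid>",
#               batch_id=batch["id"],
#               status="COMPLETED",
#               actual_quantity=48.5,
#           )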
||||
|
||||
async def get_batch_details(self, tenant_id: str, batch_id: str) -> Optional[Dict[str, Any]]:
|
||||
"""Get detailed information about a production batch"""
|
||||
try:
|
||||
result = await self.get(f"production/batches/{batch_id}", tenant_id=tenant_id)
|
||||
if result:
|
||||
logger.info("Retrieved production batch details",
|
||||
batch_id=batch_id, tenant_id=tenant_id)
|
||||
return result
|
||||
except Exception as e:
|
||||
logger.error("Error getting production batch details",
|
||||
error=str(e), batch_id=batch_id, tenant_id=tenant_id)
|
||||
return None
|
||||
|
||||
# ================================================================
|
||||
# CAPACITY MANAGEMENT
|
||||
# ================================================================
|
||||
|
||||
async def get_capacity_status(self, tenant_id: str, date: Optional[str] = None) -> Optional[Dict[str, Any]]:
|
||||
"""Get production capacity status for a specific date"""
|
||||
try:
|
||||
params = {}
|
||||
if date:
|
||||
params["date"] = date
|
||||
|
||||
result = await self.get("production/capacity/status", tenant_id=tenant_id, params=params)
|
||||
if result:
|
||||
logger.info("Retrieved production capacity status",
|
||||
date=date, tenant_id=tenant_id)
|
||||
return result
|
||||
except Exception as e:
|
||||
logger.error("Error getting production capacity status",
|
||||
error=str(e), tenant_id=tenant_id)
|
||||
return None
|
||||
|
||||
async def check_capacity_availability(self, tenant_id: str, requirements: List[Dict[str, Any]]) -> Optional[Dict[str, Any]]:
|
||||
"""Check if production capacity is available for requirements"""
|
||||
try:
|
||||
result = await self.post("production/capacity/check-availability",
|
||||
{"requirements": requirements},
|
||||
tenant_id=tenant_id)
|
||||
if result:
|
||||
logger.info("Checked production capacity availability",
|
||||
requirements_count=len(requirements), tenant_id=tenant_id)
|
||||
return result
|
||||
except Exception as e:
|
||||
logger.error("Error checking production capacity availability",
|
||||
error=str(e), tenant_id=tenant_id)
|
||||
return None
|
||||
|
||||
# ================================================================
|
||||
# QUALITY CONTROL
|
||||
# ================================================================
|
||||
|
||||
async def record_quality_check(self, tenant_id: str, batch_id: str, quality_data: Dict[str, Any]) -> Optional[Dict[str, Any]]:
|
||||
"""Record quality control results for a batch"""
|
||||
try:
|
||||
result = await self.post(f"production/batches/{batch_id}/quality-check",
|
||||
data=quality_data,
|
||||
tenant_id=tenant_id)
|
||||
if result:
|
||||
logger.info("Recorded quality check for production batch",
|
||||
batch_id=batch_id, tenant_id=tenant_id)
|
||||
return result
|
||||
except Exception as e:
|
||||
logger.error("Error recording quality check",
|
||||
error=str(e), batch_id=batch_id, tenant_id=tenant_id)
|
||||
return None
|
||||
|
||||
async def get_yield_metrics(self, tenant_id: str, start_date: str, end_date: str) -> Optional[Dict[str, Any]]:
|
||||
"""Get production yield metrics for analysis"""
|
||||
try:
|
||||
params = {
|
||||
"start_date": start_date,
|
||||
"end_date": end_date
|
||||
}
|
||||
result = await self.get("production/analytics/yield-metrics", tenant_id=tenant_id, params=params)
|
||||
if result:
|
||||
logger.info("Retrieved production yield metrics",
|
||||
start_date=start_date, end_date=end_date, tenant_id=tenant_id)
|
||||
return result
|
||||
except Exception as e:
|
||||
logger.error("Error getting production yield metrics",
|
||||
error=str(e), tenant_id=tenant_id)
|
||||
return None
|
||||
|
||||
# ================================================================
|
||||
# DASHBOARD AND ANALYTICS
|
||||
# ================================================================
|
||||
|
||||
async def get_dashboard_summary(self, tenant_id: str) -> Optional[Dict[str, Any]]:
|
||||
"""Get production dashboard summary data"""
|
||||
try:
|
||||
result = await self.get("production/dashboard/summary", tenant_id=tenant_id)
|
||||
if result:
|
||||
logger.info("Retrieved production dashboard summary",
|
||||
tenant_id=tenant_id)
|
||||
return result
|
||||
except Exception as e:
|
||||
logger.error("Error getting production dashboard summary",
|
||||
error=str(e), tenant_id=tenant_id)
|
||||
return None
|
||||
|
||||
async def get_efficiency_metrics(self, tenant_id: str, period: str = "last_30_days") -> Optional[Dict[str, Any]]:
|
||||
"""Get production efficiency metrics"""
|
||||
try:
|
||||
params = {"period": period}
|
||||
result = await self.get("production/analytics/efficiency", tenant_id=tenant_id, params=params)
|
||||
if result:
|
||||
logger.info("Retrieved production efficiency metrics",
|
||||
period=period, tenant_id=tenant_id)
|
||||
return result
|
||||
except Exception as e:
|
||||
logger.error("Error getting production efficiency metrics",
|
||||
error=str(e), tenant_id=tenant_id)
|
||||
return None
|
||||
|
||||
# ================================================================
|
||||
# ALERTS AND NOTIFICATIONS
|
||||
# ================================================================
|
||||
|
||||
async def get_production_alerts(self, tenant_id: str) -> Optional[List[Dict[str, Any]]]:
|
||||
"""Get production-related alerts"""
|
||||
try:
|
||||
result = await self.get("production/alerts", tenant_id=tenant_id)
|
||||
alerts = result.get('alerts', []) if result else []
|
||||
logger.info("Retrieved production alerts",
|
||||
alerts_count=len(alerts), tenant_id=tenant_id)
|
||||
return alerts
|
||||
except Exception as e:
|
||||
logger.error("Error getting production alerts",
|
||||
error=str(e), tenant_id=tenant_id)
|
||||
return []
|
||||
|
||||
async def acknowledge_alert(self, tenant_id: str, alert_id: str) -> Optional[Dict[str, Any]]:
|
||||
"""Acknowledge a production-related alert"""
|
||||
try:
|
||||
result = await self.post(f"production/alerts/{alert_id}/acknowledge", data={}, tenant_id=tenant_id)
|
||||
if result:
|
||||
logger.info("Acknowledged production alert",
|
||||
alert_id=alert_id, tenant_id=tenant_id)
|
||||
return result
|
||||
except Exception as e:
|
||||
logger.error("Error acknowledging production alert",
|
||||
error=str(e), alert_id=alert_id, tenant_id=tenant_id)
|
||||
return None
|
||||
|
||||
# ================================================================
|
||||
# WASTE AND SUSTAINABILITY ANALYTICS
|
||||
# ================================================================
|
||||
|
||||
async def get_waste_analytics(
|
||||
self,
|
||||
tenant_id: str,
|
||||
start_date: str,
|
||||
end_date: str
|
||||
) -> Optional[Dict[str, Any]]:
|
||||
"""
|
||||
Get production waste analytics for sustainability reporting
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant ID
|
||||
start_date: Start date (ISO format)
|
||||
end_date: End date (ISO format)
|
||||
|
||||
Returns:
|
||||
Dictionary with waste analytics data:
|
||||
- total_production_waste: Total waste in kg
|
||||
- total_defects: Total defect waste in kg
|
||||
- total_planned: Total planned production in kg
|
||||
- total_actual: Total actual production in kg
|
||||
- ai_assisted_batches: Number of AI-assisted batches
|
||||
"""
|
||||
try:
|
||||
params = {
|
||||
"start_date": start_date,
|
||||
"end_date": end_date
|
||||
}
|
||||
result = await self.get("production/waste-analytics", tenant_id=tenant_id, params=params)
|
||||
if result:
|
||||
logger.info("Retrieved production waste analytics",
|
||||
tenant_id=tenant_id,
|
||||
start_date=start_date,
|
||||
end_date=end_date)
|
||||
return result
|
||||
except Exception as e:
|
||||
logger.error("Error getting production waste analytics",
|
||||
error=str(e), tenant_id=tenant_id)
|
||||
return None
|
||||
|
||||
async def get_baseline(self, tenant_id: str) -> Optional[Dict[str, Any]]:
|
||||
"""
|
||||
Get baseline waste percentage for SDG compliance calculations
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant ID
|
||||
|
||||
Returns:
|
||||
Dictionary with baseline data:
|
||||
- waste_percentage: Baseline waste percentage
|
||||
- period: Information about the baseline period
|
||||
- data_available: Whether real data is available
|
||||
- total_production_kg: Total production during baseline
|
||||
- total_waste_kg: Total waste during baseline
|
||||
"""
|
||||
try:
|
||||
result = await self.get("production/baseline", tenant_id=tenant_id)
|
||||
if result:
|
||||
logger.info("Retrieved production baseline data",
|
||||
tenant_id=tenant_id,
|
||||
data_available=result.get('data_available', False))
|
||||
return result
|
||||
except Exception as e:
|
||||
logger.error("Error getting production baseline",
|
||||
error=str(e), tenant_id=tenant_id)
|
||||
return None
|
||||
|
||||
# ================================================================
|
||||
# ML INSIGHTS: Yield Prediction
|
||||
# ================================================================
|
||||
|
||||
async def trigger_yield_prediction(
|
||||
self,
|
||||
tenant_id: str,
|
||||
recipe_ids: Optional[List[str]] = None,
|
||||
lookback_days: int = 90,
|
||||
min_history_runs: int = 30
|
||||
) -> Optional[Dict[str, Any]]:
|
||||
"""
|
||||
Trigger yield prediction for production recipes.
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant UUID
|
||||
recipe_ids: Specific recipe IDs to analyze. If None, analyzes all recipes
|
||||
lookback_days: Days of historical production to analyze (30-365)
|
||||
min_history_runs: Minimum production runs required (10-100)
|
||||
|
||||
Returns:
|
||||
Dict with prediction results including insights posted
|
||||
"""
|
||||
try:
|
||||
data = {
|
||||
"recipe_ids": recipe_ids,
|
||||
"lookback_days": lookback_days,
|
||||
"min_history_runs": min_history_runs
|
||||
}
|
||||
result = await self.post("production/ml/insights/predict-yields", data=data, tenant_id=tenant_id)
|
||||
if result:
|
||||
logger.info("Triggered yield prediction",
|
||||
recipes_analyzed=result.get('recipes_analyzed', 0),
|
||||
insights_posted=result.get('total_insights_posted', 0),
|
||||
tenant_id=tenant_id)
|
||||
return result
|
||||
except Exception as e:
|
||||
logger.error("Error triggering yield prediction",
|
||||
error=str(e), tenant_id=tenant_id)
|
||||
return None
|
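# --- Illustrative trigger sketch (not part of the original file), staying
# --- within the documented parameter bounds (lookback 30-365 days, minimum
# --- 10-100 historical runs).
#
#   async def _example_yield_prediction(client: "ProductionServiceClient") -> None:
#       result = await client.trigger_yield_prediction(
#           tenant_id="<tenant-uuid>",
#           recipe_ids=None,          # None analyzes all recipes
#           lookback_days=180,
#           min_history_runs=30,
#       )
#       if result:
#           print(result.get("recipes_analyzed", 0),
#                 result.get("total_insights_posted", 0))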
||||
|
||||
# ================================================================
|
||||
# DASHBOARD METHODS
|
||||
# ================================================================
|
||||
|
||||
async def get_production_summary_batch(
|
||||
self,
|
||||
tenant_ids: List[str]
|
||||
) -> Dict[str, Any]:
|
||||
"""
|
||||
Get production summaries for multiple tenants in a single request.
|
||||
|
||||
Phase 2 optimization: Eliminates N+1 query patterns for enterprise dashboards.
|
||||
|
||||
Args:
|
||||
tenant_ids: List of tenant IDs to fetch
|
||||
|
||||
Returns:
|
||||
Dict mapping tenant_id -> production summary
|
||||
"""
|
||||
try:
|
||||
if not tenant_ids:
|
||||
return {}
|
||||
|
||||
if len(tenant_ids) > 100:
|
||||
logger.warning("Batch request exceeds max tenant limit", requested=len(tenant_ids))
|
||||
tenant_ids = tenant_ids[:100]
|
||||
|
||||
result = await self.post(
|
||||
"production/batch/production-summary",
|
||||
data={"tenant_ids": tenant_ids},
|
||||
tenant_id=tenant_ids[0] # Use first tenant for auth context
|
||||
)
|
||||
|
||||
summaries = result if isinstance(result, dict) else {}
|
||||
|
||||
logger.info(
|
||||
"Batch retrieved production summaries",
|
||||
requested=len(tenant_ids),
|
||||
found=len(summaries)
|
||||
)
|
||||
|
||||
return summaries
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
"Error batch fetching production summaries",
|
||||
error=str(e),
|
||||
tenant_count=len(tenant_ids)
|
||||
)
|
||||
return {}
|
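# --- Illustrative sketch (not part of the original file): a caller with more
# --- than 100 tenants should chunk the list itself, since this method logs a
# --- warning and truncates to the first 100 IDs.
#
#   async def _example_chunked_summaries(
#       client: "ProductionServiceClient", tenant_ids: List[str]
#   ) -> Dict[str, Any]:
#       summaries: Dict[str, Any] = {}
#       for i in range(0, len(tenant_ids), 100):
#           summaries.update(
#               await client.get_production_summary_batch(tenant_ids[i:i + 100])
#           )
#       return summaries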
||||
|
||||
async def get_todays_batches(
|
||||
self,
|
||||
tenant_id: str
|
||||
) -> Optional[Dict[str, Any]]:
|
||||
"""
|
||||
Get today's production batches for dashboard timeline
|
||||
|
||||
For demo compatibility: Queries all recent batches and filters for actionable ones
|
||||
scheduled for today, since demo session dates are adjusted relative to session creation time.
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant ID
|
||||
|
||||
Returns:
|
||||
Dict with ProductionBatchListResponse: {"batches": [...], "total_count": n, "page": 1, "page_size": n}
|
||||
"""
|
||||
try:
|
||||
from datetime import datetime, timezone, timedelta
|
||||
|
||||
# Get today's date range (start of day to end of day in UTC)
|
||||
now = datetime.now(timezone.utc)
|
||||
today_start = now.replace(hour=0, minute=0, second=0, microsecond=0)
|
||||
today_end = today_start + timedelta(days=1)
|
||||
|
||||
# Query all batches without date/status filter for demo compatibility
|
||||
# The dashboard will filter for PENDING, IN_PROGRESS, or SCHEDULED
|
||||
result = await self.get(
|
||||
"/production/batches",
|
||||
tenant_id=tenant_id,
|
||||
params={"page_size": 100}
|
||||
)
|
||||
|
||||
if result and "batches" in result:
|
||||
# Filter for actionable batches scheduled for TODAY
|
||||
actionable_statuses = {"PENDING", "IN_PROGRESS", "SCHEDULED"}
|
||||
filtered_batches = []
|
||||
|
||||
for batch in result["batches"]:
|
||||
# Check if batch is actionable
|
||||
if batch.get("status") not in actionable_statuses:
|
||||
continue
|
||||
|
||||
# Check if batch is scheduled for today
|
||||
# Include batches that START today OR END today (for overnight batches)
|
||||
planned_start = batch.get("planned_start_time")
|
||||
planned_end = batch.get("planned_end_time")
|
||||
|
||||
include_batch = False
|
||||
|
||||
if planned_start:
|
||||
# Parse the start date string
|
||||
if isinstance(planned_start, str):
|
||||
planned_start = datetime.fromisoformat(planned_start.replace('Z', '+00:00'))
|
||||
|
||||
# Include if batch starts today
|
||||
if today_start <= planned_start < today_end:
|
||||
include_batch = True
|
||||
|
||||
# Also check if batch ends today (for overnight batches)
|
||||
if not include_batch and planned_end:
|
||||
if isinstance(planned_end, str):
|
||||
planned_end = datetime.fromisoformat(planned_end.replace('Z', '+00:00'))
|
||||
|
||||
# Include if batch ends today (even if it started yesterday)
|
||||
if today_start <= planned_end < today_end:
|
||||
include_batch = True
|
||||
|
||||
if include_batch:
|
||||
filtered_batches.append(batch)
|
||||
|
||||
# Return filtered result
|
||||
return {
|
||||
**result,
|
||||
"batches": filtered_batches,
|
||||
"total_count": len(filtered_batches)
|
||||
}
|
||||
|
||||
return result
|
||||
except Exception as e:
|
||||
logger.error("Error fetching today's batches", error=str(e), tenant_id=tenant_id)
|
||||
return None
|
||||
|
||||
async def get_production_batches_by_status(
|
||||
self,
|
||||
tenant_id: str,
|
||||
status: str,
|
||||
limit: int = 100
|
||||
) -> Optional[Dict[str, Any]]:
|
||||
"""
|
||||
Get production batches filtered by status for dashboard
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant ID
|
||||
status: Batch status (e.g., "ON_HOLD", "IN_PROGRESS")
|
||||
limit: Maximum number of batches to return
|
||||
|
||||
Returns:
|
||||
Dict with ProductionBatchListResponse: {"batches": [...], "total_count": n, "page": 1, "page_size": n}
|
||||
"""
|
||||
try:
|
||||
return await self.get(
|
||||
"/production/batches",
|
||||
tenant_id=tenant_id,
|
||||
params={"status": status, "page_size": limit}
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error("Error fetching production batches", error=str(e),
|
||||
status=status, tenant_id=tenant_id)
|
||||
return None
|
||||
|
||||
# ================================================================
|
||||
# UTILITY METHODS
|
||||
# ================================================================
|
||||
|
||||
async def health_check(self) -> bool:
|
||||
"""Check if production service is healthy"""
|
||||
try:
|
||||
result = await self.get("../health") # Health endpoint is not tenant-scoped
|
||||
return result is not None
|
||||
except Exception as e:
|
||||
logger.error("Production service health check failed", error=str(e))
|
||||
return False
|
||||
|
||||
# ================================================================
|
||||
# INTERNAL TRIGGER METHODS
|
||||
# ================================================================
|
||||
|
||||
async def trigger_production_alerts_internal(
|
||||
self,
|
||||
tenant_id: str
|
||||
) -> Optional[Dict[str, Any]]:
|
||||
"""
|
||||
Trigger production alerts for a tenant (internal service use only).
|
||||
|
||||
This method calls the internal endpoint which is protected by x-internal-service header.
|
||||
Includes both production alerts and equipment maintenance checks.
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant ID to trigger alerts for
|
||||
|
||||
Returns:
|
||||
Dict with trigger results or None if failed
|
||||
"""
|
||||
try:
|
||||
# Call internal endpoint via gateway using tenant-scoped URL pattern
|
||||
# Endpoint: /api/v1/tenants/{tenant_id}/production/internal/alerts/trigger
|
||||
result = await self._make_request(
|
||||
method="POST",
|
||||
endpoint="production/internal/alerts/trigger",
|
||||
tenant_id=tenant_id,
|
||||
data={},
|
||||
headers={"x-internal-service": "demo-session"}
|
||||
)
|
||||
|
||||
if result:
|
||||
logger.info(
|
||||
"Production alerts triggered successfully via internal endpoint",
|
||||
tenant_id=tenant_id,
|
||||
alerts_generated=result.get("alerts_generated", 0)
|
||||
)
|
||||
else:
|
||||
logger.warning(
|
||||
"Production alerts internal endpoint returned no result",
|
||||
tenant_id=tenant_id
|
||||
)
|
||||
|
||||
return result
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
"Error triggering production alerts via internal endpoint",
|
||||
tenant_id=tenant_id,
|
||||
error=str(e)
|
||||
)
|
||||
return None
|
||||
|
||||
# ================================================================
|
||||
# INTERNAL AI INSIGHTS METHODS
|
||||
# ================================================================
|
||||
|
||||
async def trigger_yield_insights_internal(
|
||||
self,
|
||||
tenant_id: str
|
||||
) -> Optional[Dict[str, Any]]:
|
||||
"""
|
||||
Trigger yield improvement insights for a tenant (internal service use only).
|
||||
|
||||
This method calls the internal endpoint which is protected by x-internal-service header.
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant ID to trigger insights for
|
||||
|
||||
Returns:
|
||||
Dict with trigger results or None if failed
|
||||
"""
|
||||
try:
|
||||
result = await self._make_request(
|
||||
method="POST",
|
||||
endpoint="production/internal/ml/generate-yield-insights",
|
||||
tenant_id=tenant_id,
|
||||
data={"tenant_id": tenant_id},
|
||||
headers={"x-internal-service": "demo-session"}
|
||||
)
|
||||
|
||||
if result:
|
||||
logger.info(
|
||||
"Yield insights triggered successfully via internal endpoint",
|
||||
tenant_id=tenant_id,
|
||||
insights_posted=result.get("insights_posted", 0)
|
||||
)
|
||||
else:
|
||||
logger.warning(
|
||||
"Yield insights internal endpoint returned no result",
|
||||
tenant_id=tenant_id
|
||||
)
|
||||
|
||||
return result
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
"Error triggering yield insights via internal endpoint",
|
||||
tenant_id=tenant_id,
|
||||
error=str(e)
|
||||
)
|
||||
return None
|
||||
|
||||
|
||||
# Factory function for dependency injection
|
||||
def create_production_client(config: BaseServiceSettings) -> ProductionServiceClient:
|
||||
"""Create production service client instance"""
|
||||
return ProductionServiceClient(config)
|
||||
294
shared/clients/recipes_client.py
Executable file
@@ -0,0 +1,294 @@
|
||||
# shared/clients/recipes_client.py
|
||||
"""
|
||||
Recipes Service Client for Inter-Service Communication
|
||||
Provides access to recipe and ingredient requirements from other services
|
||||
"""
|
||||
|
||||
import structlog
|
||||
from typing import Dict, Any, Optional, List
|
||||
from uuid import UUID
|
||||
from shared.clients.base_service_client import BaseServiceClient
|
||||
from shared.config.base import BaseServiceSettings
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
|
||||
class RecipesServiceClient(BaseServiceClient):
|
||||
"""Client for communicating with the Recipes Service"""
|
||||
|
||||
def __init__(self, config: BaseServiceSettings, calling_service_name: str = "unknown"):
|
||||
super().__init__(calling_service_name, config)
|
||||
|
||||
def get_service_base_path(self) -> str:
|
||||
return "/api/v1"
|
||||
|
||||
# ================================================================
|
||||
# RECIPE MANAGEMENT
|
||||
# ================================================================
|
||||
|
||||
async def get_recipe_by_id(self, tenant_id: str, recipe_id: str) -> Optional[Dict[str, Any]]:
|
||||
"""Get recipe details by ID"""
|
||||
try:
|
||||
result = await self.get(f"recipes/{recipe_id}", tenant_id=tenant_id)
|
||||
if result:
|
||||
logger.info("Retrieved recipe details from recipes service",
|
||||
recipe_id=recipe_id, tenant_id=tenant_id)
|
||||
return result
|
||||
except Exception as e:
|
||||
logger.error("Error getting recipe details",
|
||||
error=str(e), recipe_id=recipe_id, tenant_id=tenant_id)
|
||||
return None
|
||||
|
||||
async def get_recipes_by_product_ids(self, tenant_id: str, product_ids: List[str]) -> Optional[List[Dict[str, Any]]]:
|
||||
"""Get recipes for multiple products"""
|
||||
try:
|
||||
params = {"product_ids": ",".join(product_ids)}
|
||||
result = await self.get("recipes/by-products", tenant_id=tenant_id, params=params)
|
||||
recipes = result.get('recipes', []) if result else []
|
||||
logger.info("Retrieved recipes by product IDs from recipes service",
|
||||
product_ids_count=len(product_ids),
|
||||
recipes_count=len(recipes),
|
||||
tenant_id=tenant_id)
|
||||
return recipes
|
||||
except Exception as e:
|
||||
logger.error("Error getting recipes by product IDs",
|
||||
error=str(e), tenant_id=tenant_id)
|
||||
return []
|
||||
|
||||
async def get_all_recipes(self, tenant_id: str, is_active: Optional[bool] = True) -> Optional[List[Dict[str, Any]]]:
|
||||
"""Get all recipes for a tenant"""
|
||||
try:
|
||||
params = {}
|
||||
if is_active is not None:
|
||||
params["is_active"] = is_active
|
||||
|
||||
result = await self.get_paginated("recipes", tenant_id=tenant_id, params=params)
|
||||
logger.info("Retrieved all recipes from recipes service",
|
||||
recipes_count=len(result), tenant_id=tenant_id)
|
||||
return result
|
||||
except Exception as e:
|
||||
logger.error("Error getting all recipes",
|
||||
error=str(e), tenant_id=tenant_id)
|
||||
return []
|
||||
|
||||
# ================================================================
|
||||
# INGREDIENT REQUIREMENTS
|
||||
# ================================================================
|
||||
|
||||
async def get_recipe_requirements(self, tenant_id: str, recipe_ids: Optional[List[str]] = None) -> Optional[Dict[str, Any]]:
|
||||
"""Get ingredient requirements for recipes"""
|
||||
try:
|
||||
params = {}
|
||||
if recipe_ids:
|
||||
params["recipe_ids"] = ",".join(recipe_ids)
|
||||
|
||||
result = await self.get("recipes/requirements", tenant_id=tenant_id, params=params)
|
||||
if result:
|
||||
logger.info("Retrieved recipe requirements from recipes service",
|
||||
recipe_ids_count=len(recipe_ids) if recipe_ids else 0,
|
||||
tenant_id=tenant_id)
|
||||
return result
|
||||
except Exception as e:
|
||||
logger.error("Error getting recipe requirements",
|
||||
error=str(e), tenant_id=tenant_id)
|
||||
return None
|
||||
|
||||
async def get_ingredient_requirements(self, tenant_id: str, product_ids: Optional[List[str]] = None) -> Optional[Dict[str, Any]]:
|
||||
"""Get ingredient requirements for production planning"""
|
||||
try:
|
||||
params = {}
|
||||
if product_ids:
|
||||
params["product_ids"] = ",".join(product_ids)
|
||||
|
||||
result = await self.get("recipes/ingredient-requirements", tenant_id=tenant_id, params=params)
|
||||
if result:
|
||||
logger.info("Retrieved ingredient requirements from recipes service",
|
||||
product_ids_count=len(product_ids) if product_ids else 0,
|
||||
tenant_id=tenant_id)
|
||||
return result
|
||||
except Exception as e:
|
||||
logger.error("Error getting ingredient requirements",
|
||||
error=str(e), tenant_id=tenant_id)
|
||||
return None
|
||||
|
||||
async def calculate_ingredients_for_quantity(self, tenant_id: str, recipe_id: str, quantity: float) -> Optional[Dict[str, Any]]:
|
||||
"""Calculate ingredient quantities needed for a specific production quantity"""
|
||||
try:
|
||||
data = {
|
||||
"recipe_id": recipe_id,
|
||||
"quantity": quantity
|
||||
}
|
||||
result = await self.post("recipes/operations/calculate-ingredients", data=data, tenant_id=tenant_id)
|
||||
if result:
|
||||
logger.info("Calculated ingredient quantities from recipes service",
|
||||
recipe_id=recipe_id, quantity=quantity, tenant_id=tenant_id)
|
||||
return result
|
||||
except Exception as e:
|
||||
logger.error("Error calculating ingredient quantities",
|
||||
error=str(e), recipe_id=recipe_id, tenant_id=tenant_id)
|
||||
return None
|
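# --- Illustrative usage sketch (not part of the original file). Scaling a
# --- recipe to a production quantity is a single POST; the response keys
# --- iterated below are assumptions beyond what the method logs.
#
#   async def _example_scale_recipe(client: "RecipesServiceClient") -> None:
#       result = await client.calculate_ingredients_for_quantity(
#           tenant_id="<tenant-uuid>",
#           recipe_id="<recipe-uuid>",
#           quantity=250.0,
#       )
#       if result:
#           for ingredient in result.get("ingredients", []):
#               print(ingredient.get("name"), ingredient.get("required_quantity"))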
||||
|
||||
async def calculate_batch_ingredients(self, tenant_id: str, production_requests: List[Dict[str, Any]]) -> Optional[Dict[str, Any]]:
|
||||
"""Calculate total ingredient requirements for multiple production batches"""
|
||||
try:
|
||||
data = {"production_requests": production_requests}
|
||||
result = await self.post("recipes/operations/calculate-batch-ingredients", data=data, tenant_id=tenant_id)
|
||||
if result:
|
||||
logger.info("Calculated batch ingredient requirements from recipes service",
|
||||
batches_count=len(production_requests), tenant_id=tenant_id)
|
||||
return result
|
||||
except Exception as e:
|
||||
logger.error("Error calculating batch ingredient requirements",
|
||||
error=str(e), tenant_id=tenant_id)
|
||||
return None
|
||||
|
||||
# ================================================================
|
||||
# PRODUCTION SUPPORT
|
||||
# ================================================================
|
||||
|
||||
async def get_production_instructions(self, tenant_id: str, recipe_id: str) -> Optional[Dict[str, Any]]:
|
||||
"""Get detailed production instructions for a recipe"""
|
||||
try:
|
||||
result = await self.get(f"recipes/{recipe_id}/production-instructions", tenant_id=tenant_id)
|
||||
if result:
|
||||
logger.info("Retrieved production instructions from recipes service",
|
||||
recipe_id=recipe_id, tenant_id=tenant_id)
|
||||
return result
|
||||
except Exception as e:
|
||||
logger.error("Error getting production instructions",
|
||||
error=str(e), recipe_id=recipe_id, tenant_id=tenant_id)
|
||||
return None
|
||||
|
||||
async def get_recipe_yield_info(self, tenant_id: str, recipe_id: str) -> Optional[Dict[str, Any]]:
|
||||
"""Get yield information for a recipe"""
|
||||
try:
|
||||
result = await self.get(f"recipes/{recipe_id}/yield", tenant_id=tenant_id)
|
||||
if result:
|
||||
logger.info("Retrieved recipe yield info from recipes service",
|
||||
recipe_id=recipe_id, tenant_id=tenant_id)
|
||||
return result
|
||||
except Exception as e:
|
||||
logger.error("Error getting recipe yield info",
|
||||
error=str(e), recipe_id=recipe_id, tenant_id=tenant_id)
|
||||
return None
|
||||
|
||||
async def validate_recipe_feasibility(self, tenant_id: str, recipe_id: str, quantity: float) -> Optional[Dict[str, Any]]:
|
||||
"""Validate if a recipe can be produced in the requested quantity"""
|
||||
try:
|
||||
data = {
|
||||
"recipe_id": recipe_id,
|
||||
"quantity": quantity
|
||||
}
|
||||
result = await self.post("recipes/operations/validate-feasibility", data=data, tenant_id=tenant_id)
|
||||
if result:
|
||||
logger.info("Validated recipe feasibility from recipes service",
|
||||
recipe_id=recipe_id, quantity=quantity, tenant_id=tenant_id)
|
||||
return result
|
||||
except Exception as e:
|
||||
logger.error("Error validating recipe feasibility",
|
||||
error=str(e), recipe_id=recipe_id, tenant_id=tenant_id)
|
||||
return None
|
||||
|
||||
# ================================================================
|
||||
# ANALYTICS AND OPTIMIZATION
|
||||
# ================================================================
|
||||
|
||||
async def get_recipe_cost_analysis(self, tenant_id: str, recipe_id: str) -> Optional[Dict[str, Any]]:
|
||||
"""Get cost analysis for a recipe"""
|
||||
try:
|
||||
result = await self.get(f"recipes/{recipe_id}/cost-analysis", tenant_id=tenant_id)
|
||||
if result:
|
||||
logger.info("Retrieved recipe cost analysis from recipes service",
|
||||
recipe_id=recipe_id, tenant_id=tenant_id)
|
||||
return result
|
||||
except Exception as e:
|
||||
logger.error("Error getting recipe cost analysis",
|
||||
error=str(e), recipe_id=recipe_id, tenant_id=tenant_id)
|
||||
return None
|
||||
|
||||
async def optimize_production_batch(self, tenant_id: str, requirements: List[Dict[str, Any]]) -> Optional[Dict[str, Any]]:
|
||||
"""Optimize production batch to minimize waste and cost"""
|
||||
try:
|
||||
data = {"requirements": requirements}
|
||||
result = await self.post("recipes/operations/optimize-batch", data=data, tenant_id=tenant_id)
|
||||
if result:
|
||||
logger.info("Optimized production batch from recipes service",
|
||||
requirements_count=len(requirements), tenant_id=tenant_id)
|
||||
return result
|
||||
except Exception as e:
|
||||
logger.error("Error optimizing production batch",
|
||||
error=str(e), tenant_id=tenant_id)
|
||||
return None
|
||||
|
||||
# ================================================================
|
||||
# DASHBOARD AND ANALYTICS
|
||||
# ================================================================
|
||||
|
||||
async def get_dashboard_summary(self, tenant_id: str) -> Optional[Dict[str, Any]]:
|
||||
"""Get recipes dashboard summary data"""
|
||||
try:
|
||||
result = await self.get("recipes/dashboard/summary", tenant_id=tenant_id)
|
||||
if result:
|
||||
logger.info("Retrieved recipes dashboard summary",
|
||||
tenant_id=tenant_id)
|
||||
return result
|
||||
except Exception as e:
|
||||
logger.error("Error getting recipes dashboard summary",
|
||||
error=str(e), tenant_id=tenant_id)
|
||||
return None
|
||||
|
||||
async def get_popular_recipes(self, tenant_id: str, period: str = "last_30_days") -> Optional[List[Dict[str, Any]]]:
|
||||
"""Get most popular recipes based on production frequency"""
|
||||
try:
|
||||
params = {"period": period}
|
||||
result = await self.get("recipes/analytics/popular-recipes", tenant_id=tenant_id, params=params)
|
||||
recipes = result.get('recipes', []) if result else []
|
||||
logger.info("Retrieved popular recipes from recipes service",
|
||||
period=period, recipes_count=len(recipes), tenant_id=tenant_id)
|
||||
return recipes
|
||||
except Exception as e:
|
||||
logger.error("Error getting popular recipes",
|
||||
error=str(e), tenant_id=tenant_id)
|
||||
return []
|
||||
|
||||
# ================================================================
|
||||
# COUNT AND STATISTICS
|
||||
# ================================================================
|
||||
|
||||
async def count_recipes(self, tenant_id: str) -> int:
|
||||
"""
|
||||
Get the count of recipes for a tenant
|
||||
Used for subscription limit tracking
|
||||
|
||||
Returns:
|
||||
int: Number of recipes for the tenant
|
||||
"""
|
||||
try:
|
||||
result = await self.get("recipes/count", tenant_id=tenant_id)
|
||||
count = result.get('count', 0) if result else 0
|
||||
logger.info("Retrieved recipe count from recipes service",
|
||||
count=count, tenant_id=tenant_id)
|
||||
return count
|
||||
except Exception as e:
|
||||
logger.error("Error getting recipe count",
|
||||
error=str(e), tenant_id=tenant_id)
|
||||
return 0
|
||||
|
||||
# ================================================================
|
||||
# UTILITY METHODS
|
||||
# ================================================================
|
||||
|
||||
async def health_check(self) -> bool:
|
||||
"""Check if recipes service is healthy"""
|
||||
try:
|
||||
result = await self.get("../health") # Health endpoint is not tenant-scoped
|
||||
return result is not None
|
||||
except Exception as e:
|
||||
logger.error("Recipes service health check failed", error=str(e))
|
||||
return False
|
||||
|
||||
|
||||
# Factory function for dependency injection
|
||||
def create_recipes_client(config: BaseServiceSettings, service_name: str = "unknown") -> RecipesServiceClient:
|
||||
"""Create recipes service client instance"""
|
||||
return RecipesServiceClient(config, calling_service_name=service_name)
|
||||
344
shared/clients/sales_client.py
Executable file
@@ -0,0 +1,344 @@
|
||||
# shared/clients/sales_client.py
|
||||
"""
|
||||
Sales Service Client
|
||||
Handles all API calls to the sales service
|
||||
"""
|
||||
|
||||
import httpx
|
||||
import structlog
|
||||
from datetime import date
|
||||
from typing import Dict, Any, Optional, List, Union
|
||||
from .base_service_client import BaseServiceClient
|
||||
from shared.config.base import BaseServiceSettings
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
|
||||
class SalesServiceClient(BaseServiceClient):
|
||||
"""Client for communicating with the sales service"""
|
||||
|
||||
def __init__(self, config: BaseServiceSettings, calling_service_name: str = "unknown"):
|
||||
super().__init__(calling_service_name, config)
|
||||
self.service_url = config.SALES_SERVICE_URL
|
||||
|
||||
def get_service_base_path(self) -> str:
|
||||
return "/api/v1"
|
||||
|
||||
# ================================================================
|
||||
# SALES DATA (with advanced pagination support)
|
||||
# ================================================================
|
||||
|
||||
async def get_sales_data(
|
||||
self,
|
||||
tenant_id: str,
|
||||
start_date: Optional[str] = None,
|
||||
end_date: Optional[str] = None,
|
||||
product_id: Optional[str] = None,
|
||||
aggregation: str = "daily"
|
||||
) -> Optional[List[Dict[str, Any]]]:
|
||||
"""Get sales data for a date range"""
|
||||
params = {"aggregation": aggregation}
|
||||
if start_date:
|
||||
params["start_date"] = start_date
|
||||
if end_date:
|
||||
params["end_date"] = end_date
|
||||
if product_id:
|
||||
params["product_id"] = product_id
|
||||
|
||||
result = await self.get("sales/sales", tenant_id=tenant_id, params=params)
|
||||
|
||||
# Handle both list and dict responses
|
||||
if result is None:
|
||||
return None
|
||||
elif isinstance(result, list):
|
||||
return result
|
||||
elif isinstance(result, dict):
|
||||
return result.get("sales", [])
|
||||
else:
|
||||
return None
|
||||
|
||||
async def get_all_sales_data(
|
||||
self,
|
||||
tenant_id: str,
|
||||
start_date: Optional[str] = None,
|
||||
end_date: Optional[str] = None,
|
||||
product_id: Optional[str] = None,
|
||||
aggregation: str = "daily",
|
||||
page_size: int = 1000,
|
||||
max_pages: int = 100
|
||||
) -> List[Dict[str, Any]]:
|
||||
"""
|
||||
Get ALL sales data using pagination (equivalent to original fetch_sales_data)
|
||||
Follows pagination until all matching records are retrieved (bounded by page_size * max_pages)
|
||||
"""
|
||||
params = {"aggregation": aggregation}
|
||||
if start_date:
|
||||
params["start_date"] = start_date
|
||||
if end_date:
|
||||
params["end_date"] = end_date
|
||||
if product_id:
|
||||
params["product_id"] = product_id
|
||||
|
||||
# Use the inherited paginated request method
|
||||
try:
|
||||
all_records = await self.get_paginated(
|
||||
"sales/sales",
|
||||
tenant_id=tenant_id,
|
||||
params=params,
|
||||
page_size=page_size,
|
||||
max_pages=max_pages,
|
||||
timeout=2000.0
|
||||
)
|
||||
|
||||
logger.info(f"Successfully fetched {len(all_records)} total sales records via sales service",
|
||||
tenant_id=tenant_id)
|
||||
return all_records
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to fetch paginated sales data: {e}")
|
||||
return []
|
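# --- Illustrative usage sketch (not part of the original file): pulling a
# --- full year of daily aggregates through the paginated helper. page_size
# --- and max_pages bound the total volume (here at most 100,000 records).
#
#   async def _example_full_history(client: "SalesServiceClient") -> None:
#       records = await client.get_all_sales_data(
#           tenant_id="<tenant-uuid>",
#           start_date="2023-01-01",
#           end_date="2023-12-31",
#           aggregation="daily",
#           page_size=1000,
#           max_pages=100,
#       )
#       print(f"fetched {len(records)} records")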
||||
|
||||
async def upload_sales_data(
|
||||
self,
|
||||
tenant_id: str,
|
||||
sales_data: List[Dict[str, Any]]
|
||||
) -> Optional[Dict[str, Any]]:
|
||||
"""Upload sales data"""
|
||||
data = {"sales": sales_data}
|
||||
return await self.post("sales/sales", data=data, tenant_id=tenant_id)
|
||||
|
||||
# ================================================================
|
||||
# PRODUCTS
|
||||
# ================================================================
|
||||
|
||||
async def get_products(self, tenant_id: str) -> Optional[List[Dict[str, Any]]]:
|
||||
"""Get all products for a tenant"""
|
||||
result = await self.get("sales/products", tenant_id=tenant_id)
|
||||
return result.get("products", []) if result else None
|
||||
|
||||
async def get_product(self, tenant_id: str, product_id: str) -> Optional[Dict[str, Any]]:
|
||||
"""Get a specific product"""
|
||||
return await self.get(f"sales/products/{product_id}", tenant_id=tenant_id)
|
||||
|
||||
async def create_product(
|
||||
self,
|
||||
tenant_id: str,
|
||||
name: str,
|
||||
category: str,
|
||||
price: float,
|
||||
**kwargs
|
||||
) -> Optional[Dict[str, Any]]:
|
||||
"""Create a new product"""
|
||||
data = {
|
||||
"name": name,
|
||||
"category": category,
|
||||
"price": price,
|
||||
**kwargs
|
||||
}
|
||||
return await self.post("sales/products", data=data, tenant_id=tenant_id)
|
||||
|
||||
async def update_product(
|
||||
self,
|
||||
tenant_id: str,
|
||||
product_id: str,
|
||||
**updates
|
||||
) -> Optional[Dict[str, Any]]:
|
||||
"""Update a product"""
|
||||
return await self.put(f"sales/products/{product_id}", data=updates, tenant_id=tenant_id)
|
||||
|
||||
async def create_sales_record(
|
||||
self,
|
||||
tenant_id: str,
|
||||
sales_data: Dict[str, Any]
|
||||
) -> Optional[Dict[str, Any]]:
|
||||
"""Create a new sales record
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant ID
|
||||
sales_data: Sales record data including:
|
||||
- inventory_product_id: Optional UUID for inventory tracking
|
||||
- product_name: Product name
|
||||
- product_category: Product category
|
||||
- quantity_sold: Quantity sold
|
||||
- unit_price: Unit price
|
||||
- total_amount: Total amount
|
||||
- sale_date: Sale date (YYYY-MM-DD)
|
||||
- sales_channel: Sales channel (retail, wholesale, online, pos, etc.)
|
||||
- source: Data source (manual, pos_sync, import, etc.)
|
||||
- payment_method: Payment method
|
||||
- notes: Optional notes
|
||||
|
||||
Returns:
|
||||
Created sales record or None if failed
|
||||
"""
|
||||
try:
|
||||
result = await self.post("sales/sales", data=sales_data, tenant_id=tenant_id)
|
||||
if result:
|
||||
logger.info("Created sales record via client",
|
||||
tenant_id=tenant_id,
|
||||
product=sales_data.get("product_name"))
|
||||
return result
|
||||
except Exception as e:
|
||||
logger.error("Failed to create sales record",
|
||||
error=str(e),
|
||||
tenant_id=tenant_id)
|
||||
return None
|
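# --- Illustrative payload sketch (not part of the original file), built from
# --- the fields listed in the docstring above; all values are placeholders.
#
#   async def _example_record_sale(client: "SalesServiceClient") -> None:
#       record = await client.create_sales_record(
#           tenant_id="<tenant-uuid>",
#           sales_data={
#               "product_name": "Sourdough Loaf",
#               "product_category": "bread",
#               "quantity_sold": 12,
#               "unit_price": 4.50,
#               "total_amount": 54.00,
#               "sale_date": "2024-06-01",
#               "sales_channel": "retail",
#               "source": "manual",
#           },
#       )
#       if record is None:
#           # Failures are logged by the client and surfaced as None.
#           ...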
||||
|
||||
async def get_sales_summary(
|
||||
self,
|
||||
tenant_id: str,
|
||||
start_date: date,
|
||||
end_date: date
|
||||
) -> Dict[str, Any]:
|
||||
"""
|
||||
Get sales summary/analytics for a tenant.
|
||||
|
||||
This method calls the sales analytics summary endpoint which provides
|
||||
aggregated sales metrics over a date range.
|
||||
|
||||
Args:
|
||||
tenant_id: The tenant UUID
|
||||
start_date: Start date for summary range
|
||||
end_date: End date for summary range
|
||||
|
||||
Returns:
|
||||
Sales summary data including metrics like total sales, revenue, etc.
|
||||
"""
|
||||
params = {
|
||||
"start_date": start_date.isoformat(),
|
||||
"end_date": end_date.isoformat()
|
||||
}
|
||||
|
||||
return await self.get(
|
||||
"sales/analytics/summary",
|
||||
tenant_id=tenant_id,
|
||||
params=params
|
||||
)
|
||||
|
||||

    async def get_sales_summary_batch(
        self,
        tenant_ids: List[str],
        start_date: date,
        end_date: date
    ) -> Dict[str, Any]:
        """
        Get sales summaries for multiple tenants in a single request.

        Phase 2 optimization: eliminates N+1 query patterns for enterprise dashboards.

        Args:
            tenant_ids: List of tenant IDs to fetch (capped at 100 per request)
            start_date: Start date for summary range
            end_date: End date for summary range

        Returns:
            Dict mapping tenant_id -> sales summary
        """
        try:
            if not tenant_ids:
                return {}

            if len(tenant_ids) > 100:
                logger.warning("Batch request exceeds max tenant limit", requested=len(tenant_ids))
                tenant_ids = tenant_ids[:100]

            data = {
                "tenant_ids": tenant_ids,
                "start_date": start_date.isoformat(),
                "end_date": end_date.isoformat()
            }

            result = await self.post(
                "sales/batch/sales-summary",
                data=data,
                tenant_id=tenant_ids[0]  # Use first tenant for auth context
            )

            summaries = result if isinstance(result, dict) else {}

            logger.info(
                "Batch retrieved sales summaries",
                requested=len(tenant_ids),
                found=len(summaries),
                start_date=start_date.isoformat(),
                end_date=end_date.isoformat()
            )

            return summaries

        except Exception as e:
            logger.error(
                "Error batch fetching sales summaries",
                error=str(e),
                tenant_count=len(tenant_ids)
            )
            return {}
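
    # Illustrative sketch (hypothetical caller code): instead of looping over
    # tenants and issuing one summary request each (the N+1 pattern the
    # docstring mentions), a dashboard can fetch everything in one call.
    #
    #     child_ids = [t["id"] for t in child_tenants]  # assumed shape
    #     summaries = await sales_client.get_sales_summary_batch(
    #         tenant_ids=child_ids,
    #         start_date=date(2024, 1, 1),
    #         end_date=date(2024, 1, 31),
    #     )
    #     for tenant_id, summary in summaries.items():
    #         render_row(tenant_id, summary)  # hypothetical helper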

    async def get_product_demand_patterns(
        self,
        tenant_id: str,
        product_id: str,
        start_date: Optional[date] = None,
        end_date: Optional[date] = None,
        min_history_days: int = 90
    ) -> Dict[str, Any]:
        """
        Get demand pattern analysis for a specific product.

        Args:
            tenant_id: Tenant identifier
            product_id: Product identifier (inventory_product_id)
            start_date: Start date for analysis
            end_date: End date for analysis
            min_history_days: Minimum days of history required

        Returns:
            Demand pattern analysis including trends, seasonality, and statistics
        """
        try:
            params = {"min_history_days": min_history_days}
            if start_date:
                params["start_date"] = start_date.isoformat()
            if end_date:
                params["end_date"] = end_date.isoformat()

            result = await self.get(
                f"sales/analytics/products/{product_id}/demand-patterns",
                tenant_id=tenant_id,
                params=params
            )

            logger.info(
                "Retrieved product demand patterns",
                tenant_id=tenant_id,
                product_id=product_id
            )
            return result if result else {}

        except Exception as e:
            logger.error(
                "Failed to get product demand patterns",
                error=str(e),
                tenant_id=tenant_id,
                product_id=product_id
            )
            return {}
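
    # Illustrative sketch: with no explicit window, the endpoint analyzes the
    # available history, requiring at least `min_history_days` of data. The
    # exact response keys are defined by the sales service; per the docstring
    # they cover trends, seasonality, and summary statistics.
    #
    #     patterns = await sales_client.get_product_demand_patterns(
    #         tenant_id=tenant_id,
    #         product_id=str(inventory_product_id),
    #         min_history_days=120,
    #     )
    #     if not patterns:
    #         ...  # not enough history, or the request failed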

    # ================================================================
    # DATA IMPORT
    # ================================================================

    async def import_sales_data(
        self,
        tenant_id: str,
        file_content: str,
        file_format: str,
        filename: Optional[str] = None
    ) -> Optional[Dict[str, Any]]:
        """Import sales data from CSV/Excel/JSON"""
        data = {
            "content": file_content,
            "format": file_format,
            "filename": filename
        }
        return await self.post("sales/operations/import", data=data, tenant_id=tenant_id)
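
    # Illustrative sketch: importing a small CSV export. The column names
    # shown here are an assumption; the sales service defines the real schema.
    #
    #     csv_text = (
    #         "product_name,quantity_sold,unit_price,sale_date\n"
    #         "Sourdough Loaf,12,4.50,2024-01-15\n"
    #     )
    #     result = await sales_client.import_sales_data(
    #         tenant_id=tenant_id,
    #         file_content=csv_text,
    #         file_format="csv",
    #         filename="january_sales.csv",
    #     )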
1754
shared/clients/stripe_client.py
Executable file
File diff suppressed because it is too large
158
shared/clients/subscription_client.py
Executable file
@@ -0,0 +1,158 @@
"""
Subscription Service Client
Client for interacting with subscription service functionality
"""

import structlog
from typing import Dict, Any, Optional
from sqlalchemy.ext.asyncio import AsyncSession
from fastapi import Depends

from shared.database.base import create_database_manager
from app.repositories.subscription_repository import SubscriptionRepository
from app.models.tenants import Subscription, Tenant
from app.repositories.tenant_repository import TenantRepository
from shared.subscription.plans import SubscriptionTier


logger = structlog.get_logger()


class SubscriptionServiceClient:
    """Client for subscription service operations"""

    def __init__(self, database_manager=None):
        self.database_manager = database_manager or create_database_manager()

    async def get_subscription(self, tenant_id: str) -> Dict[str, Any]:
        """Get subscription details for a tenant"""
        try:
            async with self.database_manager.get_session() as session:
                subscription_repo = SubscriptionRepository(Subscription, session)
                subscription = await subscription_repo.get_active_subscription(tenant_id)

                if not subscription:
                    # Return a default starter subscription if none is found
                    return {
                        'id': None,
                        'tenant_id': tenant_id,
                        'plan': SubscriptionTier.STARTER.value,
                        'status': 'active',
                        'monthly_price': 0,
                        'max_users': 5,
                        'max_locations': 1,
                        'max_products': 50,
                        'features': {}
                    }

                return {
                    'id': str(subscription.id) if subscription.id else None,
                    'tenant_id': tenant_id,
                    'plan': subscription.plan,
                    'status': subscription.status,
                    'monthly_price': subscription.monthly_price,
                    'max_users': subscription.max_users,
                    'max_locations': subscription.max_locations,
                    'max_products': subscription.max_products,
                    'features': subscription.features or {}
                }
        except Exception as e:
            logger.error("Failed to get subscription", tenant_id=tenant_id, error=str(e))
            raise

    async def update_subscription_plan(self, tenant_id: str, new_plan: str) -> Dict[str, Any]:
        """Update the subscription plan for a tenant"""
        try:
            async with self.database_manager.get_session() as session:
                subscription_repo = SubscriptionRepository(Subscription, session)

                # Get the existing subscription
                existing_subscription = await subscription_repo.get_active_subscription(tenant_id)

                if existing_subscription:
                    # Update the existing subscription
                    updated_subscription = await subscription_repo.update_subscription(
                        existing_subscription.id,
                        {'plan': new_plan}
                    )
                else:
                    # Create a new subscription if none exists
                    updated_subscription = await subscription_repo.create_subscription({
                        'tenant_id': tenant_id,
                        'plan': new_plan,
                        'status': 'active',
                        'created_at': None  # Let the database set this
                    })

                await session.commit()

                return {
                    'id': str(updated_subscription.id),
                    'tenant_id': tenant_id,
                    'plan': updated_subscription.plan,
                    'status': updated_subscription.status
                }
        except Exception as e:
            logger.error("Failed to update subscription plan",
                         tenant_id=tenant_id, new_plan=new_plan, error=str(e))
            raise

    async def create_child_subscription(self, child_tenant_id: str, parent_tenant_id: str) -> Dict[str, Any]:
        """Create a child subscription inheriting from the parent"""
        try:
            async with self.database_manager.get_session() as session:
                subscription_repo = SubscriptionRepository(Subscription, session)
                tenant_repo = TenantRepository(Tenant, session)

                # Get the parent subscription to inherit its plan
                parent_subscription = await subscription_repo.get_active_subscription(parent_tenant_id)

                if not parent_subscription:
                    # If the parent has no subscription, create the child with the starter plan
                    plan = SubscriptionTier.STARTER.value
                else:
                    plan = parent_subscription.plan

                # Create a subscription for the child tenant
                child_subscription = await subscription_repo.create_subscription({
                    'tenant_id': child_tenant_id,
                    'plan': plan,
                    'status': 'active',
                    'created_at': None  # Let the database set this
                })

                await session.commit()

                # Update the child tenant's subscription tier
                await tenant_repo.update_tenant(child_tenant_id, {
                    'subscription_tier': plan
                })

                await session.commit()

                return {
                    'id': str(child_subscription.id),
                    'tenant_id': child_tenant_id,
                    'plan': child_subscription.plan,
                    'status': child_subscription.status
                }
        except Exception as e:
            logger.error("Failed to create child subscription",
                         child_tenant_id=child_tenant_id,
                         parent_tenant_id=parent_tenant_id,
                         error=str(e))
            raise

    async def get_subscription_by_tenant(self, tenant_id: str) -> Optional[Dict[str, Any]]:
        """Get subscription by tenant ID"""
        return await self.get_subscription(tenant_id)

    async def get_tenant_subscription_tier(self, tenant_id: str) -> str:
        """Get the subscription tier for a tenant"""
        subscription = await self.get_subscription(tenant_id)
        return subscription.get('plan', SubscriptionTier.STARTER.value)


# Dependency function for FastAPI
async def get_subscription_service_client() -> SubscriptionServiceClient:
    """FastAPI dependency for subscription service client"""
    return SubscriptionServiceClient()
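
# Illustrative sketch (hypothetical route, not part of this module): wiring
# the dependency into a FastAPI endpoint so each request gets a client.
#
#     from fastapi import APIRouter, Depends
#
#     router = APIRouter()
#
#     @router.get("/tenants/{tenant_id}/plan")
#     async def read_plan(
#         tenant_id: str,
#         client: SubscriptionServiceClient = Depends(get_subscription_service_client),
#     ):
#         return {"plan": await client.get_tenant_subscription_tier(tenant_id)}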
296
shared/clients/suppliers_client.py
Executable file
@@ -0,0 +1,296 @@
# shared/clients/suppliers_client.py
"""
Suppliers Service Client for Inter-Service Communication
Provides access to supplier data and performance metrics from other services
"""

import structlog
from typing import Dict, Any, Optional, List
from shared.clients.base_service_client import BaseServiceClient
from shared.config.base import BaseServiceSettings

logger = structlog.get_logger()


class SuppliersServiceClient(BaseServiceClient):
    """Client for communicating with the Suppliers Service"""

    def __init__(self, config: BaseServiceSettings, calling_service_name: str = "unknown"):
        super().__init__(calling_service_name, config)

    def get_service_base_path(self) -> str:
        return "/api/v1"

    # ================================================================
    # SUPPLIER MANAGEMENT
    # ================================================================

    async def get_supplier_by_id(self, tenant_id: str, supplier_id: str) -> Optional[Dict[str, Any]]:
        """Get supplier details by ID"""
        try:
            result = await self.get(f"suppliers/{supplier_id}", tenant_id=tenant_id)
            if result:
                logger.info("Retrieved supplier details from suppliers service",
                            supplier_id=supplier_id, tenant_id=tenant_id)
            return result
        except Exception as e:
            logger.error("Error getting supplier details",
                         error=str(e), supplier_id=supplier_id, tenant_id=tenant_id)
            return None

    async def get_all_suppliers(self, tenant_id: str, is_active: Optional[bool] = True) -> Optional[List[Dict[str, Any]]]:
        """Get all suppliers for a tenant"""
        try:
            params = {}
            if is_active is not None:
                params["is_active"] = is_active

            result = await self.get_paginated("suppliers", tenant_id=tenant_id, params=params)
            logger.info("Retrieved all suppliers from suppliers service",
                        suppliers_count=len(result) if result else 0, tenant_id=tenant_id)
            return result
        except Exception as e:
            logger.error("Error getting all suppliers",
                         error=str(e), tenant_id=tenant_id)
            return []

    async def search_suppliers(self, tenant_id: str, search: Optional[str] = None, category: Optional[str] = None) -> Optional[List[Dict[str, Any]]]:
        """Search suppliers with filters"""
        try:
            params = {}
            if search:
                params["search_term"] = search
            if category:
                params["supplier_type"] = category

            result = await self.get("suppliers", tenant_id=tenant_id, params=params)
            suppliers = result if result else []
            logger.info("Searched suppliers from suppliers service",
                        search_term=search, suppliers_count=len(suppliers), tenant_id=tenant_id)
            return suppliers
        except Exception as e:
            logger.error("Error searching suppliers",
                         error=str(e), tenant_id=tenant_id)
            return []

    async def get_suppliers_batch(self, tenant_id: str, supplier_ids: List[str]) -> Optional[List[Dict[str, Any]]]:
        """
        Get multiple suppliers in a single request for performance optimization.

        This method eliminates N+1 query patterns when fetching supplier data
        for multiple purchase orders or other entities.

        Args:
            tenant_id: Tenant ID
            supplier_ids: List of supplier IDs to fetch

        Returns:
            List of supplier dictionaries, or an empty list on error
        """
        try:
            if not supplier_ids:
                return []

            # Join IDs as a comma-separated string
            ids_param = ",".join(supplier_ids)
            params = {"ids": ids_param}

            result = await self.get("suppliers/batch", tenant_id=tenant_id, params=params)
            suppliers = result if result else []

            logger.info("Batch retrieved suppliers from suppliers service",
                        requested_count=len(supplier_ids),
                        found_count=len(suppliers),
                        tenant_id=tenant_id)
            return suppliers
        except Exception as e:
            logger.error("Error batch retrieving suppliers",
                         error=str(e),
                         requested_count=len(supplier_ids),
                         tenant_id=tenant_id)
            return []
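
    # Illustrative sketch: resolving supplier names for a page of purchase
    # orders with one request instead of one per order. The `orders` shape
    # is hypothetical.
    #
    #     supplier_ids = list({o["supplier_id"] for o in orders})
    #     suppliers = await suppliers_client.get_suppliers_batch(tenant_id, supplier_ids)
    #     by_id = {s["id"]: s for s in suppliers}
    #     for order in orders:
    #         order["supplier_name"] = by_id.get(order["supplier_id"], {}).get("name")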

    # ================================================================
    # SUPPLIER RECOMMENDATIONS
    # ================================================================

    async def get_supplier_recommendations(self, tenant_id: str, ingredient_id: str) -> Optional[Dict[str, Any]]:
        """Get supplier recommendations for procurement"""
        try:
            params = {"ingredient_id": ingredient_id}
            result = await self.get("suppliers/recommendations", tenant_id=tenant_id, params=params)
            if result:
                logger.info("Retrieved supplier recommendations from suppliers service",
                            ingredient_id=ingredient_id, tenant_id=tenant_id)
            return result
        except Exception as e:
            logger.error("Error getting supplier recommendations",
                         error=str(e), ingredient_id=ingredient_id, tenant_id=tenant_id)
            return None

    async def get_best_supplier_for_ingredient(self, tenant_id: str, ingredient_id: str, criteria: Optional[Dict[str, Any]] = None) -> Optional[Dict[str, Any]]:
        """Get the best supplier for a specific ingredient based on criteria"""
        try:
            data = {
                "ingredient_id": ingredient_id,
                "criteria": criteria or {}
            }
            result = await self.post("suppliers/operations/find-best-supplier", data=data, tenant_id=tenant_id)
            if result:
                logger.info("Retrieved best supplier from suppliers service",
                            ingredient_id=ingredient_id, tenant_id=tenant_id)
            return result
        except Exception as e:
            logger.error("Error getting best supplier for ingredient",
                         error=str(e), ingredient_id=ingredient_id, tenant_id=tenant_id)
            return None
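
    # Illustrative sketch: the criteria dict is passed through to the
    # suppliers service; the keys shown here are assumptions, not a
    # documented schema.
    #
    #     best = await suppliers_client.get_best_supplier_for_ingredient(
    #         tenant_id,
    #         ingredient_id="flour-t550",
    #         criteria={"optimize_for": "price", "max_lead_time_days": 3},
    #     )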

    # ================================================================
    # PERFORMANCE TRACKING
    # ================================================================

    async def get_supplier_performance(self, tenant_id: str, supplier_id: str, period: str = "last_30_days") -> Optional[Dict[str, Any]]:
        """Get supplier performance metrics"""
        try:
            params = {"period": period}
            result = await self.get(f"suppliers/analytics/performance/{supplier_id}", tenant_id=tenant_id, params=params)
            if result:
                logger.info("Retrieved supplier performance from suppliers service",
                            supplier_id=supplier_id, period=period, tenant_id=tenant_id)
            return result
        except Exception as e:
            logger.error("Error getting supplier performance",
                         error=str(e), supplier_id=supplier_id, tenant_id=tenant_id)
            return None

    async def get_performance_alerts(self, tenant_id: str) -> Optional[List[Dict[str, Any]]]:
        """Get supplier performance alerts"""
        try:
            result = await self.get("suppliers/alerts/performance", tenant_id=tenant_id)
            alerts = result.get('alerts', []) if result else []
            logger.info("Retrieved supplier performance alerts",
                        alerts_count=len(alerts), tenant_id=tenant_id)
            return alerts
        except Exception as e:
            logger.error("Error getting supplier performance alerts",
                         error=str(e), tenant_id=tenant_id)
            return []

    async def record_supplier_rating(self, tenant_id: str, supplier_id: str, rating_data: Dict[str, Any]) -> Optional[Dict[str, Any]]:
        """Record a rating/review for a supplier"""
        try:
            result = await self.post(f"suppliers/{supplier_id}/rating", data=rating_data, tenant_id=tenant_id)
            if result:
                logger.info("Recorded supplier rating",
                            supplier_id=supplier_id, tenant_id=tenant_id)
            return result
        except Exception as e:
            logger.error("Error recording supplier rating",
                         error=str(e), supplier_id=supplier_id, tenant_id=tenant_id)
            return None

    # ================================================================
    # DASHBOARD AND ANALYTICS
    # ================================================================

    async def get_dashboard_summary(self, tenant_id: str) -> Optional[Dict[str, Any]]:
        """Get suppliers dashboard summary data"""
        try:
            result = await self.get("suppliers/dashboard/summary", tenant_id=tenant_id)
            if result:
                logger.info("Retrieved suppliers dashboard summary",
                            tenant_id=tenant_id)
            return result
        except Exception as e:
            logger.error("Error getting suppliers dashboard summary",
                         error=str(e), tenant_id=tenant_id)
            return None

    async def get_cost_analysis(self, tenant_id: str, start_date: str, end_date: str) -> Optional[Dict[str, Any]]:
        """Get cost analysis across suppliers"""
        try:
            params = {
                "start_date": start_date,
                "end_date": end_date
            }
            result = await self.get("suppliers/analytics/cost-analysis", tenant_id=tenant_id, params=params)
            if result:
                logger.info("Retrieved supplier cost analysis",
                            start_date=start_date, end_date=end_date, tenant_id=tenant_id)
            return result
        except Exception as e:
            logger.error("Error getting supplier cost analysis",
                         error=str(e), tenant_id=tenant_id)
            return None

    async def get_supplier_reliability_metrics(self, tenant_id: str) -> Optional[Dict[str, Any]]:
        """Get supplier reliability and quality metrics"""
        try:
            result = await self.get("suppliers/analytics/reliability-metrics", tenant_id=tenant_id)
            if result:
                logger.info("Retrieved supplier reliability metrics",
                            tenant_id=tenant_id)
            return result
        except Exception as e:
            logger.error("Error getting supplier reliability metrics",
                         error=str(e), tenant_id=tenant_id)
            return None

    # ================================================================
    # ALERTS AND NOTIFICATIONS
    # ================================================================

    async def acknowledge_alert(self, tenant_id: str, alert_id: str) -> Optional[Dict[str, Any]]:
        """Acknowledge a supplier-related alert"""
        try:
            result = await self.post(f"suppliers/alerts/{alert_id}/acknowledge", data={}, tenant_id=tenant_id)
            if result:
                logger.info("Acknowledged supplier alert",
                            alert_id=alert_id, tenant_id=tenant_id)
            return result
        except Exception as e:
            logger.error("Error acknowledging supplier alert",
                         error=str(e), alert_id=alert_id, tenant_id=tenant_id)
            return None

    # ================================================================
    # COUNT AND STATISTICS
    # ================================================================

    async def count_suppliers(self, tenant_id: str) -> int:
        """
        Get the count of suppliers for a tenant.
        Used for subscription limit tracking.

        Returns:
            int: Number of suppliers for the tenant
        """
        try:
            result = await self.get("suppliers/count", tenant_id=tenant_id)
            count = result.get('count', 0) if result else 0
            logger.info("Retrieved supplier count from suppliers service",
                        count=count, tenant_id=tenant_id)
            return count
        except Exception as e:
            logger.error("Error getting supplier count",
                         error=str(e), tenant_id=tenant_id)
            return 0

    # ================================================================
    # UTILITY METHODS
    # ================================================================

    async def health_check(self) -> bool:
        """Check if the suppliers service is healthy"""
        try:
            result = await self.get("../health")  # Health endpoint is not tenant-scoped
            return result is not None
        except Exception as e:
            logger.error("Suppliers service health check failed", error=str(e))
            return False


# Factory function for dependency injection
def create_suppliers_client(config: BaseServiceSettings, service_name: str = "unknown") -> SuppliersServiceClient:
    """Create a suppliers service client instance"""
    return SuppliersServiceClient(config, calling_service_name=service_name)
798
shared/clients/tenant_client.py
Executable file
@@ -0,0 +1,798 @@
# shared/clients/tenant_client.py
"""
Tenant Service Client for Inter-Service Communication

This client provides a high-level API for interacting with the Tenant Service,
which manages tenant metadata, settings, hierarchical relationships (parent-child),
and multi-location support for enterprise bakery networks.

Key Capabilities:
- Tenant Management: Get, create, update tenant records
- Settings Management: Category-specific settings (procurement, inventory, production, etc.)
- Enterprise Hierarchy: Parent-child tenant relationships for multi-location networks
- Tenant Locations: Physical location management (central_production, retail_outlet)
- Subscription Management: Subscription tier and quota validation
- Multi-Tenancy: Tenant isolation and access control

URL Pattern Architecture (Redesigned):
- Registration endpoints: /api/v1/registration/*
- Tenant subscription endpoints: /api/v1/tenants/{tenant_id}/subscription/*
- Setup intents: /api/v1/setup-intents/*
- Payment customers: /api/v1/payment-customers/*

For more details, see services/tenant/README.md
"""

import structlog
from typing import Dict, Any, Optional, List
from uuid import UUID
from shared.clients.base_service_client import BaseServiceClient
from shared.config.base import BaseServiceSettings

logger = structlog.get_logger()


class TenantServiceClient(BaseServiceClient):
    """Client for communicating with the Tenant Service"""

    def __init__(self, config: BaseServiceSettings):
        super().__init__("tenant", config)

    def get_service_base_path(self) -> str:
        return "/api/v1"

    # ================================================================
    # TENANT SETTINGS ENDPOINTS
    # ================================================================

    async def get_settings(self, tenant_id: str) -> Optional[Dict[str, Any]]:
        """Get all settings for a tenant"""
        try:
            result = await self.get("settings", tenant_id=tenant_id)
            if result:
                logger.info("Retrieved all settings from tenant service",
                            tenant_id=tenant_id)
            return result
        except Exception as e:
            logger.error("Error getting all settings",
                         error=str(e), tenant_id=tenant_id)
            return None

    async def get_category_settings(self, tenant_id: str, category: str) -> Optional[Dict[str, Any]]:
        """Get settings for a specific category"""
        try:
            result = await self.get(f"settings/{category}", tenant_id=tenant_id)
            if result:
                logger.info("Retrieved category settings from tenant service",
                            tenant_id=tenant_id, category=category)
            return result
        except Exception as e:
            logger.error("Error getting category settings",
                         error=str(e), tenant_id=tenant_id, category=category)
            return None

    async def get_procurement_settings(self, tenant_id: str) -> Optional[Dict[str, Any]]:
        """Get procurement settings for a tenant"""
        result = await self.get_category_settings(tenant_id, "procurement")
        return result.get('settings', {}) if result else {}

    async def get_inventory_settings(self, tenant_id: str) -> Optional[Dict[str, Any]]:
        """Get inventory settings for a tenant"""
        result = await self.get_category_settings(tenant_id, "inventory")
        return result.get('settings', {}) if result else {}

    async def get_production_settings(self, tenant_id: str) -> Optional[Dict[str, Any]]:
        """Get production settings for a tenant"""
        result = await self.get_category_settings(tenant_id, "production")
        return result.get('settings', {}) if result else {}

    async def get_supplier_settings(self, tenant_id: str) -> Optional[Dict[str, Any]]:
        """Get supplier settings for a tenant"""
        result = await self.get_category_settings(tenant_id, "supplier")
        return result.get('settings', {}) if result else {}

    async def get_pos_settings(self, tenant_id: str) -> Optional[Dict[str, Any]]:
        """Get POS settings for a tenant"""
        result = await self.get_category_settings(tenant_id, "pos")
        return result.get('settings', {}) if result else {}

    async def get_order_settings(self, tenant_id: str) -> Optional[Dict[str, Any]]:
        """Get order settings for a tenant"""
        result = await self.get_category_settings(tenant_id, "order")
        return result.get('settings', {}) if result else {}

    async def get_notification_settings(self, tenant_id: str) -> Optional[Dict[str, Any]]:
        """Get notification settings for a tenant"""
        result = await self.get_category_settings(tenant_id, "notification")
        return result.get('settings', {}) if result else {}

    async def update_settings(self, tenant_id: str, settings_data: Dict[str, Any]) -> Optional[Dict[str, Any]]:
        """Update settings for a tenant"""
        try:
            result = await self.put("settings", data=settings_data, tenant_id=tenant_id)
            if result:
                logger.info("Updated tenant settings", tenant_id=tenant_id)
            return result
        except Exception as e:
            logger.error("Error updating tenant settings",
                         error=str(e), tenant_id=tenant_id)
            return None

    async def update_category_settings(
        self,
        tenant_id: str,
        category: str,
        settings_data: Dict[str, Any]
    ) -> Optional[Dict[str, Any]]:
        """Update settings for a specific category"""
        try:
            result = await self.put(f"settings/{category}", data=settings_data, tenant_id=tenant_id)
            if result:
                logger.info("Updated category settings",
                            tenant_id=tenant_id, category=category)
            return result
        except Exception as e:
            logger.error("Error updating category settings",
                         error=str(e), tenant_id=tenant_id, category=category)
            return None

    async def reset_category_settings(self, tenant_id: str, category: str) -> Optional[Dict[str, Any]]:
        """Reset category settings to default values"""
        try:
            result = await self.post(f"settings/{category}/reset", data={}, tenant_id=tenant_id)
            if result:
                logger.info("Reset category settings to defaults",
                            tenant_id=tenant_id, category=category)
            return result
        except Exception as e:
            logger.error("Error resetting category settings",
                         error=str(e), tenant_id=tenant_id, category=category)
            return None

    # ================================================================
    # TENANT MANAGEMENT
    # ================================================================

    async def get_tenant(self, tenant_id: str) -> Optional[Dict[str, Any]]:
        """Get tenant details"""
        try:
            result = await self._make_request("GET", f"tenants/{tenant_id}")
            if result:
                logger.info("Retrieved tenant details", tenant_id=tenant_id)
            return result
        except Exception as e:
            logger.error("Error getting tenant details",
                         error=str(e), tenant_id=tenant_id)
            return None

    async def get_active_tenants(self, skip: int = 0, limit: int = 100) -> Optional[list]:
        """Get all active tenants"""
        try:
            result = await self._make_request(
                "GET",
                f"tenants?skip={skip}&limit={limit}"
            )
            if result:
                logger.info("Retrieved active tenants from tenant service",
                            count=len(result) if isinstance(result, list) else 0)
            return result if result else []
        except Exception as e:
            logger.error("Error getting active tenants", error=str(e))
            return []

    # ================================================================
    # ENTERPRISE TIER METHODS
    # ================================================================

    async def get_child_tenants(self, parent_tenant_id: str) -> Optional[List[Dict[str, Any]]]:
        """Get all child tenants for a parent tenant"""
        try:
            result = await self._make_request("GET", f"tenants/{parent_tenant_id}/children")
            if result:
                logger.info("Retrieved child tenants",
                            parent_tenant_id=parent_tenant_id,
                            child_count=len(result) if isinstance(result, list) else 0)
            return result
        except Exception as e:
            logger.error("Error getting child tenants",
                         error=str(e), parent_tenant_id=parent_tenant_id)
            return None

    async def get_tenant_children_count(self, tenant_id: str) -> int:
        """Get the count of child tenants for a parent tenant"""
        try:
            children = await self.get_child_tenants(tenant_id)
            return len(children) if children else 0
        except Exception as e:
            logger.error("Error getting child tenant count",
                         error=str(e), tenant_id=tenant_id)
            return 0

    async def get_parent_tenant(self, child_tenant_id: str) -> Optional[Dict[str, Any]]:
        """Get the parent tenant for a child tenant"""
        try:
            result = await self.get(f"tenants/{child_tenant_id}/parent", tenant_id=child_tenant_id)
            if result:
                logger.info("Retrieved parent tenant",
                            child_tenant_id=child_tenant_id,
                            parent_tenant_id=result.get('id'))
            return result
        except Exception as e:
            logger.error("Error getting parent tenant",
                         error=str(e), child_tenant_id=child_tenant_id)
            return None

    async def get_tenant_hierarchy(self, tenant_id: str) -> Optional[Dict[str, Any]]:
        """Get complete tenant hierarchy information"""
        try:
            result = await self.get("hierarchy", tenant_id=tenant_id)
            if result:
                logger.info("Retrieved tenant hierarchy",
                            tenant_id=tenant_id,
                            hierarchy_type=result.get('tenant_type'))
            return result
        except Exception as e:
            logger.error("Error getting tenant hierarchy",
                         error=str(e), tenant_id=tenant_id)
            return None

    async def get_tenant_locations(self, tenant_id: str) -> Optional[List[Dict[str, Any]]]:
        """Get all locations for a tenant"""
        try:
            result = await self.get("locations", tenant_id=tenant_id)
            if result:
                logger.info("Retrieved tenant locations",
                            tenant_id=tenant_id,
                            location_count=len(result) if isinstance(result, list) else 0)
            return result
        except Exception as e:
            logger.error("Error getting tenant locations",
                         error=str(e), tenant_id=tenant_id)
            return None

    # ================================================================
    # UTILITY METHODS
    # ================================================================

    async def health_check(self) -> bool:
        """Check if the tenant service is healthy"""
        try:
            result = await self.get("../health")
            return result is not None
        except Exception as e:
            logger.error("Tenant service health check failed", error=str(e))
            return False

    # ================================================================
    # SUBSCRIPTION STATUS ENDPOINTS (NEW URL PATTERNS)
    # ================================================================

    async def get_subscription_status(self, tenant_id: str) -> Optional[Dict[str, Any]]:
        """Get subscription status for a tenant"""
        try:
            result = await self._make_request("GET", f"tenants/{tenant_id}/subscription/status")
            if result:
                logger.info("Retrieved subscription status from tenant service",
                            tenant_id=tenant_id, status=result.get('status'))
            return result
        except Exception as e:
            logger.error("Error getting subscription status",
                         error=str(e), tenant_id=tenant_id)
            return None

    async def get_subscription_details(self, tenant_id: str) -> Optional[Dict[str, Any]]:
        """Get detailed subscription information for a tenant"""
        try:
            result = await self._make_request("GET", f"tenants/{tenant_id}/subscription/details")
            if result:
                logger.info("Retrieved subscription details from tenant service",
                            tenant_id=tenant_id, plan=result.get('plan'))
            return result
        except Exception as e:
            logger.error("Error getting subscription details",
                         error=str(e), tenant_id=tenant_id)
            return None

    async def get_subscription_tier(self, tenant_id: str) -> Optional[str]:
        """Get the subscription tier for a tenant (cached endpoint)"""
        try:
            result = await self._make_request("GET", f"tenants/{tenant_id}/subscription/tier")
            return result.get('tier') if result else None
        except Exception as e:
            logger.error("Error getting subscription tier",
                         error=str(e), tenant_id=tenant_id)
            return None

    async def get_subscription_limits(self, tenant_id: str) -> Optional[Dict[str, Any]]:
        """Get subscription limits for a tenant"""
        try:
            result = await self._make_request("GET", f"tenants/{tenant_id}/subscription/limits")
            return result
        except Exception as e:
            logger.error("Error getting subscription limits",
                         error=str(e), tenant_id=tenant_id)
            return None

    async def get_usage_summary(self, tenant_id: str) -> Optional[Dict[str, Any]]:
        """Get usage summary vs. limits for a tenant"""
        try:
            result = await self._make_request("GET", f"tenants/{tenant_id}/subscription/usage")
            return result
        except Exception as e:
            logger.error("Error getting usage summary",
                         error=str(e), tenant_id=tenant_id)
            return None

    async def has_feature(self, tenant_id: str, feature: str) -> bool:
        """Check if a tenant has access to a specific feature"""
        try:
            result = await self._make_request("GET", f"tenants/{tenant_id}/subscription/features/{feature}")
            return result.get('has_feature', False) if result else False
        except Exception as e:
            logger.error("Error checking feature access",
                         error=str(e), tenant_id=tenant_id, feature=feature)
            return False

    # ================================================================
    # QUOTA CHECK ENDPOINTS (NEW URL PATTERNS)
    # ================================================================

    async def can_add_location(self, tenant_id: str) -> Dict[str, Any]:
        """Check if the tenant can add another location"""
        try:
            result = await self._make_request("GET", f"tenants/{tenant_id}/subscription/limits/locations")
            return result or {"can_add": False, "reason": "Service unavailable"}
        except Exception as e:
            logger.error("Error checking location limits",
                         error=str(e), tenant_id=tenant_id)
            return {"can_add": False, "reason": str(e)}

    async def can_add_product(self, tenant_id: str) -> Dict[str, Any]:
        """Check if the tenant can add another product"""
        try:
            result = await self._make_request("GET", f"tenants/{tenant_id}/subscription/limits/products")
            return result or {"can_add": False, "reason": "Service unavailable"}
        except Exception as e:
            logger.error("Error checking product limits",
                         error=str(e), tenant_id=tenant_id)
            return {"can_add": False, "reason": str(e)}

    async def can_add_user(self, tenant_id: str) -> Dict[str, Any]:
        """Check if the tenant can add another user"""
        try:
            result = await self._make_request("GET", f"tenants/{tenant_id}/subscription/limits/users")
            return result or {"can_add": False, "reason": "Service unavailable"}
        except Exception as e:
            logger.error("Error checking user limits",
                         error=str(e), tenant_id=tenant_id)
            return {"can_add": False, "reason": str(e)}

    async def can_add_recipe(self, tenant_id: str) -> Dict[str, Any]:
        """Check if the tenant can add another recipe"""
        try:
            result = await self._make_request("GET", f"tenants/{tenant_id}/subscription/limits/recipes")
            return result or {"can_add": False, "reason": "Service unavailable"}
        except Exception as e:
            logger.error("Error checking recipe limits",
                         error=str(e), tenant_id=tenant_id)
            return {"can_add": False, "reason": str(e)}

    async def can_add_supplier(self, tenant_id: str) -> Dict[str, Any]:
        """Check if the tenant can add another supplier"""
        try:
            result = await self._make_request("GET", f"tenants/{tenant_id}/subscription/limits/suppliers")
            return result or {"can_add": False, "reason": "Service unavailable"}
        except Exception as e:
            logger.error("Error checking supplier limits",
                         error=str(e), tenant_id=tenant_id)
            return {"can_add": False, "reason": str(e)}
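
    # Illustrative sketch (hypothetical route code): gating a create operation
    # on the quota check. All can_add_* methods fail closed, returning
    # can_add=False when the tenant service is unreachable.
    #
    #     check = await tenant_client.can_add_supplier(tenant_id)
    #     if not check.get("can_add", False):
    #         raise HTTPException(
    #             status_code=402,
    #             detail=check.get("reason", "Supplier limit reached"),
    #         )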

    # ================================================================
    # SUBSCRIPTION MANAGEMENT ENDPOINTS (NEW URL PATTERNS)
    # ================================================================

    async def cancel_subscription(self, tenant_id: str, reason: str = "") -> Dict[str, Any]:
        """Cancel a subscription"""
        try:
            result = await self._make_request(
                "POST",
                f"tenants/{tenant_id}/subscription/cancel",
                params={"reason": reason}
            )
            return result or {"success": False, "message": "Cancellation failed"}
        except Exception as e:
            logger.error("Error cancelling subscription",
                         error=str(e), tenant_id=tenant_id)
            return {"success": False, "message": str(e)}

    async def reactivate_subscription(self, tenant_id: str, plan: str = "starter") -> Dict[str, Any]:
        """Reactivate a subscription"""
        try:
            result = await self._make_request(
                "POST",
                f"tenants/{tenant_id}/subscription/reactivate",
                params={"plan": plan}
            )
            return result or {"success": False, "message": "Reactivation failed"}
        except Exception as e:
            logger.error("Error reactivating subscription",
                         error=str(e), tenant_id=tenant_id)
            return {"success": False, "message": str(e)}

    async def validate_plan_upgrade(self, tenant_id: str, new_plan: str) -> Dict[str, Any]:
        """Validate plan upgrade eligibility"""
        try:
            result = await self._make_request(
                "GET",
                f"tenants/{tenant_id}/subscription/validate-upgrade/{new_plan}"
            )
            return result or {"can_upgrade": False, "reason": "Validation failed"}
        except Exception as e:
            logger.error("Error validating plan upgrade",
                         error=str(e), tenant_id=tenant_id, new_plan=new_plan)
            return {"can_upgrade": False, "reason": str(e)}

    async def upgrade_subscription_plan(self, tenant_id: str, new_plan: str) -> Dict[str, Any]:
        """Upgrade the subscription plan"""
        try:
            result = await self._make_request(
                "POST",
                f"tenants/{tenant_id}/subscription/upgrade",
                params={"new_plan": new_plan}
            )
            return result or {"success": False, "message": "Upgrade failed"}
        except Exception as e:
            logger.error("Error upgrading subscription plan",
                         error=str(e), tenant_id=tenant_id, new_plan=new_plan)
            return {"success": False, "message": str(e)}

    # ================================================================
    # PAYMENT MANAGEMENT ENDPOINTS (NEW URL PATTERNS)
    # ================================================================

    async def get_payment_method(self, tenant_id: str) -> Optional[Dict[str, Any]]:
        """Get the payment method for a tenant"""
        try:
            result = await self._make_request("GET", f"tenants/{tenant_id}/subscription/payment-method")
            return result
        except Exception as e:
            logger.error("Error getting payment method",
                         error=str(e), tenant_id=tenant_id)
            return None

    async def update_payment_method(self, tenant_id: str, payment_method_id: str) -> Dict[str, Any]:
        """Update the payment method for a tenant"""
        try:
            result = await self._make_request(
                "POST",
                f"tenants/{tenant_id}/subscription/payment-method",
                params={"payment_method_id": payment_method_id}
            )
            return result or {"success": False, "message": "Update failed"}
        except Exception as e:
            logger.error("Error updating payment method",
                         error=str(e), tenant_id=tenant_id)
            return {"success": False, "message": str(e)}

    async def get_invoices(self, tenant_id: str) -> Optional[List[Dict[str, Any]]]:
        """Get invoices for a tenant"""
        try:
            result = await self._make_request("GET", f"tenants/{tenant_id}/subscription/invoices")
            return result.get('invoices', []) if result else None
        except Exception as e:
            logger.error("Error getting invoices",
                         error=str(e), tenant_id=tenant_id)
            return None

    # ================================================================
    # REGISTRATION FLOW ENDPOINTS (NEW URL PATTERNS)
    # ================================================================

    async def start_registration_payment_setup(self, user_data: Dict[str, Any]) -> Dict[str, Any]:
        """Start registration payment setup (SetupIntent-first architecture)"""
        try:
            logger.info("Starting registration payment setup via tenant service",
                        email=user_data.get('email'),
                        plan_id=user_data.get('plan_id'))

            result = await self._make_request(
                "POST",
                "registration/payment-setup",
                data=user_data
            )

            if result and result.get("success"):
                logger.info("Registration payment setup completed",
                            email=user_data.get('email'),
                            setup_intent_id=result.get('setup_intent_id'))
                return result
            else:
                error_msg = result.get('detail') if result else 'Unknown error'
                logger.error("Registration payment setup failed",
                             email=user_data.get('email'), error=error_msg)
                raise Exception(f"Registration payment setup failed: {error_msg}")

        except Exception as e:
            logger.error("Failed to start registration payment setup",
                         email=user_data.get('email'), error=str(e))
            raise

    async def complete_registration(self, setup_intent_id: str, user_data: Dict[str, Any]) -> Dict[str, Any]:
        """Complete registration after 3DS verification"""
        try:
            logger.info("Completing registration via tenant service",
                        setup_intent_id=setup_intent_id,
                        email=user_data.get('email'))

            registration_data = {
                "setup_intent_id": setup_intent_id,
                "user_data": user_data
            }

            result = await self._make_request(
                "POST",
                "registration/complete",
                data=registration_data
            )

            if result and result.get("success"):
                logger.info("Registration completed successfully",
                            setup_intent_id=setup_intent_id,
                            subscription_id=result.get('subscription_id'))
                return result
            else:
                error_msg = result.get('detail') if result else 'Unknown error'
                logger.error("Registration completion failed",
                             setup_intent_id=setup_intent_id, error=error_msg)
                raise Exception(f"Registration completion failed: {error_msg}")

        except Exception as e:
            logger.error("Failed to complete registration",
                         setup_intent_id=setup_intent_id, error=str(e))
            raise
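
    # Illustrative sketch of the SetupIntent-first flow, end to end. The 3DS
    # confirmation step happens client-side (e.g. via Stripe.js) and is out of
    # scope for this client; only the two calls below belong to it.
    #
    #     setup = await tenant_client.start_registration_payment_setup(user_data)
    #     # ... frontend confirms setup["client_secret"] with the card holder ...
    #     registration = await tenant_client.complete_registration(
    #         setup_intent_id=setup["setup_intent_id"],
    #         user_data=user_data,
    #     )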

    async def get_registration_state(self, state_id: str) -> Optional[Dict[str, Any]]:
        """Get registration state by ID"""
        try:
            result = await self._make_request("GET", f"registration/state/{state_id}")
            return result
        except Exception as e:
            logger.error("Error getting registration state",
                         error=str(e), state_id=state_id)
            return None

    # ================================================================
    # SETUP INTENT VERIFICATION (NEW URL PATTERNS)
    # ================================================================

    async def verify_setup_intent(self, setup_intent_id: str) -> Dict[str, Any]:
        """Verify SetupIntent status"""
        try:
            logger.info("Verifying SetupIntent via tenant service",
                        setup_intent_id=setup_intent_id)

            result = await self._make_request(
                "GET",
                f"setup-intents/{setup_intent_id}/verify"
            )

            if result:
                logger.info("SetupIntent verification result",
                            setup_intent_id=setup_intent_id,
                            status=result.get('status'))
                return result
            else:
                raise Exception("SetupIntent verification failed: No result returned")

        except Exception as e:
            logger.error("Failed to verify SetupIntent",
                         setup_intent_id=setup_intent_id, error=str(e))
            raise

    async def verify_setup_intent_for_registration(self, setup_intent_id: str) -> Dict[str, Any]:
        """Verify SetupIntent status for the registration flow (alias for verify_setup_intent)"""
        return await self.verify_setup_intent(setup_intent_id)

    # ================================================================
    # PAYMENT CUSTOMER MANAGEMENT (NEW URL PATTERNS)
    # ================================================================

    async def create_payment_customer(
        self,
        user_data: Dict[str, Any],
        payment_method_id: Optional[str] = None
    ) -> Dict[str, Any]:
        """Create a payment customer"""
        try:
            logger.info("Creating payment customer via tenant service",
                        email=user_data.get('email'),
                        payment_method_id=payment_method_id)

            request_data = user_data
            params = {}
            if payment_method_id:
                params["payment_method_id"] = payment_method_id

            result = await self._make_request(
                "POST",
                "payment-customers/create",
                data=request_data,
                params=params if params else None
            )

            if result and result.get("success"):
                logger.info("Payment customer created successfully",
                            email=user_data.get('email'),
                            payment_customer_id=result.get('payment_customer_id'))
                return result
            else:
                error_msg = result.get('detail') if result else 'Unknown error'
                logger.error("Payment customer creation failed",
                             email=user_data.get('email'), error=error_msg)
                raise Exception(f"Payment customer creation failed: {error_msg}")

        except Exception as e:
            logger.error("Failed to create payment customer",
                         email=user_data.get('email'), error=str(e))
            raise

    # ================================================================
    # LEGACY COMPATIBILITY METHODS
    # ================================================================

    async def create_registration_payment_setup(self, user_data: Dict[str, Any]) -> Dict[str, Any]:
        """Create registration payment setup via tenant service orchestration"""
        return await self.start_registration_payment_setup(user_data)

    async def verify_and_complete_registration(
        self,
        setup_intent_id: str,
        user_data: Dict[str, Any]
    ) -> Dict[str, Any]:
        """Verify the SetupIntent and complete registration"""
        return await self.complete_registration(setup_intent_id, user_data)

    async def create_subscription_for_registration(
        self,
        user_data: Dict[str, Any],
        plan_id: str,
        payment_method_id: str,
        billing_cycle: str = "monthly",
        coupon_code: Optional[str] = None
    ) -> Optional[Dict[str, Any]]:
        """Create a tenant-independent subscription during user registration"""
        try:
            logger.info("Creating tenant-independent subscription for registration",
                        user_id=user_data.get('user_id'),
                        plan_id=plan_id,
                        billing_cycle=billing_cycle)

            registration_data = {
                **user_data,
                "plan_id": plan_id,
                "payment_method_id": payment_method_id,
                "billing_cycle": billing_cycle,
                "coupon_code": coupon_code
            }

            setup_result = await self.start_registration_payment_setup(registration_data)

            if setup_result and setup_result.get("success"):
                return {
                    "subscription_id": setup_result.get('setup_intent_id'),
                    "customer_id": setup_result.get('customer_id'),
                    "status": "pending_verification",
                    "plan": plan_id,
                    "billing_cycle": billing_cycle,
                    "setup_intent_id": setup_result.get('setup_intent_id'),
                    "client_secret": setup_result.get('client_secret')
                }
            return None

        except Exception as e:
            logger.error("Failed to create subscription for registration",
                         user_id=user_data.get('user_id'), error=str(e))
            return None

    async def link_subscription_to_tenant(
        self,
        tenant_id: str,
        subscription_id: str,
        user_id: str
    ) -> Optional[Dict[str, Any]]:
        """Link a pending subscription to a tenant"""
        try:
            logger.info("Linking subscription to tenant",
                        tenant_id=tenant_id,
                        subscription_id=subscription_id,
                        user_id=user_id)

            linking_data = {
                "subscription_id": subscription_id,
                "user_id": user_id
            }

            result = await self._make_request(
                "POST",
                f"tenants/{tenant_id}/link-subscription",
                data=linking_data
            )

            if result and result.get("success"):
                logger.info("Subscription linked to tenant successfully",
                            tenant_id=tenant_id,
                            subscription_id=subscription_id)
                return result
            else:
                logger.error("Subscription linking failed",
                             tenant_id=tenant_id,
                             subscription_id=subscription_id,
                             error=result.get('detail') if result else 'No detail provided')
                return None

        except Exception as e:
            logger.error("Failed to link subscription to tenant",
                         tenant_id=tenant_id,
                         subscription_id=subscription_id,
                         error=str(e))
            return None

    async def get_user_primary_tenant(self, user_id: str) -> Optional[Dict[str, Any]]:
        """Get the primary tenant for a user"""
        try:
            logger.info("Getting primary tenant for user",
                        user_id=user_id)

            result = await self._make_request(
                "GET",
                f"tenants/users/{user_id}/primary-tenant"
            )

            if result:
                logger.info("Primary tenant retrieved successfully",
                            user_id=user_id,
                            tenant_id=result.get('tenant_id'))
                return result
            else:
                logger.warning("No primary tenant found for user",
                               user_id=user_id)
                return None

        except Exception as e:
            logger.error("Failed to get primary tenant for user",
                         user_id=user_id,
                         error=str(e))
            return None

    async def get_user_memberships(self, user_id: str) -> Optional[List[Dict[str, Any]]]:
        """Get all tenant memberships for a user"""
        try:
            logger.info("Getting tenant memberships for user",
                        user_id=user_id)

            result = await self._make_request(
                "GET",
                f"tenants/members/user/{user_id}"
            )

            if result:
                logger.info("User memberships retrieved successfully",
                            user_id=user_id,
                            membership_count=len(result))
                return result
            else:
                logger.warning("No memberships found for user",
                               user_id=user_id)
                return []

        except Exception as e:
            logger.error("Failed to get user memberships",
                         user_id=user_id,
                         error=str(e))
            return None


# Factory function for dependency injection
def create_tenant_client(config: BaseServiceSettings) -> TenantServiceClient:
    """Create a tenant service client instance"""
    return TenantServiceClient(config)
162
shared/clients/training_client.py
Executable file
@@ -0,0 +1,162 @@
# shared/clients/training_client.py
"""
Training Service Client
Handles all API calls to the training service
"""

from typing import Dict, Any, Optional, List
from .base_service_client import BaseServiceClient
from shared.config.base import BaseServiceSettings


class TrainingServiceClient(BaseServiceClient):
    """Client for communicating with the training service"""

    def __init__(self, config: BaseServiceSettings, calling_service_name: str = "unknown"):
        super().__init__(calling_service_name, config)

    def get_service_base_path(self) -> str:
        return "/api/v1"

    # ================================================================
    # TRAINING JOBS
    # ================================================================

    async def create_training_job(
        self,
        tenant_id: str,
        include_weather: bool = True,
        include_traffic: bool = False,
        min_data_points: int = 30,
        **kwargs
    ) -> Optional[Dict[str, Any]]:
        """Create a new training job"""
        data = {
            "include_weather": include_weather,
            "include_traffic": include_traffic,
            "min_data_points": min_data_points,
            **kwargs
        }
        return await self.post("training/jobs", data=data, tenant_id=tenant_id)
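
    # Illustrative sketch: create a job, then poll its status via
    # get_training_job (which hits the jobs/{job_id}/status endpoint).
    # The "status" values and the "id" key are assumptions about the
    # service's job model.
    #
    #     job = await training_client.create_training_job(tenant_id, include_weather=True)
    #     while job and job.get("status") not in ("completed", "failed"):
    #         await asyncio.sleep(10)
    #         job = await training_client.get_training_job(tenant_id, job["id"])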

    async def get_training_job(self, tenant_id: str, job_id: str) -> Optional[Dict[str, Any]]:
        """Get training job details"""
        return await self.get(f"training/jobs/{job_id}/status", tenant_id=tenant_id)

    async def list_training_jobs(
        self,
        tenant_id: str,
        status: Optional[str] = None,
        limit: int = 50
    ) -> Optional[List[Dict[str, Any]]]:
        """List training jobs for a tenant"""
        params = {"limit": limit}
        if status:
            params["status"] = status

        result = await self.get("training/jobs", tenant_id=tenant_id, params=params)
        return result.get("jobs", []) if result else None

    async def cancel_training_job(self, tenant_id: str, job_id: str) -> Optional[Dict[str, Any]]:
        """Cancel a training job"""
        return await self.delete(f"training/jobs/{job_id}", tenant_id=tenant_id)

    # ================================================================
    # MODELS
    # ================================================================

    async def get_model(self, tenant_id: str, model_id: str) -> Optional[Dict[str, Any]]:
        """Get model details"""
        return await self.get(f"training/models/{model_id}", tenant_id=tenant_id)

    async def list_models(
        self,
        tenant_id: str,
        status: Optional[str] = None,
        model_type: Optional[str] = None,
        limit: int = 50
    ) -> Optional[List[Dict[str, Any]]]:
        """List models for a tenant"""
        params = {"limit": limit}
        if status:
            params["status"] = status
        if model_type:
            params["model_type"] = model_type

        result = await self.get("training/models", tenant_id=tenant_id, params=params)
        return result.get("models", []) if result else None

    async def get_active_model_for_product(
        self,
        tenant_id: str,
        inventory_product_id: str
    ) -> Optional[Dict[str, Any]]:
        """
        Get the active model for a specific product by inventory product ID.
        This is the preferred method since models are stored per product.
        """
        result = await self.get(f"training/models/{inventory_product_id}/active", tenant_id=tenant_id)
        return result

    async def deploy_model(self, tenant_id: str, model_id: str) -> Optional[Dict[str, Any]]:
        """Deploy a trained model"""
        return await self.post(f"training/models/{model_id}/deploy", data={}, tenant_id=tenant_id)

    async def delete_model(self, tenant_id: str, model_id: str) -> Optional[Dict[str, Any]]:
        """Delete a model"""
        return await self.delete(f"training/models/{model_id}", tenant_id=tenant_id)

    # ================================================================
    # MODEL METRICS & PERFORMANCE
    # ================================================================

    async def get_model_metrics(self, tenant_id: str, model_id: str) -> Optional[Dict[str, Any]]:
        """Get model performance metrics"""
        return await self.get(f"training/models/{model_id}/metrics", tenant_id=tenant_id)

    async def get_model_predictions(
        self,
        tenant_id: str,
        model_id: str,
        start_date: Optional[str] = None,
        end_date: Optional[str] = None
    ) -> Optional[List[Dict[str, Any]]]:
        """Get model predictions for evaluation"""
        params = {}
        if start_date:
            params["start_date"] = start_date
        if end_date:
            params["end_date"] = end_date

        result = await self.get(f"training/models/{model_id}/predictions", tenant_id=tenant_id, params=params)
        return result.get("predictions", []) if result else None
async def trigger_retrain(
|
||||
self,
|
||||
tenant_id: str,
|
||||
inventory_product_id: str,
|
||||
reason: str = 'manual',
|
||||
metadata: Optional[Dict[str, Any]] = None
|
||||
) -> Optional[Dict[str, Any]]:
|
||||
"""
|
||||
Trigger model retraining for a specific product.
|
||||
Used by orchestrator when forecast accuracy degrades.
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant UUID
|
||||
inventory_product_id: Product UUID to retrain model for
|
||||
reason: Reason for retraining (accuracy_degradation, manual, scheduled, etc.)
|
||||
metadata: Optional metadata (e.g., previous_mape, validation_date, etc.)
|
||||
|
||||
Returns:
|
||||
Training job details or None if failed
|
||||
"""
|
||||
data = {
|
||||
"inventory_product_id": inventory_product_id,
|
||||
"reason": reason,
|
||||
"metadata": metadata or {},
|
||||
"include_weather": True,
|
||||
"include_traffic": False,
|
||||
"min_data_points": 30
|
||||
}
|
||||
return await self.post("training/models/retrain", data=data, tenant_id=tenant_id)
0
shared/config/__init__.py
Executable file
537
shared/config/base.py
Executable file
@@ -0,0 +1,537 @@
# shared/config/base.py
"""
Base configuration for all microservices
Provides common settings and patterns
"""

import os
from typing import List, Dict, Optional, Any, Set
from pydantic_settings import BaseSettings
from pydantic import validator, Field


# ================================================================
# INTERNAL SERVICE REGISTRY
# ================================================================

# Central registry of all internal microservices that should have
# automatic access to tenant resources without user membership.
# Service names should match the naming convention used in JWT tokens.
INTERNAL_SERVICES: Set[str] = {
    # Core services
    "auth-service",
    "tenant-service",
    "gateway",  # API Gateway
    "gateway-service",  # Alternative name for gateway

    # Business logic services
    "inventory-service",
    "production-service",
    "recipes-service",
    "suppliers-service",
    "pos-service",
    "orders-service",
    "sales-service",
    "procurement-service",

    # ML and analytics services
    "training-service",
    "forecasting-service",
    "ai-insights-service",

    # Orchestration services
    "orchestrator-service",

    # Support services
    "notification-service",
    "alert-service",
    "alert-processor-service",
    "alert-processor",  # Alternative name (from k8s service name)
    "demo-session-service",
    "demo-service",  # Alternative name for demo session service
    "external-service",

    # Enterprise services
    "distribution-service",
}


def is_internal_service(service_identifier: str) -> bool:
    """
    Check if a service identifier represents an internal service.

    Args:
        service_identifier: Service name (e.g., 'production-service')

    Returns:
        bool: True if the identifier is a recognized internal service
    """
    return service_identifier in INTERNAL_SERVICES


class BaseServiceSettings(BaseSettings):
    """
    Base configuration class for all microservices
    Provides common settings and validation patterns
    """

    # ================================================================
    # CORE SERVICE SETTINGS
    # ================================================================

    # Application Identity
    APP_NAME: str = "Bakery Service"
    SERVICE_NAME: str = "base-service"
    VERSION: str = "1.0.0"
    DESCRIPTION: str = "Base microservice for bakery platform"

    # Environment & Debugging
    ENVIRONMENT: str = os.getenv("ENVIRONMENT", "development")
    DEBUG: bool = os.getenv("DEBUG", "false").lower() == "true"
    LOG_LEVEL: str = os.getenv("LOG_LEVEL", "INFO")

    # Service Discovery & Health
    SERVICE_HOST: str = os.getenv("SERVICE_HOST", "0.0.0.0")
    SERVICE_PORT: int = int(os.getenv("SERVICE_PORT", "8000"))
    HEALTH_CHECK_ENABLED: bool = True
    METRICS_ENABLED: bool = True

    # ================================================================
    # DATABASE CONFIGURATION
    # ================================================================

    # Note: DATABASE_URL is defined as a property in each service-specific config
    # to construct the URL from secure environment variables

    # Database connection settings
    DB_POOL_SIZE: int = int(os.getenv("DB_POOL_SIZE", "10"))
    DB_MAX_OVERFLOW: int = int(os.getenv("DB_MAX_OVERFLOW", "20"))
    DB_POOL_TIMEOUT: int = int(os.getenv("DB_POOL_TIMEOUT", "30"))
    DB_POOL_RECYCLE: int = int(os.getenv("DB_POOL_RECYCLE", "3600"))
    DB_POOL_PRE_PING: bool = os.getenv("DB_POOL_PRE_PING", "true").lower() == "true"
    DB_ECHO: bool = os.getenv("DB_ECHO", "false").lower() == "true"

    # ================================================================
    # REDIS CONFIGURATION
    # ================================================================

    @property
    def REDIS_URL(self) -> str:
        """Build Redis URL from secure components with TLS support"""
        # Try complete URL first (for backward compatibility)
        complete_url = os.getenv("REDIS_URL")
        if complete_url:
            # Upgrade to TLS if not already
            if complete_url.startswith("redis://") and "tls" not in complete_url.lower():
                complete_url = complete_url.replace("redis://", "rediss://", 1)
            return complete_url

        # Build from components (secure approach with TLS)
        password = os.getenv("REDIS_PASSWORD", "")
        host = os.getenv("REDIS_HOST", "redis-service")
        port = os.getenv("REDIS_PORT", "6379")
        use_tls = os.getenv("REDIS_TLS_ENABLED", "true").lower() == "true"

        # Use rediss:// for TLS, redis:// for non-TLS
        protocol = "rediss" if use_tls else "redis"

        if password:
            url = f"{protocol}://:{password}@{host}:{port}"
            if use_tls:
                # Use ssl_cert_reqs=none for self-signed certs in internal cluster
                # Still encrypted, just skips cert validation
                url += "?ssl_cert_reqs=none"
            return url

        url = f"{protocol}://{host}:{port}"
        if use_tls:
            # Use ssl_cert_reqs=none for self-signed certs in internal cluster
            url += "?ssl_cert_reqs=none"
        return url

    REDIS_DB: int = int(os.getenv("REDIS_DB", "0"))
    REDIS_MAX_CONNECTIONS: int = int(os.getenv("REDIS_MAX_CONNECTIONS", "50"))
    REDIS_RETRY_ON_TIMEOUT: bool = True
    REDIS_SOCKET_KEEPALIVE: bool = True
    REDIS_SOCKET_KEEPALIVE_OPTIONS: Dict[str, int] = {
        "TCP_KEEPIDLE": 1,
        "TCP_KEEPINTVL": 3,
        "TCP_KEEPCNT": 5,
    }

    @property
    def REDIS_URL_WITH_DB(self) -> str:
        """Get Redis URL with database number"""
        base_url = self.REDIS_URL.rstrip('/')
        # Keep any query string (e.g. ?ssl_cert_reqs=none) after the DB number
        if "?" in base_url:
            base, query = base_url.split("?", 1)
            return f"{base.rstrip('/')}/{self.REDIS_DB}?{query}"
        return f"{base_url}/{self.REDIS_DB}"

    # ================================================================
    # RABBITMQ CONFIGURATION
    # ================================================================

    @property
    def RABBITMQ_URL(self) -> str:
        """Build RabbitMQ URL from secure components"""
        # Try complete URL first (for backward compatibility)
        complete_url = os.getenv("RABBITMQ_URL")
        if complete_url:
            return complete_url

        # Build from components (secure approach)
        user = os.getenv("RABBITMQ_USER", "bakery")
        password = os.getenv("RABBITMQ_PASSWORD", "forecast123")
        host = os.getenv("RABBITMQ_HOST", "rabbitmq-service")
        port = os.getenv("RABBITMQ_PORT", "5672")
        vhost = os.getenv("RABBITMQ_VHOST", "/")

        return f"amqp://{user}:{password}@{host}:{port}{vhost}"

    RABBITMQ_EXCHANGE: str = os.getenv("RABBITMQ_EXCHANGE", "bakery_events")
    RABBITMQ_QUEUE_PREFIX: str = os.getenv("RABBITMQ_QUEUE_PREFIX", "bakery")
    RABBITMQ_RETRY_ATTEMPTS: int = int(os.getenv("RABBITMQ_RETRY_ATTEMPTS", "3"))
    RABBITMQ_RETRY_DELAY: int = int(os.getenv("RABBITMQ_RETRY_DELAY", "5"))

    # ================================================================
    # AUTHENTICATION & SECURITY
    # ================================================================

    # JWT Configuration
    # ✅ FIXED: Use production JWT secret key to match auth service
    # Must be same across all services for inter-service communication
    JWT_SECRET_KEY: str = os.getenv("JWT_SECRET_KEY", "usMHw9kQCQoyrc7wPmMi3bClr0lTY9wvzZmcTbADvL0=")
    JWT_ALGORITHM: str = os.getenv("JWT_ALGORITHM", "HS256")
    JWT_ACCESS_TOKEN_EXPIRE_MINUTES: int = int(os.getenv("JWT_ACCESS_TOKEN_EXPIRE_MINUTES", "30"))
    JWT_REFRESH_TOKEN_EXPIRE_DAYS: int = int(os.getenv("JWT_REFRESH_TOKEN_EXPIRE_DAYS", "7"))

    # Password Requirements
    PASSWORD_MIN_LENGTH: int = int(os.getenv("PASSWORD_MIN_LENGTH", "8"))
    PASSWORD_REQUIRE_UPPERCASE: bool = os.getenv("PASSWORD_REQUIRE_UPPERCASE", "true").lower() == "true"
    PASSWORD_REQUIRE_LOWERCASE: bool = os.getenv("PASSWORD_REQUIRE_LOWERCASE", "true").lower() == "true"
    PASSWORD_REQUIRE_NUMBERS: bool = os.getenv("PASSWORD_REQUIRE_NUMBERS", "true").lower() == "true"
    PASSWORD_REQUIRE_SYMBOLS: bool = os.getenv("PASSWORD_REQUIRE_SYMBOLS", "false").lower() == "true"

    # Security Settings
    BCRYPT_ROUNDS: int = int(os.getenv("BCRYPT_ROUNDS", "12"))
    MAX_LOGIN_ATTEMPTS: int = int(os.getenv("MAX_LOGIN_ATTEMPTS", "5"))
    LOCKOUT_DURATION_MINUTES: int = int(os.getenv("LOCKOUT_DURATION_MINUTES", "30"))

    # ================================================================
    # INTER-SERVICE COMMUNICATION
    # ================================================================

    # Service URLs (can be overridden by environment variables)
    GATEWAY_URL: str = os.getenv("GATEWAY_URL", "http://gateway-service:8000")
    AUTH_SERVICE_URL: str = os.getenv("AUTH_SERVICE_URL", "http://auth-service:8000")
    TRAINING_SERVICE_URL: str = os.getenv("TRAINING_SERVICE_URL", "http://training-service:8000")
    FORECASTING_SERVICE_URL: str = os.getenv("FORECASTING_SERVICE_URL", "http://forecasting-service:8000")
    SALES_SERVICE_URL: str = os.getenv("SALES_SERVICE_URL", "http://sales-service:8000")
    EXTERNAL_SERVICE_URL: str = os.getenv("EXTERNAL_SERVICE_URL", "http://external-service:8000")
    TENANT_SERVICE_URL: str = os.getenv("TENANT_SERVICE_URL", "http://tenant-service:8000")
    INVENTORY_SERVICE_URL: str = os.getenv("INVENTORY_SERVICE_URL", "http://inventory-service:8000")
    NOTIFICATION_SERVICE_URL: str = os.getenv("NOTIFICATION_SERVICE_URL", "http://notification-service:8000")
    PRODUCTION_SERVICE_URL: str = os.getenv("PRODUCTION_SERVICE_URL", "http://bakery-production-service:8000")
    ORDERS_SERVICE_URL: str = os.getenv("ORDERS_SERVICE_URL", "http://bakery-orders-service:8000")
    SUPPLIERS_SERVICE_URL: str = os.getenv("SUPPLIERS_SERVICE_URL", "http://bakery-suppliers-service:8000")
    RECIPES_SERVICE_URL: str = os.getenv("RECIPES_SERVICE_URL", "http://recipes-service:8000")
    POS_SERVICE_URL: str = os.getenv("POS_SERVICE_URL", "http://pos-service:8000")
    NOMINATIM_SERVICE_URL: str = os.getenv("NOMINATIM_SERVICE_URL", "http://nominatim:8080")
    DEMO_SESSION_SERVICE_URL: str = os.getenv("DEMO_SESSION_SERVICE_URL", "http://demo-session-service:8000")
    ALERT_PROCESSOR_SERVICE_URL: str = os.getenv("ALERT_PROCESSOR_SERVICE_URL", "http://alert-processor:8000")
    PROCUREMENT_SERVICE_URL: str = os.getenv("PROCUREMENT_SERVICE_URL", "http://procurement-service:8000")
    ORCHESTRATOR_SERVICE_URL: str = os.getenv("ORCHESTRATOR_SERVICE_URL", "http://orchestrator-service:8000")
    AI_INSIGHTS_SERVICE_URL: str = os.getenv("AI_INSIGHTS_SERVICE_URL", "http://ai-insights-service:8000")
    DISTRIBUTION_SERVICE_URL: str = os.getenv("DISTRIBUTION_SERVICE_URL", "http://distribution-service:8000")

    # HTTP Client Settings
    HTTP_TIMEOUT: int = int(os.getenv("HTTP_TIMEOUT", "30"))
    HTTP_RETRIES: int = int(os.getenv("HTTP_RETRIES", "3"))
    HTTP_RETRY_DELAY: float = float(os.getenv("HTTP_RETRY_DELAY", "1.0"))

    # ================================================================
    # CORS & API CONFIGURATION
    # ================================================================

    CORS_ORIGINS: str = os.getenv("CORS_ORIGINS", "http://localhost:3000,http://localhost:3001")
    CORS_ALLOW_CREDENTIALS: bool = os.getenv("CORS_ALLOW_CREDENTIALS", "true").lower() == "true"
    CORS_ALLOW_METHODS: List[str] = ["GET", "POST", "PUT", "DELETE", "PATCH", "OPTIONS"]
    CORS_ALLOW_HEADERS: List[str] = ["*"]

    @property
    def CORS_ORIGINS_LIST(self) -> List[str]:
        """Get CORS origins as list"""
        return [origin.strip() for origin in self.CORS_ORIGINS.split(",") if origin.strip()]

    # Rate Limiting
    RATE_LIMIT_ENABLED: bool = os.getenv("RATE_LIMIT_ENABLED", "true").lower() == "true"
    RATE_LIMIT_REQUESTS: int = int(os.getenv("RATE_LIMIT_REQUESTS", "100"))
    RATE_LIMIT_WINDOW: int = int(os.getenv("RATE_LIMIT_WINDOW", "60"))
    RATE_LIMIT_BURST: int = int(os.getenv("RATE_LIMIT_BURST", "10"))

    # API Documentation
    API_DOCS_ENABLED: bool = os.getenv("API_DOCS_ENABLED", "true").lower() == "true"
    API_DOCS_URL: str = "/docs"
    API_REDOC_URL: str = "/redoc"
    API_OPENAPI_URL: str = "/openapi.json"

    # ================================================================
    # EXTERNAL APIS & INTEGRATIONS
    # ================================================================

    # Weather API (AEMET - Spanish Weather Service)
    AEMET_API_KEY: str = os.getenv("AEMET_API_KEY", "")
    AEMET_BASE_URL: str = "https://opendata.aemet.es/opendata"
    AEMET_TIMEOUT: int = int(os.getenv("AEMET_TIMEOUT", "30"))

    # Madrid Open Data
    MADRID_OPENDATA_API_KEY: str = os.getenv("MADRID_OPENDATA_API_KEY", "")
    MADRID_OPENDATA_BASE_URL: str = "https://datos.madrid.es"
    MADRID_OPENDATA_TIMEOUT: int = int(os.getenv("MADRID_OPENDATA_TIMEOUT", "30"))

    # Email Configuration
    SMTP_HOST: str = os.getenv("SMTP_HOST", "smtp.gmail.com")
    SMTP_PORT: int = int(os.getenv("SMTP_PORT", "587"))
    SMTP_USER: str = os.getenv("SMTP_USER", "")
    SMTP_PASSWORD: str = os.getenv("SMTP_PASSWORD", "")
    SMTP_TLS: bool = os.getenv("SMTP_TLS", "true").lower() == "true"
    SMTP_SSL: bool = os.getenv("SMTP_SSL", "false").lower() == "true"

    # WhatsApp API
    WHATSAPP_API_KEY: str = os.getenv("WHATSAPP_API_KEY", "")
    WHATSAPP_BASE_URL: str = os.getenv("WHATSAPP_BASE_URL", "https://api.twilio.com")
    WHATSAPP_FROM_NUMBER: str = os.getenv("WHATSAPP_FROM_NUMBER", "")

    # Stripe Payment Configuration
    STRIPE_PUBLISHABLE_KEY: str = os.getenv("STRIPE_PUBLISHABLE_KEY", "")
    STRIPE_SECRET_KEY: str = os.getenv("STRIPE_SECRET_KEY", "")
    STRIPE_WEBHOOK_SECRET: str = os.getenv("STRIPE_WEBHOOK_SECRET", "")
    STRIPE_API_VERSION: str = os.getenv("STRIPE_API_VERSION", "")  # Empty = use SDK default

    # ================================================================
    # ML & AI CONFIGURATION
    # ================================================================

    # Model Storage Backend (MinIO is the primary storage)
    MODEL_STORAGE_BACKEND: str = os.getenv("MODEL_STORAGE_BACKEND", "minio")

    # Training Configuration
    MAX_TRAINING_TIME_MINUTES: int = int(os.getenv("MAX_TRAINING_TIME_MINUTES", "30"))
    MIN_TRAINING_DATA_DAYS: int = int(os.getenv("MIN_TRAINING_DATA_DAYS", "30"))
    TRAINING_BATCH_SIZE: int = int(os.getenv("TRAINING_BATCH_SIZE", "1000"))

    # Prophet Configuration
    PROPHET_SEASONALITY_MODE: str = os.getenv("PROPHET_SEASONALITY_MODE", "additive")
    PROPHET_CHANGEPOINT_PRIOR_SCALE: float = float(os.getenv("PROPHET_CHANGEPOINT_PRIOR_SCALE", "0.05"))
    PROPHET_SEASONALITY_PRIOR_SCALE: float = float(os.getenv("PROPHET_SEASONALITY_PRIOR_SCALE", "10.0"))

    # Prediction Caching
    PREDICTION_CACHE_TTL_HOURS: int = int(os.getenv("PREDICTION_CACHE_TTL_HOURS", "6"))
    WEATHER_CACHE_TTL_HOURS: int = int(os.getenv("WEATHER_CACHE_TTL_HOURS", "1"))
    TRAFFIC_CACHE_TTL_HOURS: int = int(os.getenv("TRAFFIC_CACHE_TTL_HOURS", "1"))

    # ================================================================
    # MONITORING & OBSERVABILITY
    # ================================================================

    # Logging Configuration
    LOG_FORMAT: str = os.getenv("LOG_FORMAT", "json")  # json, text
    LOG_FILE_ENABLED: bool = os.getenv("LOG_FILE_ENABLED", "false").lower() == "true"
    LOG_FILE_PATH: str = os.getenv("LOG_FILE_PATH", "/app/logs")
    LOG_ROTATION_SIZE: str = os.getenv("LOG_ROTATION_SIZE", "100MB")

    # Metrics & Monitoring
    PROMETHEUS_ENABLED: bool = os.getenv("PROMETHEUS_ENABLED", "true").lower() == "true"
    PROMETHEUS_PORT: int = int(os.getenv("PROMETHEUS_PORT", "9090"))
    PROMETHEUS_PATH: str = "/metrics"

    # Tracing
    JAEGER_ENABLED: bool = os.getenv("JAEGER_ENABLED", "false").lower() == "true"
    JAEGER_AGENT_HOST: str = os.getenv("JAEGER_AGENT_HOST", "localhost")
    JAEGER_AGENT_PORT: int = int(os.getenv("JAEGER_AGENT_PORT", "6831"))

    # Health Checks
    HEALTH_CHECK_TIMEOUT: int = int(os.getenv("HEALTH_CHECK_TIMEOUT", "30"))
    HEALTH_CHECK_INTERVAL: int = int(os.getenv("HEALTH_CHECK_INTERVAL", "30"))

    # ================================================================
    # DATA RETENTION & CLEANUP
    # ================================================================

    DATA_RETENTION_DAYS: int = int(os.getenv("DATA_RETENTION_DAYS", "365"))
    LOG_RETENTION_DAYS: int = int(os.getenv("LOG_RETENTION_DAYS", "90"))
    METRIC_RETENTION_DAYS: int = int(os.getenv("METRIC_RETENTION_DAYS", "90"))
    TEMP_FILE_CLEANUP_HOURS: int = int(os.getenv("TEMP_FILE_CLEANUP_HOURS", "24"))

    # ================================================================
    # BUSINESS RULES & CONSTRAINTS
    # ================================================================

    # Forecasting Business Rules
    MAX_FORECAST_DAYS: int = int(os.getenv("MAX_FORECAST_DAYS", "30"))
    MIN_HISTORICAL_DAYS: int = int(os.getenv("MIN_HISTORICAL_DAYS", "60"))
    CONFIDENCE_THRESHOLD: float = float(os.getenv("CONFIDENCE_THRESHOLD", "0.8"))

    # Spanish Business Context
    TIMEZONE: str = os.getenv("TIMEZONE", "Europe/Madrid")
    LOCALE: str = os.getenv("LOCALE", "es_ES.UTF-8")
    CURRENCY: str = os.getenv("CURRENCY", "EUR")

    # Business Hours (24-hour format)
    BUSINESS_HOUR_START: int = int(os.getenv("BUSINESS_HOUR_START", "7"))
    BUSINESS_HOUR_END: int = int(os.getenv("BUSINESS_HOUR_END", "20"))

    # Spanish Holidays & Seasonal Adjustments
    ENABLE_SPANISH_HOLIDAYS: bool = os.getenv("ENABLE_SPANISH_HOLIDAYS", "true").lower() == "true"
    ENABLE_MADRID_HOLIDAYS: bool = os.getenv("ENABLE_MADRID_HOLIDAYS", "true").lower() == "true"
    SCHOOL_CALENDAR_ENABLED: bool = os.getenv("SCHOOL_CALENDAR_ENABLED", "true").lower() == "true"

    # ================================================================
    # PROCUREMENT AUTOMATION
    # ================================================================

    # NOTE: Tenant-specific procurement settings (auto-approval thresholds, supplier scores,
    # approval rules, lead times, forecast days, etc.) have been moved to TenantSettings.
    # Services should fetch these using TenantSettingsClient from shared/utils/tenant_settings_client.py

    # System-level procurement settings (apply to all tenants):
    AUTO_CREATE_POS_FROM_PLAN: bool = os.getenv("AUTO_CREATE_POS_FROM_PLAN", "true").lower() == "true"
    PROCUREMENT_TEST_MODE: bool = os.getenv("PROCUREMENT_TEST_MODE", "false").lower() == "true"
    SEND_AUTO_APPROVAL_SUMMARY: bool = os.getenv("SEND_AUTO_APPROVAL_SUMMARY", "true").lower() == "true"
    AUTO_APPROVAL_SUMMARY_TIME_HOUR: int = int(os.getenv("AUTO_APPROVAL_SUMMARY_TIME_HOUR", "18"))

    # ================================================================
    # DEVELOPMENT & TESTING
    # ================================================================

    # Testing Configuration
    TESTING: bool = os.getenv("TESTING", "false").lower() == "true"
    TEST_DATABASE_URL: str = os.getenv("TEST_DATABASE_URL", "")
    MOCK_EXTERNAL_APIS: bool = os.getenv("MOCK_EXTERNAL_APIS", "false").lower() == "true"

    # Development Features
    AUTO_RELOAD: bool = os.getenv("AUTO_RELOAD", "false").lower() == "true"
    PROFILING_ENABLED: bool = os.getenv("PROFILING_ENABLED", "false").lower() == "true"

    # ================================================================
    # VALIDATORS
    # ================================================================

    @validator('JWT_SECRET_KEY')
    def validate_jwt_secret(cls, v):
        if v == "change-this-in-production" and os.getenv("ENVIRONMENT") == "production":
            raise ValueError("JWT_SECRET_KEY must be changed in production")
        if len(v) < 32:
            raise ValueError("JWT_SECRET_KEY must be at least 32 characters long")
        return v

    @validator('LOG_LEVEL')
    def validate_log_level(cls, v):
        valid_levels = ['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL']
        if v.upper() not in valid_levels:
            raise ValueError(f"LOG_LEVEL must be one of: {valid_levels}")
        return v.upper()

    @validator('ENVIRONMENT')
    def validate_environment(cls, v):
        valid_envs = ['development', 'staging', 'production', 'testing']
        if v.lower() not in valid_envs:
            raise ValueError(f"ENVIRONMENT must be one of: {valid_envs}")
        return v.lower()

    # ================================================================
    # COMPUTED PROPERTIES
    # ================================================================

    @property
    def IS_PRODUCTION(self) -> bool:
        """Check if running in production"""
        return self.ENVIRONMENT == "production"

    @property
    def IS_DEVELOPMENT(self) -> bool:
        """Check if running in development"""
        return self.ENVIRONMENT == "development"

    @property
    def IS_TESTING(self) -> bool:
        """Check if running tests"""
        return self.TESTING or self.ENVIRONMENT == "testing"

    @property
    def SERVICE_REGISTRY(self) -> Dict[str, str]:
        """Get all service URLs"""
        return {
            "gateway": self.GATEWAY_URL,
            "auth": self.AUTH_SERVICE_URL,
            "training": self.TRAINING_SERVICE_URL,
            "forecasting": self.FORECASTING_SERVICE_URL,
            "sales": self.SALES_SERVICE_URL,
            "external": self.EXTERNAL_SERVICE_URL,
            "tenant": self.TENANT_SERVICE_URL,
            "inventory": self.INVENTORY_SERVICE_URL,
            "notification": self.NOTIFICATION_SERVICE_URL,
            "production": self.PRODUCTION_SERVICE_URL,
            "orders": self.ORDERS_SERVICE_URL,
            "suppliers": self.SUPPLIERS_SERVICE_URL,
            "recipes": self.RECIPES_SERVICE_URL,
        }

    @property
    def DATABASE_CONFIG(self) -> Dict[str, Any]:
        """Get database configuration for SQLAlchemy"""
        return {
            "url": self.DATABASE_URL,
            "pool_size": self.DB_POOL_SIZE,
            "max_overflow": self.DB_MAX_OVERFLOW,
            "pool_timeout": self.DB_POOL_TIMEOUT,
            "pool_recycle": self.DB_POOL_RECYCLE,
            "pool_pre_ping": self.DB_POOL_PRE_PING,
            "echo": self.DB_ECHO,
        }

    @property
    def REDIS_CONFIG(self) -> Dict[str, Any]:
        """Get Redis configuration"""
        return {
            "url": self.REDIS_URL_WITH_DB,
            "max_connections": self.REDIS_MAX_CONNECTIONS,
            "retry_on_timeout": self.REDIS_RETRY_ON_TIMEOUT,
            "socket_keepalive": self.REDIS_SOCKET_KEEPALIVE,
            "socket_keepalive_options": self.REDIS_SOCKET_KEEPALIVE_OPTIONS,
        }

    # ================================================================
    # CONFIGURATION LOADING
    # ================================================================

    class Config:
        env_file = ".env"
        env_file_encoding = 'utf-8'
        case_sensitive = True

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

        # Validate critical settings in production
        if self.IS_PRODUCTION:
            self._validate_production_settings()

    def _validate_production_settings(self):
        """Validate production-specific settings"""
        critical_settings = [
            'JWT_SECRET_KEY',
            'DATABASE_URL',
            'REDIS_URL',
            'RABBITMQ_URL'
        ]

        for setting in critical_settings:
            value = getattr(self, setting)
            if not value or 'change' in value.lower() or 'default' in value.lower():
                raise ValueError(f"{setting} must be properly configured for production")
70
shared/config/environments.py
Executable file
@@ -0,0 +1,70 @@
# ================================================================
# ENVIRONMENT-SPECIFIC CONFIGURATIONS
# shared/config/environments.py
# ================================================================

"""
Environment-specific configuration overrides
"""

from typing import Dict, Any

DEVELOPMENT_OVERRIDES: Dict[str, Any] = {
    "DEBUG": True,
    "LOG_LEVEL": "DEBUG",
    "DB_ECHO": True,
    "API_DOCS_ENABLED": True,
    "CORS_ORIGINS": "http://localhost:3000,http://localhost:3001,http://127.0.0.1:3000",
    "MOCK_EXTERNAL_APIS": True,
    "AUTO_RELOAD": True,
}

STAGING_OVERRIDES: Dict[str, Any] = {
    "DEBUG": False,
    "LOG_LEVEL": "INFO",
    "DB_ECHO": False,
    "API_DOCS_ENABLED": True,
    "MOCK_EXTERNAL_APIS": False,
    "AUTO_RELOAD": False,
}

PRODUCTION_OVERRIDES: Dict[str, Any] = {
    "DEBUG": False,
    "LOG_LEVEL": "WARNING",
    "DB_ECHO": False,
    "API_DOCS_ENABLED": False,
    "MOCK_EXTERNAL_APIS": False,
    "AUTO_RELOAD": False,
    "PROFILING_ENABLED": False,
    "RATE_LIMIT_ENABLED": True,
}

TESTING_OVERRIDES: Dict[str, Any] = {
    "TESTING": True,
    "DEBUG": True,
    "LOG_LEVEL": "DEBUG",
    "DATABASE_URL": "postgresql+asyncpg://test_user:test_pass@test-db:5432/test_db",
    "REDIS_URL": "redis://test-redis:6379",
    "MOCK_EXTERNAL_APIS": True,
    "EMAIL_VERIFICATION_REQUIRED": False,
    "RATE_LIMIT_ENABLED": False,
}


def get_environment_overrides(environment: str) -> Dict[str, Any]:
    """
    Get configuration overrides for a specific environment

    Args:
        environment: Environment name (development, staging, production, testing)

    Returns:
        Dict: Configuration overrides
    """
    overrides = {
        "development": DEVELOPMENT_OVERRIDES,
        "staging": STAGING_OVERRIDES,
        "production": PRODUCTION_OVERRIDES,
        "testing": TESTING_OVERRIDES,
    }

    return overrides.get(environment.lower(), {})
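A sketch of applying these overrides to a settings instance (the helper is illustrative; note that read-only properties such as DATABASE_URL and REDIS_URL cannot be overridden via setattr, and pydantic may validate assignments depending on the model config):

# Hypothetical helper — illustrative only.
from shared.config.base import BaseServiceSettings

def apply_environment_overrides(settings: BaseServiceSettings) -> BaseServiceSettings:
    for key, value in get_environment_overrides(settings.ENVIRONMENT).items():
        # Skip read-only properties (e.g. DATABASE_URL, REDIS_URL)
        if isinstance(getattr(type(settings), key, None), property):
            continue
        setattr(settings, key, value)
    return settings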
49
shared/config/feature_flags.py
Executable file
@@ -0,0 +1,49 @@
"""
Feature flags for enterprise tier functionality
"""

import os
from typing import Dict, Any


class FeatureFlags:
    """Enterprise feature flags configuration"""

    # Main enterprise tier feature flag
    ENABLE_ENTERPRISE_TIER = os.getenv("ENABLE_ENTERPRISE_TIER", "true").lower() == "true"

    # Internal transfer feature flag
    ENABLE_INTERNAL_TRANSFERS = os.getenv("ENABLE_INTERNAL_TRANSFERS", "true").lower() == "true"

    # Distribution service feature flag
    ENABLE_DISTRIBUTION_SERVICE = os.getenv("ENABLE_DISTRIBUTION_SERVICE", "true").lower() == "true"

    # Network dashboard feature flag
    ENABLE_NETWORK_DASHBOARD = os.getenv("ENABLE_NETWORK_DASHBOARD", "true").lower() == "true"

    # Child tenant management feature flag
    ENABLE_CHILD_TENANT_MANAGEMENT = os.getenv("ENABLE_CHILD_TENANT_MANAGEMENT", "true").lower() == "true"

    # Aggregated forecasting feature flag
    ENABLE_AGGREGATED_FORECASTING = os.getenv("ENABLE_AGGREGATED_FORECASTING", "true").lower() == "true"

    @classmethod
    def get_all_flags(cls) -> Dict[str, Any]:
        """Get all feature flags as a dictionary"""
        return {
            'ENABLE_ENTERPRISE_TIER': cls.ENABLE_ENTERPRISE_TIER,
            'ENABLE_INTERNAL_TRANSFERS': cls.ENABLE_INTERNAL_TRANSFERS,
            'ENABLE_DISTRIBUTION_SERVICE': cls.ENABLE_DISTRIBUTION_SERVICE,
            'ENABLE_NETWORK_DASHBOARD': cls.ENABLE_NETWORK_DASHBOARD,
            'ENABLE_CHILD_TENANT_MANAGEMENT': cls.ENABLE_CHILD_TENANT_MANAGEMENT,
            'ENABLE_AGGREGATED_FORECASTING': cls.ENABLE_AGGREGATED_FORECASTING,
        }

    @classmethod
    def is_enabled(cls, flag_name: str) -> bool:
        """Check if a specific feature flag is enabled"""
        return getattr(cls, flag_name, False)


# Export the feature flags
__all__ = ["FeatureFlags"]
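A sketch of gating an endpoint on one of these flags (the route and error detail are illustrative only):

# Hypothetical usage — the route is illustrative only.
from fastapi import APIRouter, HTTPException

router = APIRouter()

@router.get("/network/dashboard")
async def network_dashboard():
    if not FeatureFlags.is_enabled("ENABLE_NETWORK_DASHBOARD"):
        raise HTTPException(status_code=404, detail="Feature not available")
    return {"status": "ok"}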
216
shared/config/rabbitmq_config.py
Executable file
@@ -0,0 +1,216 @@
# shared/config/rabbitmq_config.py
"""
RabbitMQ configuration for the event system

Supports three event classes through a unified topic exchange:
- ALERT: Actionable events requiring user decision
- NOTIFICATION: Informational state changes
- RECOMMENDATION: AI-generated suggestions

Routing key pattern: {event_class}.{event_domain}.{severity}
Examples:
- alert.inventory.urgent
- notification.production.info
- recommendation.demand.medium
"""

RABBITMQ_CONFIG = {
    "exchanges": {
        "events": {
            "name": "events.exchange",
            "type": "topic",
            "durable": True,
            "auto_delete": False
        },
        # Legacy exchange for backward compatibility during migration
        "alerts": {
            "name": "alerts.exchange",
            "type": "topic",
            "durable": True,
            "auto_delete": False
        },
        "dead_letter": {
            "name": "dlx.exchange",
            "type": "direct",
            "durable": True,
            "auto_delete": False
        }
    },
    "queues": {
        "event_processing": {
            "name": "event.processing.queue",
            "durable": True,
            "arguments": {
                "x-message-ttl": 3600000,  # 1 hour TTL
                "x-max-length": 10000,  # Max 10k messages
                "x-overflow": "reject-publish",
                "x-dead-letter-exchange": "dlx.exchange",
                "x-dead-letter-routing-key": "failed.events"
            }
        },
        # Legacy queue for backward compatibility
        "alert_processing": {
            "name": "alert.processing.queue",
            "durable": True,
            "arguments": {
                "x-message-ttl": 3600000,
                "x-max-length": 10000,
                "x-overflow": "reject-publish",
                "x-dead-letter-exchange": "dlx.exchange",
                "x-dead-letter-routing-key": "failed.items"
            }
        },
        "dead_letter": {
            "name": "event.dead_letter.queue",
            "durable": True,
            "arguments": {
                "x-message-ttl": 86400000  # 24 hours for dead letters
            }
        }
    },
    "bindings": [
        # New event architecture bindings
        {
            "queue": "event.processing.queue",
            "exchange": "events.exchange",
            "routing_key": "*.*.*"  # event_class.event_domain.severity
        },
        # Legacy bindings for backward compatibility
        {
            "queue": "alert.processing.queue",
            "exchange": "alerts.exchange",
            "routing_key": "*.*.*"  # alert/recommendation.severity.service
        },
        {
            "queue": "event.dead_letter.queue",
            "exchange": "dlx.exchange",
            "routing_key": "failed.events"
        },
        {
            "queue": "event.dead_letter.queue",
            "exchange": "dlx.exchange",
            "routing_key": "failed.items"  # Legacy
        }
    ],
    "routing_patterns": {
        # New event architecture patterns
        # event_class.event_domain.severity
        "alert_inventory": "alert.inventory.*",
        "alert_production": "alert.production.*",
        "alert_supply_chain": "alert.supply_chain.*",
        "notification_inventory": "notification.inventory.*",
        "notification_production": "notification.production.*",
        "notification_operations": "notification.operations.*",
        "recommendation_all": "recommendation.*.*",

        # By severity
        "all_urgent": "*.*.urgent",
        "all_high": "*.*.high",
        "all_medium": "*.*.medium",
        "all_low": "*.*.low",

        # By event class
        "all_alerts": "alert.*.*",
        "all_notifications": "notification.*.*",
        "all_recommendations": "recommendation.*.*",

        # By domain
        "inventory_all": "*.inventory.*",
        "production_all": "*.production.*",
        "supply_chain_all": "*.supply_chain.*",
        "demand_all": "*.demand.*",
        "operations_all": "*.operations.*",

        # Legacy patterns (for backward compatibility)
        "legacy_alert": "alert.{severity}.{service}",
        "legacy_recommendation": "recommendation.{severity}.{service}",
    }
}


def get_routing_key(event_class: str, event_domain: str, severity: str) -> str:
    """
    Generate routing key for event publishing.

    New pattern: {event_class}.{event_domain}.{severity}

    Args:
        event_class: 'alert', 'notification', or 'recommendation'
        event_domain: 'inventory', 'production', 'supply_chain', 'demand', 'operations'
        severity: 'urgent', 'high', 'medium', 'low'

    Returns:
        Routing key string

    Examples:
        >>> get_routing_key('alert', 'inventory', 'urgent')
        'alert.inventory.urgent'
        >>> get_routing_key('notification', 'production', 'info')
        'notification.production.info'
    """
    return f"{event_class}.{event_domain}.{severity}"


def get_legacy_routing_key(item_type: str, severity: str, service: str) -> str:
    """
    Generate legacy routing key for backward compatibility.

    Legacy pattern: {item_type}.{severity}.{service}

    TODO: Remove after migration is complete.
    """
    return f"{item_type}.{severity}.{service}"


def get_binding_patterns(
    event_classes: list = None,
    event_domains: list = None,
    severities: list = None
) -> list:
    """
    Generate binding patterns for selective consumption.

    Args:
        event_classes: List of event classes to bind (default: all)
        event_domains: List of event domains to bind (default: all)
        severities: List of severities to bind (default: all)

    Returns:
        List of routing key patterns

    Examples:
        >>> get_binding_patterns(['alert'], ['inventory'], ['urgent', 'high'])
        ['alert.inventory.urgent', 'alert.inventory.high']
    """
    patterns = []

    event_classes = event_classes or ["alert", "notification", "recommendation"]
    event_domains = event_domains or ["inventory", "production", "supply_chain", "demand", "operations"]
    severities = severities or ["urgent", "high", "medium", "low"]

    for event_class in event_classes:
        for event_domain in event_domains:
            for severity in severities:
                patterns.append(f"{event_class}.{event_domain}.{severity}")

    return patterns


def priority_score_to_severity(priority_score: int) -> str:
    """
    Convert priority score (0-100) to severity level.

    Args:
        priority_score: Priority score (0-100)

    Returns:
        Severity level: 'urgent', 'high', 'medium', or 'low'
    """
    if priority_score >= 90:
        return "urgent"
    elif priority_score >= 70:
        return "high"
    elif priority_score >= 50:
        return "medium"
    else:
        return "low"
83
shared/config/utils.py
Executable file
@@ -0,0 +1,83 @@
# ================================================================
# SHARED CONFIGURATION UTILITIES
# shared/config/utils.py
# ================================================================

"""
Configuration utilities and helpers
"""

from typing import Dict, Any, Type
from shared.config.base import BaseServiceSettings

# NOTE: GatewaySettings, AuthSettings, TrainingSettings, ForecastingSettings,
# DataSettings, TenantSettings and NotificationSettings are defined in their
# respective service config modules; they must be imported here before this
# registry can be used (the imports are missing from this file).

# Service settings registry
SERVICE_SETTINGS: Dict[str, Type[BaseServiceSettings]] = {
    "gateway": GatewaySettings,
    "auth-service": AuthSettings,
    "training-service": TrainingSettings,
    "forecasting-service": ForecastingSettings,
    "data-service": DataSettings,
    "tenant-service": TenantSettings,
    "notification-service": NotificationSettings,
}


def get_settings_for_service(service_name: str) -> BaseServiceSettings:
    """
    Get settings instance for a specific service

    Args:
        service_name: Name of the service

    Returns:
        BaseServiceSettings: Configured settings instance

    Raises:
        ValueError: If service name is not recognized
    """
    if service_name not in SERVICE_SETTINGS:
        raise ValueError(f"Unknown service: {service_name}. Available: {list(SERVICE_SETTINGS.keys())}")

    settings_class = SERVICE_SETTINGS[service_name]
    return settings_class()


def validate_all_service_configs() -> Dict[str, Any]:
    """
    Validate configuration for all services

    Returns:
        Dict: Validation results for each service
    """
    results = {}

    for service_name, settings_class in SERVICE_SETTINGS.items():
        try:
            settings = settings_class()
            results[service_name] = {
                "status": "valid",
                "config": {
                    "app_name": settings.APP_NAME,
                    "version": settings.VERSION,
                    "environment": settings.ENVIRONMENT,
                    "database_configured": bool(settings.DATABASE_URL),
                    "redis_configured": bool(settings.REDIS_URL),
                    "debug_mode": settings.DEBUG,
                }
            }
        except Exception as e:
            results[service_name] = {
                "status": "error",
                "error": str(e)
            }

    return results


def get_service_urls() -> Dict[str, str]:
    """
    Get all service URLs from any service configuration

    Returns:
        Dict: Service name to URL mapping
    """
    # Use auth service settings as reference (all services share the same URLs)
    settings = AuthSettings()
    return settings.SERVICE_REGISTRY
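For illustration, a startup check built on these helpers might look like this (assuming the missing settings-class imports noted above are in place):

# Hypothetical startup check — illustrative only.
def assert_configs_valid() -> None:
    results = validate_all_service_configs()
    broken = {name: r["error"] for name, r in results.items() if r["status"] == "error"}
    if broken:
        raise RuntimeError(f"Invalid service configs: {broken}")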
68
shared/database/__init__.py
Executable file
@@ -0,0 +1,68 @@
"""
Shared Database Infrastructure
Provides consistent database patterns across all microservices
"""

from .base import DatabaseManager, Base, create_database_manager
from .repository import BaseRepository
from .unit_of_work import UnitOfWork, ServiceUnitOfWork, RepositoryRegistry
from .transactions import (
    transactional,
    unit_of_work_transactional,
    managed_transaction,
    managed_unit_of_work,
    TransactionManager,
    run_in_transaction,
    run_with_unit_of_work
)
from .exceptions import (
    DatabaseError,
    ConnectionError,
    RecordNotFoundError,
    DuplicateRecordError,
    ConstraintViolationError,
    TransactionError,
    ValidationError,
    MigrationError,
    HealthCheckError
)
from .utils import DatabaseUtils, QueryLogger

__all__ = [
    # Core components
    "DatabaseManager",
    "Base",
    "create_database_manager",

    # Repository pattern
    "BaseRepository",

    # Unit of Work pattern
    "UnitOfWork",
    "ServiceUnitOfWork",
    "RepositoryRegistry",

    # Transaction management
    "transactional",
    "unit_of_work_transactional",
    "managed_transaction",
    "managed_unit_of_work",
    "TransactionManager",
    "run_in_transaction",
    "run_with_unit_of_work",

    # Exceptions
    "DatabaseError",
    "ConnectionError",
    "RecordNotFoundError",
    "DuplicateRecordError",
    "ConstraintViolationError",
    "TransactionError",
    "ValidationError",
    "MigrationError",
    "HealthCheckError",

    # Utilities
    "DatabaseUtils",
    "QueryLogger"
]
408
shared/database/base.py
Executable file
@@ -0,0 +1,408 @@
"""
Enhanced Base Database Configuration for All Microservices
Provides DatabaseManager with connection pooling, health checks, and multi-database support

Fixed: SSL configuration now uses connect_args instead of URL parameters to avoid asyncpg parameter parsing issues
"""

import os
import ssl
from typing import Optional, Dict, Any, List
from sqlalchemy import create_engine, text
from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine, async_sessionmaker
from sqlalchemy.orm import sessionmaker, declarative_base
from sqlalchemy.pool import StaticPool
from contextlib import asynccontextmanager
import structlog
import time

from .exceptions import DatabaseError, ConnectionError, HealthCheckError
from .utils import DatabaseUtils

logger = structlog.get_logger()

Base = declarative_base()


class DatabaseManager:
    """Enhanced Database Manager for Microservices

    Provides:
    - Connection pooling with configurable settings
    - Health checks and monitoring
    - Multi-database support
    - Session lifecycle management
    - Background task session support
    """

    def __init__(
        self,
        database_url: str,
        service_name: str = "unknown",
        pool_size: int = 20,
        max_overflow: int = 30,
        pool_recycle: int = 3600,
        pool_pre_ping: bool = True,
        echo: bool = False,
        connect_timeout: int = 30,
        **engine_kwargs
    ):
        self.database_url = database_url

        # Configure SSL for PostgreSQL via connect_args instead of URL parameters
        # This avoids asyncpg parameter parsing issues
        self.use_ssl = False
        if "postgresql" in database_url.lower():
            # Check if SSL is already configured in URL or should be enabled
            if "ssl" not in database_url.lower() and "sslmode" not in database_url.lower():
                # Enable SSL for production, but allow override via URL
                self.use_ssl = True
                logger.info(f"SSL will be enabled for PostgreSQL connection: {service_name}")

        self.service_name = service_name
        self.pool_size = pool_size
        self.max_overflow = max_overflow

        # Configure pool for async engines
        # Note: SQLAlchemy 2.0 async engines automatically use AsyncAdaptedQueuePool
        # We should NOT specify poolclass for async engines unless using StaticPool for SQLite

        # Prepare connect_args for asyncpg
        connect_args = {"timeout": connect_timeout}

        # Add SSL configuration if needed (for asyncpg driver)
        if self.use_ssl and "asyncpg" in database_url.lower():
            # Create SSL context that doesn't verify certificates (for local development)
            # In production, you should use a proper SSL context with certificate verification
            ssl_context = ssl.create_default_context()
            ssl_context.check_hostname = False
            ssl_context.verify_mode = ssl.CERT_NONE
            connect_args["ssl"] = ssl_context
            logger.info(f"SSL enabled with relaxed verification for {service_name}")

        engine_config = {
            "echo": echo,
            "pool_pre_ping": pool_pre_ping,
            "pool_recycle": pool_recycle,
            "pool_size": pool_size,
            "max_overflow": max_overflow,
            "connect_args": connect_args,
            **engine_kwargs
        }

        # Only set poolclass for SQLite (requires StaticPool for async)
        if "sqlite" in database_url.lower():
            engine_config["poolclass"] = StaticPool
            # StaticPool does not accept sizing arguments
            engine_config.pop("pool_size", None)
            engine_config.pop("max_overflow", None)

        self.async_engine = create_async_engine(database_url, **engine_config)

        # Create session factory
        self.async_session_local = async_sessionmaker(
            self.async_engine,
            class_=AsyncSession,
            expire_on_commit=False,
            autoflush=False
        )

        logger.info(f"DatabaseManager initialized for {service_name}",
                    pool_size=pool_size,
                    max_overflow=max_overflow,
                    database_type=self._get_database_type())

    async def get_db(self):
        """Get database session for request handlers (FastAPI dependency)"""
        async with self.async_session_local() as session:
            try:
                logger.debug("Database session created for request")
                yield session
            except Exception as e:
                await session.rollback()

                # Don't wrap HTTPExceptions - let them pass through
                # Check by type name to avoid import dependencies
                exception_type = type(e).__name__
                if exception_type in ('HTTPException', 'StarletteHTTPException', 'RequestValidationError', 'ValidationError'):
                    logger.debug(f"Re-raising {exception_type}: {e}", service=self.service_name)
                    raise

                error_msg = str(e) if str(e) else f"{type(e).__name__}: {repr(e)}"
                logger.error(f"Database session error: {error_msg}", service=self.service_name)

                # Handle specific ASGI stream issues more gracefully
                if "EndOfStream" in str(type(e)) or "WouldBlock" in str(type(e)):
                    raise DatabaseError(f"Session error: Request stream disconnected ({type(e).__name__})")
                else:
                    raise DatabaseError(f"Session error: {error_msg}")
            finally:
                await session.close()
                logger.debug("Database session closed")
|
||||
|
||||
@asynccontextmanager
|
||||
async def get_background_session(self):
|
||||
"""
|
||||
Get database session for background tasks with auto-commit
|
||||
|
||||
Usage:
|
||||
async with database_manager.get_background_session() as session:
|
||||
# Your background task code here
|
||||
# Auto-commits on success, rolls back on exception
|
||||
"""
|
||||
async with self.async_session_local() as session:
|
||||
try:
|
||||
logger.debug("Background session created", service=self.service_name)
|
||||
yield session
|
||||
await session.commit()
|
||||
logger.debug("Background session committed")
|
||||
except Exception as e:
|
||||
await session.rollback()
|
||||
logger.error(f"Background task database error: {e}",
|
||||
service=self.service_name)
|
||||
raise DatabaseError(f"Background task failed: {str(e)}")
|
||||
finally:
|
||||
await session.close()
|
||||
logger.debug("Background session closed")
|
||||
|
||||
@asynccontextmanager
|
||||
async def get_session(self):
|
||||
"""Get a plain database session (no auto-commit)"""
|
||||
async with self.async_session_local() as session:
|
||||
try:
|
||||
yield session
|
||||
except Exception as e:
|
||||
await session.rollback()
|
||||
|
||||
# Don't wrap HTTPExceptions - let them pass through
|
||||
exception_type = type(e).__name__
|
||||
if exception_type in ('HTTPException', 'StarletteHTTPException'):
|
||||
logger.debug(f"Re-raising HTTPException: {e}", service=self.service_name)
|
||||
raise
|
||||
|
||||
logger.error(f"Session error: {e}", service=self.service_name)
|
||||
raise DatabaseError(f"Session error: {str(e)}")
|
||||
finally:
|
||||
await session.close()
|
||||
|
||||
# ===== TABLE MANAGEMENT =====
|
||||
|
||||
async def create_tables(self, metadata=None):
|
||||
"""Create database tables with enhanced error handling and transaction verification"""
|
||||
try:
|
||||
target_metadata = metadata or Base.metadata
|
||||
table_names = list(target_metadata.tables.keys())
|
||||
logger.info(f"Creating tables: {table_names}", service=self.service_name)
|
||||
|
||||
# Use explicit transaction with proper error handling
|
||||
async with self.async_engine.begin() as conn:
|
||||
try:
|
||||
# Create tables within the transaction
|
||||
await conn.run_sync(target_metadata.create_all, checkfirst=True)
|
||||
|
||||
# Verify transaction is not in error state
|
||||
# Try a simple query to ensure connection is still valid
|
||||
await conn.execute(text("SELECT 1"))
|
||||
|
||||
logger.info("Database tables creation transaction completed successfully",
|
||||
service=self.service_name, tables=table_names)
|
||||
|
||||
except Exception as create_error:
|
||||
logger.error(f"Error during table creation within transaction: {create_error}",
|
||||
service=self.service_name)
|
||||
# Re-raise to trigger transaction rollback
|
||||
raise
|
||||
|
||||
logger.info("Database tables created successfully", service=self.service_name)
|
||||
|
||||
except Exception as e:
|
||||
# Check if it's a "relation already exists" error which can be safely ignored
|
||||
error_str = str(e).lower()
|
||||
if "already exists" in error_str or "duplicate" in error_str:
|
||||
logger.warning(f"Some database objects already exist - continuing: {e}", service=self.service_name)
|
||||
logger.info("Database tables creation completed (some already existed)", service=self.service_name)
|
||||
else:
|
||||
logger.error(f"Failed to create tables: {e}", service=self.service_name)
|
||||
|
||||
# Check for specific transaction error indicators
|
||||
if any(indicator in error_str for indicator in [
|
||||
"transaction", "rollback", "aborted", "failed sql transaction"
|
||||
]):
|
||||
logger.error("Transaction-related error detected during table creation",
|
||||
service=self.service_name)
|
||||
|
||||
raise DatabaseError(f"Table creation failed: {str(e)}")
|
||||
|
||||
async def drop_tables(self, metadata=None):
|
||||
"""Drop database tables"""
|
||||
try:
|
||||
target_metadata = metadata or Base.metadata
|
||||
async with self.async_engine.begin() as conn:
|
||||
await conn.run_sync(target_metadata.drop_all)
|
||||
logger.info("Database tables dropped successfully", service=self.service_name)
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to drop tables: {e}", service=self.service_name)
|
||||
raise DatabaseError(f"Table drop failed: {str(e)}")
|
||||
|
||||
# ===== HEALTH CHECKS AND MONITORING =====
|
||||
|
||||
async def health_check(self) -> Dict[str, Any]:
|
||||
"""Comprehensive health check for the database"""
|
||||
try:
|
||||
async with self.get_session() as session:
|
||||
return await DatabaseUtils.execute_health_check(session)
|
||||
except Exception as e:
|
||||
logger.error(f"Health check failed: {e}", service=self.service_name)
|
||||
raise HealthCheckError(f"Health check failed: {str(e)}")
|
||||
|
||||
async def get_connection_info(self) -> Dict[str, Any]:
|
||||
"""Get database connection information"""
|
||||
try:
|
||||
pool = self.async_engine.pool
|
||||
return {
|
||||
"service_name": self.service_name,
|
||||
"database_type": self._get_database_type(),
|
||||
"pool_size": self.pool_size,
|
||||
"max_overflow": self.max_overflow,
|
||||
"current_checked_in": pool.checkedin() if pool else 0,
|
||||
"current_checked_out": pool.checkedout() if pool else 0,
|
||||
"current_overflow": pool.overflow() if pool else 0,
|
||||
"invalid_connections": getattr(pool, 'invalid', lambda: 0)() if pool else 0
|
||||
}
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to get connection info: {e}", service=self.service_name)
|
||||
return {"error": str(e)}
|
||||
|
||||
def _get_database_type(self) -> str:
|
||||
"""Get database type from URL"""
|
||||
return self.database_url.split("://")[0].lower() if "://" in self.database_url else "unknown"
|
||||
|
||||
# ===== CLEANUP AND MAINTENANCE =====
|
||||
|
||||
async def close_connections(self):
|
||||
"""Close all database connections"""
|
||||
try:
|
||||
await self.async_engine.dispose()
|
||||
logger.info("Database connections closed", service=self.service_name)
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to close connections: {e}", service=self.service_name)
|
||||
raise DatabaseError(f"Connection cleanup failed: {str(e)}")
|
||||
|
||||
async def execute_maintenance(self) -> Dict[str, Any]:
|
||||
"""Execute database maintenance tasks"""
|
||||
try:
|
||||
async with self.get_session() as session:
|
||||
return await DatabaseUtils.execute_maintenance(session)
|
||||
except Exception as e:
|
||||
logger.error(f"Maintenance failed: {e}", service=self.service_name)
|
||||
raise DatabaseError(f"Maintenance failed: {str(e)}")
|
||||
|
||||
# ===== UTILITY METHODS =====
|
||||
|
||||
async def test_connection(self) -> bool:
|
||||
"""Test database connectivity"""
|
||||
try:
|
||||
async with self.async_engine.begin() as conn:
|
||||
await conn.execute(text("SELECT 1"))
|
||||
logger.debug("Connection test successful", service=self.service_name)
|
||||
return True
|
||||
except Exception as e:
|
||||
logger.error(f"Connection test failed: {e}", service=self.service_name)
|
||||
return False
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return f"DatabaseManager(service='{self.service_name}', type='{self._get_database_type()}')"
|
||||
|
||||
async def execute(self, query: str, *args, **kwargs):
|
||||
"""
|
||||
Execute a raw SQL query with proper session management
|
||||
Note: Use this method carefully to avoid transaction conflicts
|
||||
"""
|
||||
from sqlalchemy import text
|
||||
|
||||
# Use a new session context to avoid conflicts with existing sessions
|
||||
async with self.get_session() as session:
|
||||
try:
|
||||
# Convert query to SQLAlchemy text object if it's a string
|
||||
if isinstance(query, str):
|
||||
query = text(query)
|
||||
|
||||
result = await session.execute(query, *args, **kwargs)
|
||||
# For UPDATE/DELETE operations that need to be committed
|
||||
if query.text.strip().upper().startswith(('UPDATE', 'DELETE', 'INSERT')):
|
||||
await session.commit()
|
||||
|
||||
return result
|
||||
except Exception as e:
|
||||
# Only rollback if it was a modifying operation
|
||||
if isinstance(query, str) and query.strip().upper().startswith(('UPDATE', 'DELETE', 'INSERT')):
|
||||
await session.rollback()
|
||||
logger.error("Database execute failed", error=str(e))
|
||||
raise
|
||||
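
# A minimal usage sketch for the raw `execute` helper above; the DSN, table,
# and column names are hypothetical and only illustrate the call shape.
async def _example_raw_execute() -> None:
    manager = create_database_manager(
        "postgresql+asyncpg://user:pass@localhost/app",  # hypothetical DSN
        service_name="example",
    )
    # Named bind parameters keep the query safe from SQL injection
    result = await manager.execute(
        "SELECT id, email FROM users WHERE active = :active",
        {"active": True},
    )
    for row in result:
        print(row.id, row.email)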


# ===== CONVENIENCE FUNCTIONS =====

def create_database_manager(
    database_url: str,
    service_name: str,
    **kwargs
) -> DatabaseManager:
    """Factory function to create DatabaseManager instances"""
    return DatabaseManager(database_url, service_name, **kwargs)
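
# A short sketch of the factory above (hypothetical DSN; the pool keyword
# overrides assume DatabaseManager forwards them to its engine configuration,
# as suggested by the pool_size / max_overflow fields in get_connection_info):
#
#     db_manager = create_database_manager(
#         "postgresql+asyncpg://user:pass@localhost/inventory",
#         service_name="inventory",
#         pool_size=10,
#         max_overflow=20,
#     )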


# ===== LEGACY COMPATIBILITY =====

# Keep backward compatibility for existing code
engine = None
AsyncSessionLocal = None


def init_legacy_compatibility(database_url: str):
    """Initialize legacy global variables for backward compatibility"""
    global engine, AsyncSessionLocal

    # Configure SSL for PostgreSQL if needed
    connect_args = {}
    if "postgresql" in database_url.lower() and "asyncpg" in database_url.lower():
        if "ssl" not in database_url.lower() and "sslmode" not in database_url.lower():
            # Create SSL context that doesn't verify certificates (for local development)
            ssl_context = ssl.create_default_context()
            ssl_context.check_hostname = False
            ssl_context.verify_mode = ssl.CERT_NONE
            connect_args["ssl"] = ssl_context
            logger.info("SSL enabled with relaxed verification for legacy database connection")

    engine = create_async_engine(
        database_url,
        echo=False,
        pool_pre_ping=True,
        pool_recycle=300,
        pool_size=20,
        max_overflow=30,
        connect_args=connect_args
    )

    AsyncSessionLocal = async_sessionmaker(
        engine,
        class_=AsyncSession,
        expire_on_commit=False
    )

    logger.warning("Using legacy database configuration - consider migrating to DatabaseManager")


async def get_legacy_db():
    """Legacy database session getter for backward compatibility"""
    if not AsyncSessionLocal:
        raise RuntimeError("Legacy database not initialized - call init_legacy_compatibility first")

    async with AsyncSessionLocal() as session:
        try:
            yield session
        except Exception as e:
            logger.error(f"Legacy database session error: {e}")
            await session.rollback()
            raise
        finally:
            await session.close()
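
# A minimal FastAPI wiring sketch for the legacy helpers above; the route
# path and query are hypothetical and only show the dependency pattern.
#
#     from fastapi import Depends, FastAPI
#     from sqlalchemy import text
#     from sqlalchemy.ext.asyncio import AsyncSession
#
#     app = FastAPI()
#     init_legacy_compatibility("postgresql+asyncpg://user:pass@localhost/app")
#
#     @app.get("/ping-db")
#     async def ping_db(session: AsyncSession = Depends(get_legacy_db)):
#         await session.execute(text("SELECT 1"))
#         return {"db": "ok"}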
52
shared/database/exceptions.py
Executable file
@@ -0,0 +1,52 @@
"""
Custom Database Exceptions
Provides consistent error handling across all microservices
"""

class DatabaseError(Exception):
    """Base exception for database-related errors"""

    def __init__(self, message: str, details: dict = None):
        self.message = message
        self.details = details or {}
        super().__init__(self.message)


class ConnectionError(DatabaseError):
    """Raised when database connection fails"""
    pass


class RecordNotFoundError(DatabaseError):
    """Raised when a requested record is not found"""
    pass


class DuplicateRecordError(DatabaseError):
    """Raised when trying to create a duplicate record"""
    pass


class ConstraintViolationError(DatabaseError):
    """Raised when database constraints are violated"""
    pass


class TransactionError(DatabaseError):
    """Raised when transaction operations fail"""
    pass


class ValidationError(DatabaseError):
    """Raised when data validation fails before database operations"""
    pass


class MigrationError(DatabaseError):
    """Raised when database migration operations fail"""
    pass


class HealthCheckError(DatabaseError):
    """Raised when database health checks fail"""
    pass
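
# A brief sketch of how services are expected to branch on this hierarchy;
# the repository call is hypothetical (any BaseRepository method applies):
#
#     try:
#         user = await user_repo.get_by_id(user_id)
#     except RecordNotFoundError:
#         ...  # map to HTTP 404
#     except DuplicateRecordError:
#         ...  # map to HTTP 409
#     except DatabaseError as e:
#         logger.error("Unhandled database failure", error=e.message, **e.details)
#         raise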
381
shared/database/init_manager.py
Executable file
@@ -0,0 +1,381 @@
"""
Database Initialization Manager

Handles Alembic-based migrations with autogenerate support:
1. First-time deployment: Generate initial migration from models
2. Subsequent deployments: Run pending migrations
3. Development reset: Drop tables and regenerate migrations
"""

import os
import asyncio
import structlog
from typing import Optional, List, Dict, Any
from pathlib import Path
from sqlalchemy import text, inspect
from sqlalchemy.ext.asyncio import AsyncSession
from alembic.config import Config
from alembic import command
from alembic.runtime.migration import MigrationContext
from alembic.script import ScriptDirectory

from .base import DatabaseManager, Base

logger = structlog.get_logger()


class DatabaseInitManager:
    """
    Manages database initialization using Alembic migrations exclusively.

    Two modes:
    1. Migration mode (for migration jobs): Runs alembic upgrade head
    2. Verification mode (for services): Only verifies database is ready
    """

    def __init__(
        self,
        database_manager: DatabaseManager,
        service_name: str,
        alembic_ini_path: Optional[str] = None,
        models_module: Optional[str] = None,
        verify_only: bool = True,  # Default: services only verify
        force_recreate: bool = False
    ):
        self.database_manager = database_manager
        self.service_name = service_name
        self.alembic_ini_path = alembic_ini_path
        self.models_module = models_module
        self.verify_only = verify_only
        self.force_recreate = force_recreate
        self.logger = logger.bind(service=service_name)

    async def initialize_database(self) -> Dict[str, Any]:
        """
        Main initialization method.

        Two modes:
        1. verify_only=True (default, for services):
           - Verifies database is ready
           - Checks tables exist
           - Checks alembic_version exists
           - DOES NOT run migrations

        2. verify_only=False (for migration jobs only):
           - Runs alembic upgrade head
           - Applies pending migrations
           - Can force recreate if needed
        """
        if self.verify_only:
            self.logger.info("Database verification mode - checking database is ready")
            return await self._verify_database_ready()
        else:
            self.logger.info("Migration mode - running database migrations")
            return await self._run_migrations_mode()

    async def _verify_database_ready(self) -> Dict[str, Any]:
        """
        Verify database is ready for service startup.
        Services should NOT run migrations - only verify they've been applied.
        """
        try:
            # Check alembic configuration exists
            if not self.alembic_ini_path or not os.path.exists(self.alembic_ini_path):
                raise Exception(f"Alembic configuration not found at {self.alembic_ini_path}")

            # Check database state
            db_state = await self._check_database_state()
            self.logger.info("Database state checked", state=db_state)

            # Verify migrations exist
            if not db_state["has_migrations"]:
                raise Exception(
                    f"No migration files found for {self.service_name}. "
                    f"Migrations must be generated and included in the Docker image."
                )

            # Verify database is not empty
            if db_state["is_empty"]:
                raise Exception(
                    "Database is empty. Migration job must run before service startup. "
                    "Ensure migration job completes successfully before starting services."
                )

            # Verify alembic_version table exists
            if not db_state["has_alembic_version"]:
                raise Exception(
                    "No alembic_version table found. Migration job must run before service startup."
                )

            # Verify current revision exists
            if not db_state["current_revision"]:
                raise Exception(
                    "No current migration revision found. Database may not be properly initialized."
                )

            self.logger.info(
                "Database verification successful",
                migration_count=db_state["migration_count"],
                current_revision=db_state["current_revision"],
                table_count=len(db_state["existing_tables"])
            )

            return {
                "action": "verified",
                "message": "Database verified successfully - ready for service",
                "current_revision": db_state["current_revision"],
                "migration_count": db_state["migration_count"],
                "table_count": len(db_state["existing_tables"])
            }

        except Exception as e:
            self.logger.error("Database verification failed", error=str(e))
            raise

    async def _run_migrations_mode(self) -> Dict[str, Any]:
        """
        Run migrations mode - for migration jobs only.
        """
        try:
            if not self.alembic_ini_path or not os.path.exists(self.alembic_ini_path):
                raise Exception(f"Alembic configuration not found at {self.alembic_ini_path}")

            # Check current database state
            db_state = await self._check_database_state()
            self.logger.info("Database state checked", state=db_state)

            # Handle force recreate
            if self.force_recreate:
                return await self._handle_force_recreate()

            # Check migrations exist
            if not db_state["has_migrations"]:
                raise Exception(
                    f"No migration files found for {self.service_name}. "
                    f"Generate migrations using regenerate_migrations_k8s.sh script."
                )

            # Run migrations
            result = await self._handle_run_migrations()

            self.logger.info("Migration mode completed", result=result)
            return result

        except Exception as e:
            self.logger.error("Migration mode failed", error=str(e))
            raise

    async def _check_database_state(self) -> Dict[str, Any]:
        """Check the current state of migrations"""
        state = {
            "has_migrations": False,
            "migration_count": 0,
            "is_empty": False,
            "existing_tables": [],
            "has_alembic_version": False,
            "current_revision": None
        }

        try:
            # Check if migration files exist
            migrations_dir = self._get_migrations_versions_dir()
            if migrations_dir.exists():
                migration_files = [
                    f for f in migrations_dir.glob("*.py")
                    if not f.name.startswith("_")
                ]
                state["migration_count"] = len(migration_files)
                state["has_migrations"] = len(migration_files) > 0
                self.logger.info("Found migration files", count=len(migration_files))

            # Check database tables
            async with self.database_manager.get_session() as session:
                existing_tables = await self._get_existing_tables(session)
                state["existing_tables"] = existing_tables
                state["is_empty"] = len(existing_tables) == 0

                # Check alembic_version table
                if "alembic_version" in existing_tables:
                    state["has_alembic_version"] = True
                    result = await session.execute(text("SELECT version_num FROM alembic_version"))
                    state["current_revision"] = result.scalar()

        except Exception as e:
            self.logger.warning("Error checking database state", error=str(e))

        return state

    async def _handle_run_migrations(self) -> Dict[str, Any]:
        """Handle normal migration scenario - run pending migrations"""
        self.logger.info("Running pending migrations")

        try:
            await self._run_migrations()

            return {
                "action": "migrations_applied",
                "message": "Pending migrations applied successfully"
            }

        except Exception as e:
            self.logger.error("Failed to run migrations", error=str(e))
            raise

    async def _handle_force_recreate(self) -> Dict[str, Any]:
        """Handle development reset scenario - drop and recreate tables using existing migrations"""
        self.logger.info("Force recreate: dropping tables and rerunning migrations")

        try:
            # Drop all tables
            await self._drop_all_tables()

            # Apply migrations from scratch
            await self._run_migrations()

            return {
                "action": "force_recreate",
                "tables_dropped": True,
                "migrations_applied": True,
                "message": "Database recreated from existing migrations"
            }

        except Exception as e:
            self.logger.error("Failed to force recreate", error=str(e))
            raise

    async def _run_migrations(self):
        """Run pending Alembic migrations (upgrade head)"""
        try:
            def run_alembic_upgrade():
                # Ensure we're in the correct working directory
                alembic_dir = Path(self.alembic_ini_path).parent
                original_cwd = os.getcwd()

                try:
                    os.chdir(alembic_dir)

                    alembic_cfg = Config(self.alembic_ini_path)

                    # Set the SQLAlchemy URL from the database manager
                    alembic_cfg.set_main_option("sqlalchemy.url", str(self.database_manager.database_url))

                    # Run upgrade
                    command.upgrade(alembic_cfg, "head")

                finally:
                    os.chdir(original_cwd)

            # Run the synchronous Alembic command in an executor to avoid blocking
            await asyncio.get_running_loop().run_in_executor(None, run_alembic_upgrade)
            self.logger.info("Migrations applied successfully")

        except Exception as e:
            self.logger.error("Failed to run migrations", error=str(e))
            raise

    async def _drop_all_tables(self):
        """Drop all tables (for development reset)"""
        try:
            async with self.database_manager.async_engine.begin() as conn:
                await conn.run_sync(Base.metadata.drop_all)
            self.logger.info("All tables dropped")
        except Exception as e:
            self.logger.error("Failed to drop tables", error=str(e))
            raise

    def _get_migrations_versions_dir(self) -> Path:
        """Get the migrations/versions directory path"""
        alembic_path = Path(self.alembic_ini_path).parent
        return alembic_path / "migrations" / "versions"

    async def _get_existing_tables(self, session: AsyncSession) -> List[str]:
        """Get list of existing tables in the database"""
        def get_tables_sync(connection):
            insp = inspect(connection)
            return insp.get_table_names()

        connection = await session.connection()
        return await connection.run_sync(get_tables_sync)


def create_init_manager(
    database_manager: DatabaseManager,
    service_name: str,
    service_path: Optional[str] = None,
    verify_only: bool = True,
    force_recreate: bool = False
) -> DatabaseInitManager:
    """
    Factory function to create a DatabaseInitManager with auto-detected paths

    Args:
        database_manager: DatabaseManager instance
        service_name: Name of the service
        service_path: Path to service directory (auto-detected if None)
        verify_only: True = verify DB ready (services), False = run migrations (jobs only)
        force_recreate: Force recreate tables (requires verify_only=False)
    """
    # Auto-detect paths if not provided
    if service_path is None:
        # Try Docker container path first (service files at root level)
        if os.path.exists("alembic.ini"):
            service_path = "."
        else:
            # Fallback to development path
            service_path = f"services/{service_name}"

    # Set up paths based on environment
    if service_path == ".":
        # Docker container environment
        alembic_ini_path = "alembic.ini"
        models_module = "app.models"
    else:
        # Development environment
        alembic_ini_path = f"{service_path}/alembic.ini"
        models_module = f"services.{service_name}.app.models"

    # Check if paths exist
    if not os.path.exists(alembic_ini_path):
        logger.warning("Alembic config not found", path=alembic_ini_path)
        alembic_ini_path = None

    return DatabaseInitManager(
        database_manager=database_manager,
        service_name=service_name,
        alembic_ini_path=alembic_ini_path,
        models_module=models_module,
        verify_only=verify_only,
        force_recreate=force_recreate
    )


async def initialize_service_database(
    database_manager: DatabaseManager,
    service_name: str,
    verify_only: bool = True,
    force_recreate: bool = False
) -> Dict[str, Any]:
    """
    Convenience function for database initialization

    Args:
        database_manager: DatabaseManager instance
        service_name: Name of the service
        verify_only: True = verify DB ready (default, services), False = run migrations (jobs only)
        force_recreate: Force recreate tables (requires verify_only=False)

    Returns:
        Dict with initialization results
    """
    init_manager = create_init_manager(
        database_manager=database_manager,
        service_name=service_name,
        verify_only=verify_only,
        force_recreate=force_recreate
    )

    return await init_manager.initialize_database()
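
# A minimal startup/job sketch for the helpers above; the service name and
# DSN are hypothetical. Services verify, a dedicated migration job migrates.
#
#     manager = create_database_manager(
#         "postgresql+asyncpg://user:pass@localhost/orders", "orders"
#     )
#
#     # In the service entrypoint: fail fast if migrations haven't run yet
#     await initialize_service_database(manager, "orders", verify_only=True)
#
#     # In the migration job: actually apply pending migrations
#     await initialize_service_database(manager, "orders", verify_only=False)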
428
shared/database/repository.py
Executable file
@@ -0,0 +1,428 @@
"""
Base Repository Pattern for Database Operations
Provides generic CRUD operations, query building, and caching
"""

from typing import Optional, List, Dict, Any, TypeVar, Generic, Type, Union
from abc import ABC, abstractmethod
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.orm import declarative_base
from sqlalchemy import select, update, delete, and_, or_, desc, asc, func, text
from sqlalchemy.exc import IntegrityError, SQLAlchemyError
from contextlib import asynccontextmanager
import structlog

from .exceptions import (
    DatabaseError,
    RecordNotFoundError,
    DuplicateRecordError,
    ConstraintViolationError
)

logger = structlog.get_logger()

# Type variables for generic repository
Model = TypeVar('Model', bound=declarative_base())
CreateSchema = TypeVar('CreateSchema')
UpdateSchema = TypeVar('UpdateSchema')


class BaseRepository(Generic[Model, CreateSchema, UpdateSchema], ABC):
    """
    Base repository providing generic CRUD operations

    Args:
        model: SQLAlchemy model class
        session: Database session
        cache_ttl: Cache time-to-live in seconds (optional)
    """

    def __init__(self, model: Type[Model], session: AsyncSession, cache_ttl: Optional[int] = None):
        self.model = model
        self.session = session
        self.cache_ttl = cache_ttl
        self._cache = {} if cache_ttl else None

    # ===== CORE CRUD OPERATIONS =====

    async def create(self, obj_in: CreateSchema, **kwargs) -> Model:
        """Create a new record"""
        try:
            # Convert schema to dict if needed
            if hasattr(obj_in, 'model_dump'):
                obj_data = obj_in.model_dump()
            elif hasattr(obj_in, 'dict'):
                obj_data = obj_in.dict()
            else:
                obj_data = obj_in

            # Merge with additional kwargs
            obj_data.update(kwargs)

            db_obj = self.model(**obj_data)
            self.session.add(db_obj)
            await self.session.flush()  # Get ID without committing
            await self.session.refresh(db_obj)

            logger.debug(f"Created {self.model.__name__}", record_id=getattr(db_obj, 'id', None))
            return db_obj

        except IntegrityError as e:
            await self.session.rollback()
            logger.error(f"Integrity error creating {self.model.__name__}", error=str(e))
            raise DuplicateRecordError("Record with provided data already exists")
        except SQLAlchemyError as e:
            await self.session.rollback()
            logger.error(f"Database error creating {self.model.__name__}", error=str(e))
            raise DatabaseError(f"Failed to create record: {str(e)}")

    async def get_by_id(self, record_id: Any) -> Optional[Model]:
        """Get record by ID with optional caching"""
        cache_key = f"{self.model.__name__}:{record_id}"

        # Check cache first
        if self._cache and cache_key in self._cache:
            logger.debug(f"Cache hit for {cache_key}")
            return self._cache[cache_key]

        try:
            result = await self.session.execute(
                select(self.model).where(self.model.id == record_id)
            )
            record = result.scalar_one_or_none()

            # Cache the result (an empty cache dict is falsy, so compare
            # against None to make sure the cache actually gets populated)
            if self._cache is not None and record:
                self._cache[cache_key] = record

            return record

        except SQLAlchemyError as e:
            logger.error(f"Database error getting {self.model.__name__} by ID",
                         record_id=record_id, error=str(e))
            raise DatabaseError(f"Failed to get record: {str(e)}")

    async def get_by_field(self, field_name: str, value: Any) -> Optional[Model]:
        """Get record by specific field"""
        try:
            result = await self.session.execute(
                select(self.model).where(getattr(self.model, field_name) == value)
            )
            return result.scalar_one_or_none()

        except AttributeError:
            raise ValueError(f"Field '{field_name}' not found in {self.model.__name__}")
        except SQLAlchemyError as e:
            logger.error(f"Database error getting {self.model.__name__} by {field_name}",
                         value=value, error=str(e))
            raise DatabaseError(f"Failed to get record: {str(e)}")

    async def get_multi(
        self,
        skip: int = 0,
        limit: int = 100,
        order_by: Optional[str] = None,
        order_desc: bool = False,
        filters: Optional[Dict[str, Any]] = None
    ) -> List[Model]:
        """Get multiple records with pagination, sorting, and filtering"""
        try:
            query = select(self.model)

            # Apply filters
            if filters:
                conditions = []
                for field, value in filters.items():
                    if hasattr(self.model, field):
                        if isinstance(value, list):
                            conditions.append(getattr(self.model, field).in_(value))
                        else:
                            conditions.append(getattr(self.model, field) == value)

                if conditions:
                    query = query.where(and_(*conditions))

            # Apply ordering
            if order_by and hasattr(self.model, order_by):
                order_field = getattr(self.model, order_by)
                if order_desc:
                    query = query.order_by(desc(order_field))
                else:
                    query = query.order_by(asc(order_field))

            # Apply pagination
            query = query.offset(skip).limit(limit)

            result = await self.session.execute(query)
            return result.scalars().all()

        except SQLAlchemyError as e:
            logger.error(f"Database error getting multiple {self.model.__name__} records",
                         error=str(e))
            raise DatabaseError(f"Failed to get records: {str(e)}")

    async def update(self, record_id: Any, obj_in: UpdateSchema, **kwargs) -> Optional[Model]:
        """Update record by ID"""
        try:
            # Convert schema to dict if needed
            if hasattr(obj_in, 'model_dump'):
                update_data = obj_in.model_dump(exclude_unset=True)
            elif hasattr(obj_in, 'dict'):
                update_data = obj_in.dict(exclude_unset=True)
            else:
                update_data = obj_in

            # Merge with additional kwargs
            update_data.update(kwargs)

            # Remove None values
            update_data = {k: v for k, v in update_data.items() if v is not None}

            if not update_data:
                logger.warning(f"No data to update for {self.model.__name__}", record_id=record_id)
                return await self.get_by_id(record_id)

            # Perform update
            result = await self.session.execute(
                update(self.model)
                .where(self.model.id == record_id)
                .values(**update_data)
                .returning(self.model)
            )

            updated_record = result.scalar_one_or_none()

            if not updated_record:
                raise RecordNotFoundError(f"{self.model.__name__} with id {record_id} not found")

            # Clear cache
            if self._cache:
                cache_key = f"{self.model.__name__}:{record_id}"
                self._cache.pop(cache_key, None)

            logger.debug(f"Updated {self.model.__name__}", record_id=record_id)
            return updated_record

        except IntegrityError as e:
            await self.session.rollback()
            logger.error(f"Integrity error updating {self.model.__name__}",
                         record_id=record_id, error=str(e))
            raise ConstraintViolationError("Update violates database constraints")
        except SQLAlchemyError as e:
            await self.session.rollback()
            logger.error(f"Database error updating {self.model.__name__}",
                         record_id=record_id, error=str(e))
            raise DatabaseError(f"Failed to update record: {str(e)}")

    async def delete(self, record_id: Any) -> bool:
        """Delete record by ID"""
        try:
            result = await self.session.execute(
                delete(self.model).where(self.model.id == record_id)
            )

            deleted_count = result.rowcount

            if deleted_count == 0:
                raise RecordNotFoundError(f"{self.model.__name__} with id {record_id} not found")

            # Clear cache
            if self._cache:
                cache_key = f"{self.model.__name__}:{record_id}"
                self._cache.pop(cache_key, None)

            logger.debug(f"Deleted {self.model.__name__}", record_id=record_id)
            return True

        except SQLAlchemyError as e:
            await self.session.rollback()
            logger.error(f"Database error deleting {self.model.__name__}",
                         record_id=record_id, error=str(e))
            raise DatabaseError(f"Failed to delete record: {str(e)}")

    # ===== ADVANCED QUERY OPERATIONS =====

    async def count(self, filters: Optional[Dict[str, Any]] = None) -> int:
        """Count records with optional filters"""
        try:
            query = select(func.count(self.model.id))

            if filters:
                conditions = []
                for field, value in filters.items():
                    if hasattr(self.model, field):
                        if isinstance(value, list):
                            conditions.append(getattr(self.model, field).in_(value))
                        else:
                            conditions.append(getattr(self.model, field) == value)

                if conditions:
                    query = query.where(and_(*conditions))

            result = await self.session.execute(query)
            return result.scalar() or 0

        except SQLAlchemyError as e:
            logger.error(f"Database error counting {self.model.__name__} records", error=str(e))
            raise DatabaseError(f"Failed to count records: {str(e)}")

    async def exists(self, record_id: Any) -> bool:
        """Check if record exists by ID"""
        try:
            result = await self.session.execute(
                select(func.count(self.model.id)).where(self.model.id == record_id)
            )
            count = result.scalar() or 0
            return count > 0

        except SQLAlchemyError as e:
            logger.error(f"Database error checking existence of {self.model.__name__}",
                         record_id=record_id, error=str(e))
            raise DatabaseError(f"Failed to check record existence: {str(e)}")

    async def bulk_create(self, objects: List[CreateSchema]) -> List[Model]:
        """Create multiple records in bulk"""
        try:
            if not objects:
                return []

            db_objects = []
            for obj_in in objects:
                if hasattr(obj_in, 'model_dump'):
                    obj_data = obj_in.model_dump()
                elif hasattr(obj_in, 'dict'):
                    obj_data = obj_in.dict()
                else:
                    obj_data = obj_in

                db_objects.append(self.model(**obj_data))

            self.session.add_all(db_objects)
            await self.session.flush()

            # Skip expensive individual refresh operations for large datasets
            # Only refresh if we have a small number of objects
            if len(db_objects) <= 100:
                for db_obj in db_objects:
                    await self.session.refresh(db_obj)
            else:
                # For large datasets, just log without refresh to prevent memory issues
                logger.debug(f"Skipped individual refresh for large bulk operation ({len(db_objects)} records)")

            logger.debug(f"Bulk created {len(db_objects)} {self.model.__name__} records")
            return db_objects

        except IntegrityError as e:
            await self.session.rollback()
            logger.error(f"Integrity error bulk creating {self.model.__name__}", error=str(e))
            raise DuplicateRecordError("One or more records already exist")
        except SQLAlchemyError as e:
            await self.session.rollback()
            logger.error(f"Database error bulk creating {self.model.__name__}", error=str(e))
            raise DatabaseError(f"Failed to create records: {str(e)}")

    async def bulk_update(self, updates: List[Dict[str, Any]]) -> int:
        """Update multiple records in bulk"""
        try:
            if not updates:
                return 0

            # Issue one UPDATE per entry; collect the ids for cache
            # invalidation since they are popped out of each update dict
            updated_ids = []
            for update_data in updates:
                if 'id' not in update_data:
                    raise ValueError("Each update must include 'id' field")

                record_id = update_data.pop('id')
                updated_ids.append(record_id)
                await self.session.execute(
                    update(self.model)
                    .where(self.model.id == record_id)
                    .values(**update_data)
                )

            # Clear relevant cache entries
            if self._cache:
                for record_id in updated_ids:
                    cache_key = f"{self.model.__name__}:{record_id}"
                    self._cache.pop(cache_key, None)

            logger.debug(f"Bulk updated {len(updates)} {self.model.__name__} records")
            return len(updates)

        except SQLAlchemyError as e:
            await self.session.rollback()
            logger.error(f"Database error bulk updating {self.model.__name__}", error=str(e))
            raise DatabaseError(f"Failed to update records: {str(e)}")

    # ===== SEARCH AND QUERY BUILDING =====

    async def search(
        self,
        search_term: str,
        search_fields: List[str],
        skip: int = 0,
        limit: int = 100
    ) -> List[Model]:
        """Search records across multiple fields"""
        try:
            conditions = []
            for field in search_fields:
                if hasattr(self.model, field):
                    field_obj = getattr(self.model, field)
                    # Case-insensitive partial match
                    conditions.append(field_obj.ilike(f"%{search_term}%"))

            if not conditions:
                logger.warning(f"No valid search fields provided for {self.model.__name__}")
                return []

            query = select(self.model).where(or_(*conditions)).offset(skip).limit(limit)
            result = await self.session.execute(query)
            return result.scalars().all()

        except SQLAlchemyError as e:
            logger.error(f"Database error searching {self.model.__name__}",
                         search_term=search_term, error=str(e))
            raise DatabaseError(f"Failed to search records: {str(e)}")

    async def execute_raw_query(self, query: str, params: Optional[Dict[str, Any]] = None) -> Any:
        """Execute raw SQL query (use with caution)"""
        try:
            result = await self.session.execute(text(query), params or {})
            return result

        except SQLAlchemyError as e:
            logger.error("Database error executing raw query", query=query, error=str(e))
            raise DatabaseError(f"Failed to execute query: {str(e)}")

    # ===== CACHE MANAGEMENT =====

    def clear_cache(self, record_id: Optional[Any] = None):
        """Clear cache for specific record or all records"""
        if not self._cache:
            return

        if record_id:
            cache_key = f"{self.model.__name__}:{record_id}"
            self._cache.pop(cache_key, None)
        else:
            # Clear all cache entries for this model
            keys_to_remove = [k for k in self._cache.keys() if k.startswith(f"{self.model.__name__}:")]
            for key in keys_to_remove:
                self._cache.pop(key, None)

        logger.debug(f"Cleared cache for {self.model.__name__}", record_id=record_id)

    # ===== CONTEXT MANAGERS =====

    @asynccontextmanager
    async def transaction(self):
        """Context manager for explicit transaction handling"""
        try:
            yield self.session
            await self.session.commit()
        except Exception as e:
            await self.session.rollback()
            logger.error(f"Transaction failed for {self.model.__name__}", error=str(e))
            raise
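
# A brief sketch of a concrete repository built on BaseRepository; the User
# model and the Pydantic schema names are hypothetical.
#
#     class UserRepository(BaseRepository[User, UserCreate, UserUpdate]):
#         async def get_by_email(self, email: str) -> Optional[User]:
#             return await self.get_by_field("email", email)
#
#     repo = UserRepository(User, session, cache_ttl=60)
#     user = await repo.create(UserCreate(email="a@example.com"))
#     page = await repo.get_multi(limit=20, order_by="created_at", order_desc=True)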
306
shared/database/transactions.py
Executable file
@@ -0,0 +1,306 @@
"""
Transaction Decorators and Context Managers
Provides convenient transaction handling for service methods
"""

from functools import wraps
from typing import Callable, Any, Optional
from contextlib import asynccontextmanager
import structlog

from .base import DatabaseManager
from .unit_of_work import UnitOfWork
from .exceptions import TransactionError

logger = structlog.get_logger()


def transactional(database_manager: DatabaseManager, auto_commit: bool = True):
    """
    Decorator that wraps a method in a database transaction

    Args:
        database_manager: DatabaseManager instance
        auto_commit: Whether to auto-commit on success

    Usage:
        @transactional(database_manager)
        async def create_user_with_profile(self, user_data, profile_data):
            # Your business logic here
            # Transaction is automatically managed
            pass
    """
    def decorator(func: Callable) -> Callable:
        @wraps(func)
        async def wrapper(*args, **kwargs):
            async with database_manager.get_background_session() as session:
                try:
                    # Inject session into kwargs if not present
                    if 'session' not in kwargs:
                        kwargs['session'] = session

                    result = await func(*args, **kwargs)

                    # Session is auto-committed by get_background_session
                    logger.debug(f"Transaction completed successfully for {func.__name__}")
                    return result

                except Exception as e:
                    # Session is auto-rolled back by get_background_session
                    logger.error(f"Transaction failed for {func.__name__}", error=str(e))
                    raise TransactionError(f"Transaction failed: {str(e)}")

        return wrapper
    return decorator


def unit_of_work_transactional(database_manager: DatabaseManager):
    """
    Decorator that provides Unit of Work pattern for complex operations

    Usage:
        @unit_of_work_transactional(database_manager)
        async def complex_business_operation(self, data, uow: UnitOfWork):
            user_repo = uow.register_repository("users", UserRepository, User)
            sales_repo = uow.register_repository("sales", SalesRepository, SalesData)

            user = await user_repo.create(data.user)
            sale = await sales_repo.create(data.sale)

            # UnitOfWork automatically commits
            return {"user": user, "sale": sale}
    """
    def decorator(func: Callable) -> Callable:
        @wraps(func)
        async def wrapper(*args, **kwargs):
            async with database_manager.get_background_session() as session:
                async with UnitOfWork(session, auto_commit=True) as uow:
                    try:
                        # Inject UnitOfWork into kwargs
                        kwargs['uow'] = uow

                        result = await func(*args, **kwargs)

                        logger.debug(f"Unit of Work transaction completed for {func.__name__}")
                        return result

                    except Exception as e:
                        logger.error(f"Unit of Work transaction failed for {func.__name__}",
                                     error=str(e))
                        raise TransactionError(f"Transaction failed: {str(e)}")

        return wrapper
    return decorator


@asynccontextmanager
async def managed_transaction(database_manager: DatabaseManager):
    """
    Context manager for explicit transaction control

    Usage:
        async with managed_transaction(database_manager) as session:
            # Your database operations here
            user = User(name="John")
            session.add(user)
            # Auto-commits on exit, rolls back on exception
    """
    async with database_manager.get_background_session() as session:
        try:
            logger.debug("Starting managed transaction")
            yield session
            logger.debug("Managed transaction completed successfully")
        except Exception as e:
            logger.error("Managed transaction failed", error=str(e))
            raise


@asynccontextmanager
async def managed_unit_of_work(database_manager: DatabaseManager, event_publisher=None):
    """
    Context manager for explicit Unit of Work control

    Usage:
        async with managed_unit_of_work(database_manager) as uow:
            user_repo = uow.register_repository("users", UserRepository, User)
            user = await user_repo.create(user_data)
            await uow.commit()
    """
    async with database_manager.get_background_session() as session:
        uow = UnitOfWork(session)
        try:
            logger.debug("Starting managed Unit of Work")
            yield uow

            if not uow._committed:
                await uow.commit()

            logger.debug("Managed Unit of Work completed successfully")

        except Exception as e:
            if not uow._rolled_back:
                await uow.rollback()
            logger.error("Managed Unit of Work failed", error=str(e))
            raise


class TransactionManager:
    """
    Advanced transaction manager for complex scenarios

    Usage:
        tx_manager = TransactionManager(database_manager)

        async with tx_manager.create_transaction() as tx:
            await tx.execute_in_transaction(my_business_logic, data)
    """

    def __init__(self, database_manager: DatabaseManager):
        self.database_manager = database_manager

    @asynccontextmanager
    async def create_transaction(self, isolation_level: Optional[str] = None):
        """Create a transaction with optional isolation level"""
        async with self.database_manager.get_background_session() as session:
            transaction_context = TransactionContext(session, isolation_level)
            try:
                yield transaction_context
            except Exception as e:
                logger.error("Transaction manager failed", error=str(e))
                raise

    async def execute_with_retry(
        self,
        func: Callable,
        max_retries: int = 3,
        *args,
        **kwargs
    ):
        """Execute function with transaction retry on failure"""
        last_error = None

        for attempt in range(max_retries):
            try:
                async with managed_transaction(self.database_manager) as session:
                    kwargs['session'] = session
                    result = await func(*args, **kwargs)
                    logger.debug(f"Transaction succeeded on attempt {attempt + 1}")
                    return result

            except Exception as e:
                last_error = e
                logger.warning(f"Transaction attempt {attempt + 1} failed",
                               error=str(e), remaining_attempts=max_retries - attempt - 1)

                if attempt == max_retries - 1:
                    break

        logger.error(f"All transaction attempts failed after {max_retries} tries")
        raise TransactionError(f"Transaction failed after {max_retries} retries: {str(last_error)}")


class TransactionContext:
    """Context for managing individual transactions"""

    def __init__(self, session, isolation_level: Optional[str] = None):
        self.session = session
        self.isolation_level = isolation_level

    async def execute_in_transaction(self, func: Callable, *args, **kwargs):
        """Execute function within the transaction context"""
        try:
            kwargs['session'] = self.session
            result = await func(*args, **kwargs)
            return result
        except Exception as e:
            logger.error("Function execution failed in transaction context", error=str(e))
            raise


# ===== UTILITY FUNCTIONS =====

async def run_in_transaction(database_manager: DatabaseManager, func: Callable, *args, **kwargs):
    """
    Utility function to run any async function in a transaction

    Usage:
        result = await run_in_transaction(
            database_manager,
            my_async_function,
            arg1, arg2,
            kwarg1="value"
        )
    """
    async with managed_transaction(database_manager) as session:
        kwargs['session'] = session
        return await func(*args, **kwargs)


async def run_with_unit_of_work(
    database_manager: DatabaseManager,
    func: Callable,
    *args,
    **kwargs
):
    """
    Utility function to run any async function with Unit of Work

    Usage:
        result = await run_with_unit_of_work(
            database_manager,
            my_complex_function,
            arg1, arg2
        )
    """
    async with managed_unit_of_work(database_manager) as uow:
        kwargs['uow'] = uow
        return await func(*args, **kwargs)


# ===== BATCH OPERATIONS =====

@asynccontextmanager
async def batch_operation(database_manager: DatabaseManager, batch_size: int = 1000):
    """
    Context manager for batch operations with automatic commit batching

    Usage:
        async with batch_operation(database_manager, batch_size=500) as batch:
            for item in large_dataset:
                await batch.add_operation(create_record, item)
    """
    async with database_manager.get_background_session() as session:
        batch_context = BatchOperationContext(session, batch_size)
        try:
            yield batch_context
            await batch_context.flush_remaining()
        except Exception as e:
            logger.error("Batch operation failed", error=str(e))
            raise


class BatchOperationContext:
    """Context for managing batch database operations"""

    def __init__(self, session, batch_size: int):
        self.session = session
        self.batch_size = batch_size
        self.operation_count = 0

    async def add_operation(self, func: Callable, *args, **kwargs):
        """Add operation to batch"""
        kwargs['session'] = self.session
        await func(*args, **kwargs)

        self.operation_count += 1

        if self.operation_count >= self.batch_size:
            await self.session.commit()
            self.operation_count = 0
            logger.debug(f"Batch committed at {self.batch_size} operations")

    async def flush_remaining(self):
        """Commit any remaining operations"""
        if self.operation_count > 0:
            await self.session.commit()
            logger.debug(f"Final batch committed with {self.operation_count} operations")
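
# A minimal sketch of the run_in_transaction utility above; the business
# function is hypothetical and receives the managed session via kwargs.
#
#     from sqlalchemy import text
#
#     async def deactivate_user(user_id: int, session=None):
#         await session.execute(
#             text("UPDATE users SET active = false WHERE id = :id"), {"id": user_id}
#         )
#
#     await run_in_transaction(database_manager, deactivate_user, 42)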
304
shared/database/unit_of_work.py
Executable file
@@ -0,0 +1,304 @@
"""
Unit of Work Pattern Implementation
Manages transactions across multiple repositories with event publishing
"""

from typing import Dict, Any, List, Optional, Type, TypeVar, Generic
from contextlib import asynccontextmanager
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.exc import SQLAlchemyError
from abc import ABC, abstractmethod
import structlog

from .repository import BaseRepository
from .exceptions import TransactionError

logger = structlog.get_logger()

Model = TypeVar('Model')
Repository = TypeVar('Repository', bound=BaseRepository)


class BaseEvent(ABC):
    """Base class for domain events"""

    def __init__(self, event_type: str, data: Dict[str, Any]):
        self.event_type = event_type
        self.data = data

    @abstractmethod
    def to_dict(self) -> Dict[str, Any]:
        """Convert event to dictionary for publishing"""
        pass


class DomainEvent(BaseEvent):
    """Standard domain event implementation"""

    def to_dict(self) -> Dict[str, Any]:
        return {
            "event_type": self.event_type,
            "data": self.data
        }


class UnitOfWork:
    """
    Unit of Work pattern for managing transactions and coordinating repositories

    Usage:
        async with UnitOfWork(session) as uow:
            user_repo = uow.register_repository("users", UserRepository, User)
            sales_repo = uow.register_repository("sales", SalesRepository, SalesData)

            user = await user_repo.create(user_data)
            sale = await sales_repo.create(sales_data)

            await uow.commit()
    """

    def __init__(self, session: AsyncSession, auto_commit: bool = False):
        self.session = session
        self.auto_commit = auto_commit
        self._repositories: Dict[str, BaseRepository] = {}
        self._events: List[BaseEvent] = []
        self._committed = False
        self._rolled_back = False

    def register_repository(
        self,
        name: str,
        repository_class: Type[Repository],
        model_class: Type[Model],
        **kwargs
    ) -> Repository:
        """
        Register a repository with the unit of work

        Args:
            name: Unique name for the repository
            repository_class: Repository class to instantiate
            model_class: SQLAlchemy model class
            **kwargs: Additional arguments for repository

        Returns:
            Instantiated repository
        """
        if name in self._repositories:
            logger.warning(f"Repository '{name}' already registered, returning existing instance")
            return self._repositories[name]

        repository = repository_class(model_class, self.session, **kwargs)
        self._repositories[name] = repository

        logger.debug("Registered repository", name=name, model=model_class.__name__)
        return repository

    def get_repository(self, name: str) -> Optional[Repository]:
        """Get registered repository by name"""
        return self._repositories.get(name)

    def add_event(self, event: BaseEvent):
        """Add domain event to be published after commit"""
        self._events.append(event)
        logger.debug("Added event", event_type=event.event_type)

    async def commit(self):
        """Commit the transaction and publish events"""
        if self._committed:
            logger.warning("Unit of Work already committed")
            return

        if self._rolled_back:
            raise TransactionError("Cannot commit after rollback")

        try:
            await self.session.commit()
            self._committed = True

            # Publish events after successful commit; capture the count first,
            # since publishing clears the event list
            event_count = len(self._events)
            await self._publish_events()

            logger.debug("Unit of Work committed successfully",
                         repositories=list(self._repositories.keys()),
                         events_published=event_count)

        except SQLAlchemyError as e:
            await self.rollback()
            logger.error("Failed to commit Unit of Work", error=str(e))
            raise TransactionError(f"Commit failed: {str(e)}")

    async def rollback(self):
        """Rollback the transaction"""
        if self._rolled_back:
            logger.warning("Unit of Work already rolled back")
            return

        try:
            await self.session.rollback()
            self._rolled_back = True
            self._events.clear()  # Clear events on rollback

            logger.debug("Unit of Work rolled back",
                         repositories=list(self._repositories.keys()))

        except SQLAlchemyError as e:
            logger.error("Failed to rollback Unit of Work", error=str(e))
            raise TransactionError(f"Rollback failed: {str(e)}")

    async def _publish_events(self):
        """Publish domain events (override in subclasses for actual publishing)"""
        if not self._events:
            return

        # Default implementation just logs events
        # Override this method in service-specific implementations
        for event in self._events:
            logger.info("Publishing event",
                        event_type=event.event_type,
                        event_data=event.to_dict())

        # Clear events after publishing
        self._events.clear()

    async def __aenter__(self):
        """Async context manager entry"""
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        """Async context manager exit"""
        if exc_type is not None:
            # Exception occurred, rollback
            await self.rollback()
            return False

        # No exception, auto-commit if enabled
        if self.auto_commit and not self._committed:
            await self.commit()

        return False


class ServiceUnitOfWork(UnitOfWork):
    """
    Service-specific Unit of Work with event publishing integration

    Example usage with message publishing:

    class AuthUnitOfWork(ServiceUnitOfWork):
        def __init__(self, session: AsyncSession, message_publisher=None):
            super().__init__(session)
            self.message_publisher = message_publisher

        async def _publish_events(self):
            for event in self._events:
                if self.message_publisher:
                    await self.message_publisher.publish(
                        topic="auth.events",
                        message=event.to_dict()
                    )
    """

    def __init__(self, session: AsyncSession, event_publisher=None, auto_commit: bool = False):
        super().__init__(session, auto_commit)
        self.event_publisher = event_publisher

    async def _publish_events(self):
        """Publish events using the provided event publisher"""
        if not self._events or not self.event_publisher:
            return

        try:
            for event in self._events:
                await self.event_publisher.publish(event)
                logger.debug("Published event via publisher",
                             event_type=event.event_type)

            self._events.clear()

        except Exception as e:
            logger.error("Failed to publish events", error=str(e))
            # Don't raise here to avoid breaking the transaction
            # Events will be retried or handled by the event publisher


# ===== TRANSACTION CONTEXT MANAGER =====

@asynccontextmanager
async def transaction_scope(session: AsyncSession, auto_commit: bool = True):
    """
    Simple transaction context manager for single-repository operations

    Usage:
        async with transaction_scope(session) as tx_session:
            user = User(name="John")
            tx_session.add(user)
            # Auto-commits on success, rolls back on exception
    """
    try:
        yield session
        if auto_commit:
            await session.commit()
    except Exception as e:
        await session.rollback()
        logger.error("Transaction scope failed", error=str(e))
        raise


# ===== UTILITIES =====

class RepositoryRegistry:
    """Registry for commonly used repository configurations"""

    _registry: Dict[str, Dict[str, Any]] = {}

    @classmethod
    def register(
        cls,
        name: str,
        repository_class: Type[Repository],
        model_class: Type[Model],
        **kwargs
    ):
        """Register a repository configuration"""
        cls._registry[name] = {
            "repository_class": repository_class,
            "model_class": model_class,
            "kwargs": kwargs
        }
        logger.debug("Registered repository configuration", name=name)

    @classmethod
    def create_repository(cls, name: str, session: AsyncSession) -> Optional[Repository]:
        """Create repository instance from registry"""
        config = cls._registry.get(name)
        if not config:
            logger.warning(f"Repository configuration '{name}' not found in registry")
            return None

        return config["repository_class"](
            config["model_class"],
            session,
            **config["kwargs"]
        )

    @classmethod
    def list_registered(cls) -> List[str]:
        """List all registered repository names"""
        return list(cls._registry.keys())


# ===== FACTORY FUNCTIONS =====

def create_unit_of_work(session: AsyncSession, **kwargs) -> UnitOfWork:
    """Factory function to create Unit of Work instances"""
    return UnitOfWork(session, **kwargs)


def create_service_unit_of_work(
    session: AsyncSession,
    event_publisher=None,
    **kwargs
) -> ServiceUnitOfWork:
    """Factory function to create Service Unit of Work instances"""
    return ServiceUnitOfWork(session, event_publisher, **kwargs)
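
# A short wiring sketch for ServiceUnitOfWork; the publisher class is a
# hypothetical stand-in for whatever message bus a service actually uses.
#
#     class LogPublisher:
#         async def publish(self, event: BaseEvent) -> None:
#             print("would publish:", event.to_dict())
#
#     uow = create_service_unit_of_work(session, event_publisher=LogPublisher())
#     uow.add_event(DomainEvent("user.created", {"id": 1}))
#     await uow.commit()  # events are published only after a successful commit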
402
shared/database/utils.py
Executable file
@@ -0,0 +1,402 @@
"""
Database Utilities
Helper functions for database operations and maintenance
"""

import time
from datetime import datetime
from typing import Dict, Any, List, Optional
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import text, inspect
from sqlalchemy.exc import SQLAlchemyError
import structlog

from .exceptions import DatabaseError, HealthCheckError

logger = structlog.get_logger()


class DatabaseUtils:
    """Utility functions for database operations"""

    @staticmethod
    async def execute_health_check(session: AsyncSession, timeout: int = 5) -> Dict[str, Any]:
        """
        Comprehensive database health check

        Returns:
            Dict with health status, metrics, and diagnostics
        """
        try:
            # Basic connectivity test
            start_time = time.time()
            await session.execute(text("SELECT 1"))
            response_time = time.time() - start_time

            # Get database info
            db_info = await DatabaseUtils._get_database_info(session)

            # Connection pool status (if available)
            pool_info = await DatabaseUtils._get_pool_info(session)

            return {
                "status": "healthy",
                "response_time_seconds": round(response_time, 4),
                "database": db_info,
                "connection_pool": pool_info,
                "timestamp": datetime.utcnow().isoformat()
            }

        except Exception as e:
            logger.error("Database health check failed", error=str(e))
            raise HealthCheckError(f"Health check failed: {str(e)}")
|
||||
@staticmethod
|
||||
async def _get_database_info(session: AsyncSession) -> Dict[str, Any]:
|
||||
"""Get database server information"""
|
||||
try:
|
||||
# Try to get database version and basic stats
|
||||
if session.bind.dialect.name == 'postgresql':
|
||||
version_result = await session.execute(text("SELECT version()"))
|
||||
version = version_result.scalar()
|
||||
|
||||
stats_result = await session.execute(text("""
|
||||
SELECT
|
||||
count(*) as active_connections,
|
||||
(SELECT setting FROM pg_settings WHERE name = 'max_connections') as max_connections
|
||||
FROM pg_stat_activity
|
||||
WHERE state = 'active'
|
||||
"""))
|
||||
stats = stats_result.fetchone()
|
||||
|
||||
return {
|
||||
"type": "postgresql",
|
||||
"version": version,
|
||||
"active_connections": stats.active_connections if stats else 0,
|
||||
"max_connections": stats.max_connections if stats else "unknown"
|
||||
}
|
||||
|
||||
elif session.bind.dialect.name == 'sqlite':
|
||||
version_result = await session.execute(text("SELECT sqlite_version()"))
|
||||
version = version_result.scalar()
|
||||
|
||||
return {
|
||||
"type": "sqlite",
|
||||
"version": version,
|
||||
"active_connections": 1,
|
||||
"max_connections": "unlimited"
|
||||
}
|
||||
|
||||
else:
|
||||
return {
|
||||
"type": session.bind.dialect.name,
|
||||
"version": "unknown",
|
||||
"active_connections": "unknown",
|
||||
"max_connections": "unknown"
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
logger.warning("Could not retrieve database info", error=str(e))
|
||||
return {
|
||||
"type": session.bind.dialect.name,
|
||||
"version": "unknown",
|
||||
"error": str(e)
|
||||
}
|
||||
|
||||
@staticmethod
|
||||
async def _get_pool_info(session: AsyncSession) -> Dict[str, Any]:
|
||||
"""Get connection pool information"""
|
||||
try:
|
||||
pool = session.bind.pool
|
||||
if pool:
|
||||
return {
|
||||
"size": pool.size(),
|
||||
"checked_in": pool.checkedin(),
|
||||
"checked_out": pool.checkedout(),
|
||||
"overflow": pool.overflow(),
|
||||
"status": pool.status()
|
||||
}
|
||||
else:
|
||||
return {"status": "no_pool"}
|
||||
|
||||
except Exception as e:
|
||||
logger.warning("Could not retrieve pool info", error=str(e))
|
||||
return {"error": str(e)}
|
||||
|
||||
@staticmethod
|
||||
async def validate_schema(session: AsyncSession, expected_tables: List[str]) -> Dict[str, Any]:
|
||||
"""
|
||||
Validate database schema against expected tables
|
||||
|
||||
Args:
|
||||
session: Database session
|
||||
expected_tables: List of table names that should exist
|
||||
|
||||
Returns:
|
||||
Validation results with missing/extra tables
|
||||
"""
|
||||
try:
|
||||
# Get existing tables
|
||||
inspector = inspect(session.bind)
|
||||
existing_tables = set(inspector.get_table_names())
|
||||
expected_tables_set = set(expected_tables)
|
||||
|
||||
missing_tables = expected_tables_set - existing_tables
|
||||
extra_tables = existing_tables - expected_tables_set
|
||||
|
||||
return {
|
||||
"valid": len(missing_tables) == 0,
|
||||
"existing_tables": list(existing_tables),
|
||||
"expected_tables": expected_tables,
|
||||
"missing_tables": list(missing_tables),
|
||||
"extra_tables": list(extra_tables),
|
||||
"total_tables": len(existing_tables)
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Schema validation failed", error=str(e))
|
||||
raise DatabaseError(f"Schema validation failed: {str(e)}")
|
||||
|
||||
@staticmethod
|
||||
async def get_table_stats(session: AsyncSession, table_names: List[str]) -> Dict[str, Any]:
|
||||
"""
|
||||
Get statistics for specified tables
|
||||
|
||||
Args:
|
||||
session: Database session
|
||||
table_names: List of table names to analyze
|
||||
|
||||
Returns:
|
||||
Dictionary with table statistics
|
||||
"""
|
||||
try:
|
||||
stats = {}
|
||||
|
||||
for table_name in table_names:
|
||||
if session.bind.dialect.name == 'postgresql':
|
||||
# PostgreSQL specific queries
|
||||
count_result = await session.execute(
|
||||
text(f"SELECT COUNT(*) FROM {table_name}")
|
||||
)
|
||||
row_count = count_result.scalar()
|
||||
|
||||
size_result = await session.execute(
|
||||
text(f"SELECT pg_total_relation_size('{table_name}')")
|
||||
)
|
||||
table_size = size_result.scalar()
|
||||
|
||||
stats[table_name] = {
|
||||
"row_count": row_count,
|
||||
"size_bytes": table_size,
|
||||
"size_mb": round(table_size / (1024 * 1024), 2) if table_size else 0
|
||||
}
|
||||
|
||||
elif session.bind.dialect.name == 'sqlite':
|
||||
# SQLite specific queries
|
||||
count_result = await session.execute(
|
||||
text(f"SELECT COUNT(*) FROM {table_name}")
|
||||
)
|
||||
row_count = count_result.scalar()
|
||||
|
||||
stats[table_name] = {
|
||||
"row_count": row_count,
|
||||
"size_bytes": "unknown",
|
||||
"size_mb": "unknown"
|
||||
}
|
||||
|
||||
else:
|
||||
# Generic fallback
|
||||
count_result = await session.execute(
|
||||
text(f"SELECT COUNT(*) FROM {table_name}")
|
||||
)
|
||||
row_count = count_result.scalar()
|
||||
|
||||
stats[table_name] = {
|
||||
"row_count": row_count,
|
||||
"size_bytes": "unknown",
|
||||
"size_mb": "unknown"
|
||||
}
|
||||
|
||||
return stats
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to get table statistics",
|
||||
tables=table_names, error=str(e))
|
||||
raise DatabaseError(f"Failed to get table stats: {str(e)}")
|
||||
|
||||
@staticmethod
|
||||
async def cleanup_old_records(
|
||||
session: AsyncSession,
|
||||
table_name: str,
|
||||
date_column: str,
|
||||
days_old: int,
|
||||
batch_size: int = 1000
|
||||
) -> int:
|
||||
"""
|
||||
Clean up old records from a table
|
||||
|
||||
Args:
|
||||
session: Database session
|
||||
table_name: Name of table to clean
|
||||
date_column: Date column to filter by
|
||||
days_old: Records older than this many days will be deleted
|
||||
batch_size: Number of records to delete per batch
|
||||
|
||||
Returns:
|
||||
Total number of records deleted
|
||||
"""
|
||||
try:
|
||||
total_deleted = 0
|
||||
|
||||
while True:
|
||||
if session.bind.dialect.name == 'postgresql':
|
||||
delete_query = text(f"""
|
||||
DELETE FROM {table_name}
|
||||
WHERE {date_column} < NOW() - INTERVAL :days_param
|
||||
AND ctid IN (
|
||||
SELECT ctid FROM {table_name}
|
||||
WHERE {date_column} < NOW() - INTERVAL :days_param
|
||||
LIMIT :batch_size
|
||||
)
|
||||
""")
|
||||
params = {
|
||||
"days_param": f"{days_old} days",
|
||||
"batch_size": batch_size
|
||||
}
|
||||
|
||||
elif session.bind.dialect.name == 'sqlite':
|
||||
delete_query = text(f"""
|
||||
DELETE FROM {table_name}
|
||||
WHERE {date_column} < datetime('now', :days_param)
|
||||
AND rowid IN (
|
||||
SELECT rowid FROM {table_name}
|
||||
WHERE {date_column} < datetime('now', :days_param)
|
||||
LIMIT :batch_size
|
||||
)
|
||||
""")
|
||||
params = {
|
||||
"days_param": f"-{days_old} days",
|
||||
"batch_size": batch_size
|
||||
}
|
||||
|
||||
else:
|
||||
# Generic fallback (may not work for all databases)
|
||||
delete_query = text(f"""
|
||||
DELETE FROM {table_name}
|
||||
WHERE {date_column} < DATE_SUB(NOW(), INTERVAL :days_old DAY)
|
||||
LIMIT :batch_size
|
||||
""")
|
||||
params = {
|
||||
"days_old": days_old,
|
||||
"batch_size": batch_size
|
||||
}
|
||||
|
||||
result = await session.execute(delete_query, params)
|
||||
deleted_count = result.rowcount
|
||||
|
||||
if deleted_count == 0:
|
||||
break
|
||||
|
||||
total_deleted += deleted_count
|
||||
await session.commit()
|
||||
|
||||
logger.debug(f"Deleted batch from {table_name}",
|
||||
batch_size=deleted_count,
|
||||
total_deleted=total_deleted)
|
||||
|
||||
logger.info(f"Cleanup completed for {table_name}",
|
||||
total_deleted=total_deleted,
|
||||
days_old=days_old)
|
||||
|
||||
return total_deleted
|
||||
|
||||
except Exception as e:
|
||||
await session.rollback()
|
||||
logger.error(f"Cleanup failed for {table_name}", error=str(e))
|
||||
raise DatabaseError(f"Cleanup failed: {str(e)}")
|
||||
|
||||
@staticmethod
|
||||
async def execute_maintenance(session: AsyncSession) -> Dict[str, Any]:
|
||||
"""
|
||||
Execute database maintenance tasks
|
||||
|
||||
Returns:
|
||||
Dictionary with maintenance results
|
||||
"""
|
||||
try:
|
||||
results = {}
|
||||
|
||||
if session.bind.dialect.name == 'postgresql':
|
||||
# PostgreSQL maintenance
|
||||
await session.execute(text("VACUUM ANALYZE"))
|
||||
results["vacuum"] = "completed"
|
||||
|
||||
# Update statistics
|
||||
await session.execute(text("ANALYZE"))
|
||||
results["analyze"] = "completed"
|
||||
|
||||
elif session.bind.dialect.name == 'sqlite':
|
||||
# SQLite maintenance
|
||||
await session.execute(text("VACUUM"))
|
||||
results["vacuum"] = "completed"
|
||||
|
||||
await session.execute(text("ANALYZE"))
|
||||
results["analyze"] = "completed"
|
||||
|
||||
else:
|
||||
results["maintenance"] = "not_supported"
|
||||
|
||||
await session.commit()
|
||||
|
||||
logger.info("Database maintenance completed", results=results)
|
||||
return results
|
||||
|
||||
except Exception as e:
|
||||
await session.rollback()
|
||||
logger.error("Database maintenance failed", error=str(e))
|
||||
raise DatabaseError(f"Maintenance failed: {str(e)}")
|
||||
|
||||
|
||||
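# Usage sketch: exposing the health check through a FastAPI-style endpoint.
# Route registration is left to the service; `fastapi` is already a project
# dependency, but this handler and its wiring are illustrative only.
from fastapi import HTTPException


async def database_health(session: AsyncSession) -> Dict[str, Any]:
    """Return health info, mapping check failures to a 503 response."""
    try:
        return await DatabaseUtils.execute_health_check(session)
    except HealthCheckError as e:
        raise HTTPException(status_code=503, detail=str(e))

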
class QueryLogger:
    """Utility for logging and analyzing database queries"""

    def __init__(self, session: AsyncSession):
        self.session = session
        self._query_log = []

    async def log_query(self, query: str, params: Optional[Dict] = None, execution_time: Optional[float] = None):
        """Log a database query with metadata"""
        log_entry = {
            "query": query,
            "params": params,
            "execution_time": execution_time,
            "timestamp": datetime.utcnow().isoformat()
        }

        self._query_log.append(log_entry)

        # Log slow queries
        if execution_time and execution_time > 1.0:  # 1 second threshold
            logger.warning("Slow query detected",
                           query=query,
                           execution_time=execution_time)

    def get_query_stats(self) -> Dict[str, Any]:
        """Get statistics about logged queries"""
        if not self._query_log:
            return {"total_queries": 0}

        execution_times = [
            entry["execution_time"]
            for entry in self._query_log
            if entry["execution_time"] is not None
        ]

        return {
            "total_queries": len(self._query_log),
            "avg_execution_time": sum(execution_times) / len(execution_times) if execution_times else 0,
            "max_execution_time": max(execution_times) if execution_times else 0,
            "slow_queries_count": len([t for t in execution_times if t > 1.0])
        }

    def clear_log(self):
        """Clear the query log"""
        self._query_log.clear()
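

# Usage sketch: time a statement by hand and feed the result to QueryLogger.
# The query text here is only an example.
async def _example_query_logging(session: AsyncSession) -> Dict[str, Any]:
    qlog = QueryLogger(session)
    sql = "SELECT COUNT(*) FROM users"
    start = time.time()
    await session.execute(text(sql))
    await qlog.log_query(sql, execution_time=time.time() - start)
    return qlog.get_query_stats()
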
@@ -0,0 +1,24 @@
{
  "location": {
    "id": "A0000000-0000-4000-a000-000000000001",
    "parent_tenant_id": "80000000-0000-4000-a000-000000000001",
    "name": "Madrid - Salamanca",
    "location_code": "MAD",
    "city": "Madrid",
    "zone": "Salamanca",
    "address": "Calle de Serrano, 48",
    "postal_code": "28001",
    "country": "España",
    "latitude": 40.4284,
    "longitude": -3.6847,
    "status": "ACTIVE",
    "opening_hours": "07:00-21:00",
    "daily_capacity": 2500,
    "storage_capacity_kg": 1500,
    "created_at": "2024-06-01T00:00:00Z",
    "enterprise_location": true,
    "location_type": "retail",
    "staff_count": 12,
    "description": "Premium location in upscale Salamanca district"
  }
}
@@ -0,0 +1,24 @@
{
  "users": [
    {
      "id": "68c1b366-a760-5c63-89bc-fafed412bafe",
      "tenant_id": "A0000000-0000-4000-a000-000000000001",
      "name": "Gerente Madrid - Salamanca",
      "email": "gerente.a0000000-0000-4000-a000-000000000001@panaderiaartesana.es",
      "role": "admin",
      "is_active": true,
      "created_at": "BASE_TS - 180d",
      "updated_at": "BASE_TS - 180d"
    },
    {
      "id": "f21eac29-4810-5778-84d5-388c57d7d1aa",
      "tenant_id": "A0000000-0000-4000-a000-000000000001",
      "name": "Empleado Madrid - Salamanca",
      "email": "empleado.a0000000-0000-4000-a000-000000000001@panaderiaartesana.es",
      "role": "member",
      "is_active": true,
      "created_at": "BASE_TS - 150d",
      "updated_at": "BASE_TS - 150d"
    }
  ]
}
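These fixtures use relative timestamps such as "BASE_TS - 180d" or "BASE_TS + 1d 6h" that a seeding step resolves against a base time. A minimal resolver sketch under that assumption (the token grammar is inferred from the fixtures, not from a documented spec):

import re
from datetime import datetime, timedelta

# Inferred grammar: "BASE_TS" optionally followed by signed offsets such as
# "- 180d", "+ 1d 6h", "- 16h 0m", or "- 1d + 4h".
_UNIT = {"d": "days", "h": "hours", "m": "minutes"}

def resolve_base_ts(token: str, base: datetime) -> datetime:
    if not token.startswith("BASE_TS"):
        raise ValueError(f"not a relative timestamp: {token!r}")
    result = base
    sign = 1
    for part in token[len("BASE_TS"):].split():
        if part in ("+", "-"):
            sign = 1 if part == "+" else -1
            continue
        match = re.fullmatch(r"(\d+)([dhm])", part)
        if not match:
            raise ValueError(f"bad offset {part!r} in {token!r}")
        result += sign * timedelta(**{_UNIT[match.group(2)]: int(match.group(1))})
    return result

# Example: resolve_base_ts("BASE_TS - 1d + 4h", datetime(2025, 1, 16))
# -> datetime(2025, 1, 15, 4, 0)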
@@ -0,0 +1,242 @@
{
  "stock": [
    {
      "id": "965d50e9-c9dd-420f-a6e3-06bbd39186f4",
      "tenant_id": "A0000000-0000-4000-a000-000000000001",
      "ingredient_id": "10000000-0000-0000-0000-000000000001",
      "production_stage": "raw_ingredient",
      "quality_status": "APPROVED",
      "expiration_date": "BASE_TS + 1d 6h",
      "supplier_id": "40000000-0000-0000-0000-000000000001",
      "batch_number": "MAD-PRO-20250116-001",
      "created_at": "BASE_TS - 6h",
      "current_quantity": 32.0,
      "reserved_quantity": 0.0,
      "available_quantity": 32.0,
      "storage_location": "Madrid - Salamanca - Display Area",
      "updated_at": "BASE_TS - 6h",
      "is_available": true,
      "is_expired": false
    },
    {
      "id": "80d9e71d-7468-47f9-b74c-1d7190cbfd46",
      "tenant_id": "A0000000-0000-4000-a000-000000000001",
      "ingredient_id": "10000000-0000-0000-0000-000000000002",
      "production_stage": "raw_ingredient",
      "quality_status": "APPROVED",
      "expiration_date": "BASE_TS + 1d 6h",
      "supplier_id": "40000000-0000-0000-0000-000000000001",
      "batch_number": "MAD-PRO-20250116-002",
      "created_at": "BASE_TS - 6h",
      "current_quantity": 36.0,
      "reserved_quantity": 0.0,
      "available_quantity": 36.0,
      "storage_location": "Madrid - Salamanca - Display Area",
      "updated_at": "BASE_TS - 6h",
      "is_available": true,
      "is_expired": false
    },
    {
      "id": "6f4b9fc2-15a4-471b-abb0-734c0b814a64",
      "tenant_id": "A0000000-0000-4000-a000-000000000001",
      "ingredient_id": "10000000-0000-0000-0000-000000000003",
      "production_stage": "raw_ingredient",
      "quality_status": "APPROVED",
      "expiration_date": "BASE_TS + 1d 6h",
      "supplier_id": "40000000-0000-0000-0000-000000000001",
      "batch_number": "MAD-PRO-20250116-003",
      "created_at": "BASE_TS - 6h",
      "current_quantity": 40.0,
      "reserved_quantity": 0.0,
      "available_quantity": 40.0,
      "storage_location": "Madrid - Salamanca - Display Area",
      "updated_at": "BASE_TS - 6h",
      "is_available": true,
      "is_expired": false
    },
    {
      "id": "e02147f2-86c4-48f7-9109-73775f997798",
      "tenant_id": "A0000000-0000-4000-a000-000000000001",
      "ingredient_id": "10000000-0000-0000-0000-000000000004",
      "production_stage": "raw_ingredient",
      "quality_status": "APPROVED",
      "expiration_date": "BASE_TS + 1d 6h",
      "supplier_id": "40000000-0000-0000-0000-000000000001",
      "batch_number": "MAD-PRO-20250116-004",
      "created_at": "BASE_TS - 6h",
      "current_quantity": 44.0,
      "reserved_quantity": 0.0,
      "available_quantity": 44.0,
      "storage_location": "Madrid - Salamanca - Display Area",
      "updated_at": "BASE_TS - 6h",
      "is_available": true,
      "is_expired": false
    }
  ],
  "ingredients": [
    {
      "id": "10000000-0000-0000-0000-000000000001",
      "tenant_id": "A0000000-0000-4000-a000-000000000001",
      "name": "Harina de Trigo T55",
      "sku": "HAR-T55-ENT-001",
      "barcode": null,
      "product_type": "INGREDIENT",
      "ingredient_category": "FLOUR",
      "product_category": "BREAD",
      "subcategory": null,
      "description": "Harina de trigo refinada tipo 55, ideal para panes tradicionales y bollería",
      "brand": "Molinos San José - Enterprise Grade",
      "unit_of_measure": "KILOGRAMS",
      "package_size": null,
      "average_cost": 0.78,
      "last_purchase_price": null,
      "standard_cost": null,
      "low_stock_threshold": 700.0,
      "reorder_point": 1050.0,
      "reorder_quantity": null,
      "max_stock_level": null,
      "shelf_life_days": null,
      "display_life_hours": null,
      "best_before_hours": null,
      "storage_instructions": null,
      "central_baker_product_code": null,
      "delivery_days": null,
      "minimum_order_quantity": null,
      "pack_size": null,
      "is_active": true,
      "is_perishable": false,
      "allergen_info": [
        "gluten"
      ],
      "nutritional_info": null,
      "produced_locally": false,
      "recipe_id": null,
      "created_at": "BASE_TS",
      "updated_at": "BASE_TS",
      "created_by": "ae38accc-1ad4-410d-adbc-a55630908924"
    },
    {
      "id": "10000000-0000-0000-0000-000000000002",
      "tenant_id": "A0000000-0000-4000-a000-000000000001",
      "name": "Harina de Trigo T65",
      "sku": "HAR-T65-ENT-002",
      "barcode": null,
      "product_type": "INGREDIENT",
      "ingredient_category": "FLOUR",
      "product_category": "BREAD",
      "subcategory": null,
      "description": "Harina de trigo semi-integral tipo 65, perfecta para panes rústicos",
      "brand": "Molinos San José - Enterprise Grade",
      "unit_of_measure": "KILOGRAMS",
      "package_size": null,
      "average_cost": 0.87,
      "last_purchase_price": null,
      "standard_cost": null,
      "low_stock_threshold": 560.0,
      "reorder_point": 840.0,
      "reorder_quantity": null,
      "max_stock_level": null,
      "shelf_life_days": null,
      "display_life_hours": null,
      "best_before_hours": null,
      "storage_instructions": null,
      "central_baker_product_code": null,
      "delivery_days": null,
      "minimum_order_quantity": null,
      "pack_size": null,
      "is_active": true,
      "is_perishable": false,
      "allergen_info": [
        "gluten"
      ],
      "nutritional_info": null,
      "produced_locally": false,
      "recipe_id": null,
      "created_at": "BASE_TS",
      "updated_at": "BASE_TS",
      "created_by": "ae38accc-1ad4-410d-adbc-a55630908924"
    },
    {
      "id": "10000000-0000-0000-0000-000000000003",
      "tenant_id": "A0000000-0000-4000-a000-000000000001",
      "name": "Harina de Fuerza W300",
      "sku": "HAR-FUE-003",
      "barcode": null,
      "product_type": "INGREDIENT",
      "ingredient_category": "FLOUR",
      "product_category": "BREAD",
      "subcategory": null,
      "description": "Harina de gran fuerza W300, ideal para masas con alta hidratación",
      "brand": "Harinas Premium - Enterprise Grade",
      "unit_of_measure": "KILOGRAMS",
      "package_size": null,
      "average_cost": 1.06,
      "last_purchase_price": null,
      "standard_cost": null,
      "low_stock_threshold": 350.0,
      "reorder_point": 560.0,
      "reorder_quantity": null,
      "max_stock_level": null,
      "shelf_life_days": null,
      "display_life_hours": null,
      "best_before_hours": null,
      "storage_instructions": null,
      "central_baker_product_code": null,
      "delivery_days": null,
      "minimum_order_quantity": null,
      "pack_size": null,
      "is_active": true,
      "is_perishable": false,
      "allergen_info": [
        "gluten"
      ],
      "nutritional_info": null,
      "produced_locally": false,
      "recipe_id": null,
      "created_at": "BASE_TS",
      "updated_at": "BASE_TS",
      "created_by": "ae38accc-1ad4-410d-adbc-a55630908924"
    },
    {
      "id": "10000000-0000-0000-0000-000000000004",
      "tenant_id": "A0000000-0000-4000-a000-000000000001",
      "name": "Harina Integral de Trigo",
      "sku": "HAR-INT-004",
      "barcode": null,
      "product_type": "INGREDIENT",
      "ingredient_category": "FLOUR",
      "product_category": "BREAD",
      "subcategory": null,
      "description": "Harina integral 100% con salvado, rica en fibra",
      "brand": "Bio Cereales - Enterprise Grade",
      "unit_of_measure": "KILOGRAMS",
      "package_size": null,
      "average_cost": 1.1,
      "last_purchase_price": null,
      "standard_cost": null,
      "low_stock_threshold": 420.0,
      "reorder_point": 630.0,
      "reorder_quantity": null,
      "max_stock_level": null,
      "shelf_life_days": null,
      "display_life_hours": null,
      "best_before_hours": null,
      "storage_instructions": null,
      "central_baker_product_code": null,
      "delivery_days": null,
      "minimum_order_quantity": null,
      "pack_size": null,
      "is_active": true,
      "is_perishable": false,
      "allergen_info": [
        "gluten"
      ],
      "nutritional_info": null,
      "produced_locally": false,
      "recipe_id": null,
      "created_at": "BASE_TS",
      "updated_at": "BASE_TS",
      "created_by": "ae38accc-1ad4-410d-adbc-a55630908924"
    }
  ]
}
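The stock records above maintain the invariant available_quantity = current_quantity - reserved_quantity; a small validation sketch (the file path is a placeholder, since the fixture's location isn't shown here):

import json

def check_stock(path: str) -> None:
    # Placeholder path; validates the quantity invariant and flags low stock.
    with open(path, encoding="utf-8") as f:
        data = json.load(f)
    thresholds = {i["id"]: i["low_stock_threshold"] for i in data["ingredients"]}
    for record in data["stock"]:
        expected = record["current_quantity"] - record["reserved_quantity"]
        assert record["available_quantity"] == expected, record["id"]
        threshold = thresholds.get(record["ingredient_id"])
        if threshold is not None and record["available_quantity"] < threshold:
            print(f"low stock: {record['ingredient_id']}")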
@@ -0,0 +1,5 @@
{
  "recipes": [],
  "recipe_ingredients": [],
  "recipe_instructions": []
}
@@ -0,0 +1,3 @@
{
  "suppliers": []
}
@@ -0,0 +1,75 @@
{
  "equipment": [],
  "quality_check_templates": [],
  "quality_checks": [],
  "batches": [
    {
      "id": "50000001-0000-4000-a000-000000000001",
      "tenant_id": "A0000000-0000-4000-a000-000000000001",
      "recipe_id": "30000000-0000-0000-0000-000000000001",
      "product_id": "20000000-0000-0000-0000-000000000001",
      "batch_number": "BATCH-A000-0001",
      "status": "completed",
      "quantity_produced": 50,
      "quantity_good": 50,
      "quantity_defective": 0,
      "production_date": "BASE_TS - 1d",
      "expiration_date": "BASE_TS + 2d",
      "production_line": "Linea 1",
      "shift": "morning",
      "produced_by": "ae38accc-1ad4-410d-adbc-a55630908924",
      "approved_by": "80765906-0074-4206-8f58-5867df1975fd",
      "created_at": "BASE_TS - 1d",
      "updated_at": "BASE_TS - 1d",
      "is_active": true,
      "ingredients": [
        {
          "ingredient_id": "10000000-0000-0000-0000-000000000001",
          "quantity_used": 25.0,
          "unit": "kg"
        }
      ],
      "product_name": "Baguette Tradicional",
      "planned_start_time": "BASE_TS",
      "planned_end_time": "BASE_TS + 4h",
      "actual_start_time": "BASE_TS - 1d",
      "actual_end_time": "BASE_TS - 1d + 4h",
      "planned_quantity": 50.0,
      "planned_duration_minutes": 240
    },
    {
      "id": "50000002-0000-4000-a000-000000000001",
      "tenant_id": "A0000000-0000-4000-a000-000000000001",
      "recipe_id": "30000000-0000-0000-0000-000000000002",
      "product_id": "20000000-0000-0000-0000-000000000002",
      "batch_number": "BATCH-A000-0002",
      "status": "completed",
      "quantity_produced": 60,
      "quantity_good": 60,
      "quantity_defective": 0,
      "production_date": "BASE_TS - 2d",
      "expiration_date": "BASE_TS + 1d",
      "production_line": "Linea 2",
      "shift": "morning",
      "produced_by": "ae38accc-1ad4-410d-adbc-a55630908924",
      "approved_by": "80765906-0074-4206-8f58-5867df1975fd",
      "created_at": "BASE_TS - 2d",
      "updated_at": "BASE_TS - 2d",
      "is_active": true,
      "ingredients": [
        {
          "ingredient_id": "10000000-0000-0000-0000-000000000001",
          "quantity_used": 30.0,
          "unit": "kg"
        }
      ],
      "product_name": "Croissant de Mantequilla",
      "planned_start_time": "BASE_TS",
      "planned_end_time": "BASE_TS + 4h",
      "actual_start_time": "BASE_TS - 1d",
      "actual_end_time": "BASE_TS - 1d + 4h",
      "planned_quantity": 50.0,
      "planned_duration_minutes": 240
    }
  ]
}
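A quick consistency check the batch fixtures should satisfy, as a sketch (field names are taken from the records above; the path is a placeholder):

import json

def check_batches(path: str) -> None:
    # Good plus defective units should account for everything produced.
    with open(path, encoding="utf-8") as f:
        data = json.load(f)
    for batch in data["batches"]:
        total = batch["quantity_good"] + batch["quantity_defective"]
        assert total == batch["quantity_produced"], batch["batch_number"]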
@@ -0,0 +1,4 @@
{
  "purchase_orders": [],
  "purchase_order_items": []
}
@@ -0,0 +1,44 @@
{
  "customers": [
    {
      "id": "60000000-0000-0000-0000-000000000001",
      "tenant_id": "A0000000-0000-4000-a000-000000000001",
      "customer_code": "CUST-001",
      "name": "Restaurante El Buen Yantar - Madrid",
      "customer_type": "WHOLESALE",
      "contact_person": "Luis Gómez",
      "email": "compras@buenyantar.es",
      "phone": "+34 912 345 678",
      "address": "Calle Mayor, 45",
      "city": "Madrid",
      "postal_code": "28013",
      "country": "España",
      "status": "ACTIVE",
      "total_orders": 45,
      "total_spent": 3250.75,
      "created_at": "BASE_TS",
      "notes": "Regular wholesale customer - weekly orders"
    },
    {
      "id": "60000000-0000-0000-0000-000000000002",
      "tenant_id": "A0000000-0000-4000-a000-000000000001",
      "customer_code": "CUST-002",
      "name": "Cafetería La Esquina - Madrid",
      "customer_type": "RETAIL",
      "contact_person": "Marta Ruiz",
      "email": "cafeteria@laesquina.com",
      "phone": "+34 913 456 789",
      "address": "Plaza del Sol, 12",
      "city": "Madrid",
      "postal_code": "28012",
      "country": "España",
      "status": "ACTIVE",
      "total_orders": 12,
      "total_spent": 850.2,
      "created_at": "BASE_TS",
      "notes": "Small retail customer - biweekly orders"
    }
  ],
  "customer_orders": [],
  "order_items": []
}
@@ -0,0 +1,379 @@
{
  "sales_data": [
    {
      "id": "dde67992-5abc-4557-b927-0bd8fea21c38",
      "tenant_id": "A0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 16h 0m",
      "product_id": "20000000-0000-0000-0000-000000000001",
      "quantity_sold": 2,
      "unit_price": 0.9,
      "total_revenue": 1.8,
      "sales_channel": "in_store",
      "payment_method": "cash",
      "created_at": "BASE_TS - 16h 0m",
      "notes": "Venta local en Madrid - Salamanca",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "7791177b-72f3-4c7a-8af6-31b1f69c97a5",
      "tenant_id": "A0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 15h 3m",
      "product_id": "20000000-0000-0000-0000-000000000002",
      "quantity_sold": 3,
      "unit_price": 1.39,
      "total_revenue": 4.16,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 15h 3m",
      "notes": "Venta local en Madrid - Salamanca",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "928e27ba-76e5-4e86-8dda-c85bde2666ba",
      "tenant_id": "A0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 14h 6m",
      "product_id": "20000000-0000-0000-0000-000000000003",
      "quantity_sold": 4,
      "unit_price": 3.74,
      "total_revenue": 14.96,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 14h 6m",
      "notes": "Venta local en Madrid - Salamanca",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "8cb0f672-7b98-4fb8-8d40-c6157df9ac33",
      "tenant_id": "A0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 13h 9m",
      "product_id": "20000000-0000-0000-0000-000000000004",
      "quantity_sold": 5,
      "unit_price": 1.45,
      "total_revenue": 7.26,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 13h 9m",
      "notes": "Venta local en Madrid - Salamanca",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "1bd14e1c-7cff-4502-a2b8-1c5a4cb2ef6b",
      "tenant_id": "A0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 12h 12m",
      "product_id": "20000000-0000-0000-0000-000000000001",
      "quantity_sold": 6,
      "unit_price": 0.9,
      "total_revenue": 5.41,
      "sales_channel": "in_store",
      "payment_method": "cash",
      "created_at": "BASE_TS - 12h 12m",
      "notes": "Venta local en Madrid - Salamanca",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "ada4985e-242e-45aa-9ed7-2197360f765b",
      "tenant_id": "A0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 11h 15m",
      "product_id": "20000000-0000-0000-0000-000000000002",
      "quantity_sold": 2,
      "unit_price": 1.39,
      "total_revenue": 2.77,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 11h 15m",
      "notes": "Venta local en Madrid - Salamanca",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "a8954875-de31-4c2a-b4dc-0995cc918284",
      "tenant_id": "A0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 10h 18m",
      "product_id": "20000000-0000-0000-0000-000000000003",
      "quantity_sold": 3,
      "unit_price": 3.74,
      "total_revenue": 11.22,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 10h 18m",
      "notes": "Venta local en Madrid - Salamanca",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "c8ddb626-bae7-4ab7-9e84-0d7bdd873607",
      "tenant_id": "A0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 9h 21m",
      "product_id": "20000000-0000-0000-0000-000000000004",
      "quantity_sold": 4,
      "unit_price": 1.45,
      "total_revenue": 5.81,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 9h 21m",
      "notes": "Venta local en Madrid - Salamanca",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "a1a326e6-2194-42dd-bede-77ebcbebc5ec",
      "tenant_id": "A0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 8h 24m",
      "product_id": "20000000-0000-0000-0000-000000000001",
      "quantity_sold": 5,
      "unit_price": 0.9,
      "total_revenue": 4.51,
      "sales_channel": "in_store",
      "payment_method": "cash",
      "created_at": "BASE_TS - 8h 24m",
      "notes": "Venta local en Madrid - Salamanca",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "f95d7a80-13fe-4d48-b0ef-231e7e826f56",
      "tenant_id": "A0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 7h 27m",
      "product_id": "20000000-0000-0000-0000-000000000002",
      "quantity_sold": 6,
      "unit_price": 1.39,
      "total_revenue": 8.32,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 7h 27m",
      "notes": "Venta local en Madrid - Salamanca",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "5c5cfa90-816b-4f9b-a04e-132470143533",
      "tenant_id": "A0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 6h 30m",
      "product_id": "20000000-0000-0000-0000-000000000003",
      "quantity_sold": 2,
      "unit_price": 3.74,
      "total_revenue": 7.48,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 6h 30m",
      "notes": "Venta local en Madrid - Salamanca",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "a25b4c22-bedb-48ed-83d9-1660e4c5d174",
      "tenant_id": "A0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 5h 33m",
      "product_id": "20000000-0000-0000-0000-000000000004",
      "quantity_sold": 3,
      "unit_price": 1.45,
      "total_revenue": 4.36,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 5h 33m",
      "notes": "Venta local en Madrid - Salamanca",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "86bd4fb8-138d-4d4e-8174-55b61627460b",
      "tenant_id": "A0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 16h 36m",
      "product_id": "20000000-0000-0000-0000-000000000001",
      "quantity_sold": 4,
      "unit_price": 0.9,
      "total_revenue": 3.61,
      "sales_channel": "in_store",
      "payment_method": "cash",
      "created_at": "BASE_TS - 16h 36m",
      "notes": "Venta local en Madrid - Salamanca",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "21a15b7d-1ee9-4809-9ad0-15da0372cb63",
      "tenant_id": "A0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 15h 39m",
      "product_id": "20000000-0000-0000-0000-000000000002",
      "quantity_sold": 5,
      "unit_price": 1.39,
      "total_revenue": 6.93,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 15h 39m",
      "notes": "Venta local en Madrid - Salamanca",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "cb6ab4c0-205f-4a17-959f-56e9fbbfb353",
      "tenant_id": "A0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 14h 42m",
      "product_id": "20000000-0000-0000-0000-000000000003",
      "quantity_sold": 6,
      "unit_price": 3.74,
      "total_revenue": 22.44,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 14h 42m",
      "notes": "Venta local en Madrid - Salamanca",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "d3995f7e-fa21-48c9-bd77-9f296d936907",
      "tenant_id": "A0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 13h 45m",
      "product_id": "20000000-0000-0000-0000-000000000004",
      "quantity_sold": 2,
      "unit_price": 1.45,
      "total_revenue": 2.9,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 13h 45m",
      "notes": "Venta local en Madrid - Salamanca",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "911efb88-a1fa-449a-8470-35a0e0517a3f",
      "tenant_id": "A0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 12h 48m",
      "product_id": "20000000-0000-0000-0000-000000000001",
      "quantity_sold": 3,
      "unit_price": 0.9,
      "total_revenue": 2.71,
      "sales_channel": "in_store",
      "payment_method": "cash",
      "created_at": "BASE_TS - 12h 48m",
      "notes": "Venta local en Madrid - Salamanca",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "a15316e1-4ae4-4684-be06-b8e30165889b",
      "tenant_id": "A0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 11h 51m",
      "product_id": "20000000-0000-0000-0000-000000000002",
      "quantity_sold": 4,
      "unit_price": 1.39,
      "total_revenue": 5.54,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 11h 51m",
      "notes": "Venta local en Madrid - Salamanca",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "0b912be6-8ba4-4071-888d-acbb85e2bd34",
      "tenant_id": "A0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 10h 54m",
      "product_id": "20000000-0000-0000-0000-000000000003",
      "quantity_sold": 5,
      "unit_price": 3.74,
      "total_revenue": 18.7,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 10h 54m",
      "notes": "Venta local en Madrid - Salamanca",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "2f6604b3-ecbe-4846-b901-94bbbaef5e48",
      "tenant_id": "A0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 9h 57m",
      "product_id": "20000000-0000-0000-0000-000000000004",
      "quantity_sold": 6,
      "unit_price": 1.45,
      "total_revenue": 8.71,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 9h 57m",
      "notes": "Venta local en Madrid - Salamanca",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "7fb34ca8-3b8b-4007-a6fc-7e32d72bf198",
      "tenant_id": "A0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 8h 60m",
      "product_id": "20000000-0000-0000-0000-000000000001",
      "quantity_sold": 2,
      "unit_price": 0.9,
      "total_revenue": 1.8,
      "sales_channel": "in_store",
      "payment_method": "cash",
      "created_at": "BASE_TS - 8h 60m",
      "notes": "Venta local en Madrid - Salamanca",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "277c663d-2e14-4dee-ba17-38cf26cc9736",
      "tenant_id": "A0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 7h 63m",
      "product_id": "20000000-0000-0000-0000-000000000002",
      "quantity_sold": 3,
      "unit_price": 1.39,
      "total_revenue": 4.16,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 7h 63m",
      "notes": "Venta local en Madrid - Salamanca",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "91bd5c55-1273-42b9-9530-054704a356df",
      "tenant_id": "A0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 6h 66m",
      "product_id": "20000000-0000-0000-0000-000000000003",
      "quantity_sold": 4,
      "unit_price": 3.74,
      "total_revenue": 14.96,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 6h 66m",
      "notes": "Venta local en Madrid - Salamanca",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "5f58dd35-ea0c-49cd-864f-2fd0c55a0b9e",
      "tenant_id": "A0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 5h 69m",
      "product_id": "20000000-0000-0000-0000-000000000004",
      "quantity_sold": 5,
      "unit_price": 1.45,
      "total_revenue": 7.26,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 5h 69m",
      "notes": "Venta local en Madrid - Salamanca",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "70defcf2-c89f-47fd-ab4d-d2af07611baf",
      "tenant_id": "A0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 16h 72m",
      "product_id": "20000000-0000-0000-0000-000000000001",
      "quantity_sold": 6,
      "unit_price": 0.9,
      "total_revenue": 5.41,
      "sales_channel": "in_store",
      "payment_method": "cash",
      "created_at": "BASE_TS - 16h 72m",
      "notes": "Venta local en Madrid - Salamanca",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    }
  ]
}
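For reference, per-product revenue over a sales fixture like the one above can be folded up in a few lines; a sketch (the path is a placeholder):

import json
from collections import defaultdict

def revenue_by_product(path: str) -> dict:
    # Placeholder path; sums total_revenue per product_id.
    with open(path, encoding="utf-8") as f:
        sales = json.load(f)["sales_data"]
    totals = defaultdict(float)
    for sale in sales:
        totals[sale["product_id"]] += sale["total_revenue"]
    return dict(totals)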
@@ -0,0 +1,4 @@
{
  "orchestration_run": null,
  "alerts": []
}
@@ -0,0 +1,24 @@
{
  "location": {
    "id": "B0000000-0000-4000-a000-000000000001",
    "parent_tenant_id": "80000000-0000-4000-a000-000000000001",
    "name": "Barcelona - Eixample",
    "location_code": "BCN",
    "city": "Barcelona",
    "zone": "Eixample",
    "address": "Passeig de Gràcia, 92",
    "postal_code": "08008",
    "country": "España",
    "latitude": 41.3947,
    "longitude": 2.1616,
    "status": "ACTIVE",
    "opening_hours": "07:00-21:00",
    "daily_capacity": 3000,
    "storage_capacity_kg": 2000,
    "created_at": "2024-06-01T00:00:00Z",
    "enterprise_location": true,
    "location_type": "retail",
    "staff_count": 15,
    "description": "High-volume tourist and local area in central Barcelona"
  }
}
@@ -0,0 +1,24 @@
{
  "users": [
    {
      "id": "c2563327-897b-506f-ac17-e7484cbee154",
      "tenant_id": "B0000000-0000-4000-a000-000000000001",
      "name": "Gerente Barcelona - Eixample",
      "email": "gerente.b0000000-0000-4000-a000-000000000001@panaderiaartesana.es",
      "role": "admin",
      "is_active": true,
      "created_at": "BASE_TS - 180d",
      "updated_at": "BASE_TS - 180d"
    },
    {
      "id": "42909c80-9479-5adb-9b98-8fe32cbedab9",
      "tenant_id": "B0000000-0000-4000-a000-000000000001",
      "name": "Empleado Barcelona - Eixample",
      "email": "empleado.b0000000-0000-4000-a000-000000000001@panaderiaartesana.es",
      "role": "member",
      "is_active": true,
      "created_at": "BASE_TS - 150d",
      "updated_at": "BASE_TS - 150d"
    }
  ]
}
@@ -0,0 +1,242 @@
{
  "stock": [
    {
      "id": "4f94abdc-fcc1-45f7-8f4a-bc0781b983d1",
      "tenant_id": "B0000000-0000-4000-a000-000000000001",
      "ingredient_id": "10000000-0000-0000-0000-000000000001",
      "production_stage": "raw_ingredient",
      "quality_status": "APPROVED",
      "expiration_date": "BASE_TS + 1d 6h",
      "supplier_id": "40000000-0000-0000-0000-000000000001",
      "batch_number": "BCN-PRO-20250116-001",
      "created_at": "BASE_TS - 6h",
      "current_quantity": 48.0,
      "reserved_quantity": 0.0,
      "available_quantity": 48.0,
      "storage_location": "Barcelona - Eixample - Display Area",
      "updated_at": "BASE_TS - 6h",
      "is_available": true,
      "is_expired": false
    },
    {
      "id": "f1abe185-4ab8-400f-ab34-204843f65b4e",
      "tenant_id": "B0000000-0000-4000-a000-000000000001",
      "ingredient_id": "10000000-0000-0000-0000-000000000002",
      "production_stage": "raw_ingredient",
      "quality_status": "APPROVED",
      "expiration_date": "BASE_TS + 1d 6h",
      "supplier_id": "40000000-0000-0000-0000-000000000001",
      "batch_number": "BCN-PRO-20250116-002",
      "created_at": "BASE_TS - 6h",
      "current_quantity": 54.0,
      "reserved_quantity": 0.0,
      "available_quantity": 54.0,
      "storage_location": "Barcelona - Eixample - Display Area",
      "updated_at": "BASE_TS - 6h",
      "is_available": true,
      "is_expired": false
    },
    {
      "id": "bde1a1c7-08a9-4de2-bce4-823bf0d8f58e",
      "tenant_id": "B0000000-0000-4000-a000-000000000001",
      "ingredient_id": "10000000-0000-0000-0000-000000000003",
      "production_stage": "raw_ingredient",
      "quality_status": "APPROVED",
      "expiration_date": "BASE_TS + 1d 6h",
      "supplier_id": "40000000-0000-0000-0000-000000000001",
      "batch_number": "BCN-PRO-20250116-003",
      "created_at": "BASE_TS - 6h",
      "current_quantity": 60.0,
      "reserved_quantity": 0.0,
      "available_quantity": 60.0,
      "storage_location": "Barcelona - Eixample - Display Area",
      "updated_at": "BASE_TS - 6h",
      "is_available": true,
      "is_expired": false
    },
    {
      "id": "90cbc91b-2853-430a-bc8c-50498b823ffb",
      "tenant_id": "B0000000-0000-4000-a000-000000000001",
      "ingredient_id": "10000000-0000-0000-0000-000000000004",
      "production_stage": "raw_ingredient",
      "quality_status": "APPROVED",
      "expiration_date": "BASE_TS + 1d 6h",
      "supplier_id": "40000000-0000-0000-0000-000000000001",
      "batch_number": "BCN-PRO-20250116-004",
      "created_at": "BASE_TS - 6h",
      "current_quantity": 66.0,
      "reserved_quantity": 0.0,
      "available_quantity": 66.0,
      "storage_location": "Barcelona - Eixample - Display Area",
      "updated_at": "BASE_TS - 6h",
      "is_available": true,
      "is_expired": false
    }
  ],
  "ingredients": [
    {
      "id": "10000000-0000-0000-0000-000000000001",
      "tenant_id": "B0000000-0000-4000-a000-000000000001",
      "name": "Harina de Trigo T55",
      "sku": "HAR-T55-ENT-001",
      "barcode": null,
      "product_type": "INGREDIENT",
      "ingredient_category": "FLOUR",
      "product_category": "BREAD",
      "subcategory": null,
      "description": "Harina de trigo refinada tipo 55, ideal para panes tradicionales y bollería",
      "brand": "Molinos San José - Enterprise Grade",
      "unit_of_measure": "KILOGRAMS",
      "package_size": null,
      "average_cost": 0.78,
      "last_purchase_price": null,
      "standard_cost": null,
      "low_stock_threshold": 700.0,
      "reorder_point": 1050.0,
      "reorder_quantity": null,
      "max_stock_level": null,
      "shelf_life_days": null,
      "display_life_hours": null,
      "best_before_hours": null,
      "storage_instructions": null,
      "central_baker_product_code": null,
      "delivery_days": null,
      "minimum_order_quantity": null,
      "pack_size": null,
      "is_active": true,
      "is_perishable": false,
      "allergen_info": [
        "gluten"
      ],
      "nutritional_info": null,
      "produced_locally": false,
      "recipe_id": null,
      "created_at": "BASE_TS",
      "updated_at": "BASE_TS",
      "created_by": "ae38accc-1ad4-410d-adbc-a55630908924"
    },
    {
      "id": "10000000-0000-0000-0000-000000000002",
      "tenant_id": "B0000000-0000-4000-a000-000000000001",
      "name": "Harina de Trigo T65",
      "sku": "HAR-T65-ENT-002",
      "barcode": null,
      "product_type": "INGREDIENT",
      "ingredient_category": "FLOUR",
      "product_category": "BREAD",
      "subcategory": null,
      "description": "Harina de trigo semi-integral tipo 65, perfecta para panes rústicos",
      "brand": "Molinos San José - Enterprise Grade",
      "unit_of_measure": "KILOGRAMS",
      "package_size": null,
      "average_cost": 0.87,
      "last_purchase_price": null,
      "standard_cost": null,
      "low_stock_threshold": 560.0,
      "reorder_point": 840.0,
      "reorder_quantity": null,
      "max_stock_level": null,
      "shelf_life_days": null,
      "display_life_hours": null,
      "best_before_hours": null,
      "storage_instructions": null,
      "central_baker_product_code": null,
      "delivery_days": null,
      "minimum_order_quantity": null,
      "pack_size": null,
      "is_active": true,
      "is_perishable": false,
      "allergen_info": [
        "gluten"
      ],
      "nutritional_info": null,
      "produced_locally": false,
      "recipe_id": null,
      "created_at": "BASE_TS",
      "updated_at": "BASE_TS",
      "created_by": "ae38accc-1ad4-410d-adbc-a55630908924"
    },
    {
      "id": "10000000-0000-0000-0000-000000000003",
      "tenant_id": "B0000000-0000-4000-a000-000000000001",
      "name": "Harina de Fuerza W300",
      "sku": "HAR-FUE-003",
      "barcode": null,
      "product_type": "INGREDIENT",
      "ingredient_category": "FLOUR",
      "product_category": "BREAD",
      "subcategory": null,
      "description": "Harina de gran fuerza W300, ideal para masas con alta hidratación",
      "brand": "Harinas Premium - Enterprise Grade",
      "unit_of_measure": "KILOGRAMS",
      "package_size": null,
      "average_cost": 1.06,
      "last_purchase_price": null,
      "standard_cost": null,
      "low_stock_threshold": 350.0,
      "reorder_point": 560.0,
      "reorder_quantity": null,
      "max_stock_level": null,
      "shelf_life_days": null,
      "display_life_hours": null,
      "best_before_hours": null,
      "storage_instructions": null,
      "central_baker_product_code": null,
      "delivery_days": null,
      "minimum_order_quantity": null,
      "pack_size": null,
      "is_active": true,
      "is_perishable": false,
      "allergen_info": [
        "gluten"
      ],
      "nutritional_info": null,
      "produced_locally": false,
      "recipe_id": null,
      "created_at": "BASE_TS",
      "updated_at": "BASE_TS",
      "created_by": "ae38accc-1ad4-410d-adbc-a55630908924"
    },
    {
      "id": "10000000-0000-0000-0000-000000000004",
      "tenant_id": "B0000000-0000-4000-a000-000000000001",
      "name": "Harina Integral de Trigo",
      "sku": "HAR-INT-004",
      "barcode": null,
      "product_type": "INGREDIENT",
      "ingredient_category": "FLOUR",
      "product_category": "BREAD",
      "subcategory": null,
      "description": "Harina integral 100% con salvado, rica en fibra",
      "brand": "Bio Cereales - Enterprise Grade",
      "unit_of_measure": "KILOGRAMS",
      "package_size": null,
      "average_cost": 1.1,
      "last_purchase_price": null,
      "standard_cost": null,
      "low_stock_threshold": 420.0,
      "reorder_point": 630.0,
      "reorder_quantity": null,
      "max_stock_level": null,
      "shelf_life_days": null,
      "display_life_hours": null,
      "best_before_hours": null,
      "storage_instructions": null,
      "central_baker_product_code": null,
      "delivery_days": null,
      "minimum_order_quantity": null,
      "pack_size": null,
      "is_active": true,
      "is_perishable": false,
      "allergen_info": [
        "gluten"
      ],
      "nutritional_info": null,
      "produced_locally": false,
      "recipe_id": null,
      "created_at": "BASE_TS",
      "updated_at": "BASE_TS",
      "created_by": "ae38accc-1ad4-410d-adbc-a55630908924"
    }
  ]
}
@@ -0,0 +1,5 @@
{
  "recipes": [],
  "recipe_ingredients": [],
  "recipe_instructions": []
}
@@ -0,0 +1,3 @@
{
  "suppliers": []
}
@@ -0,0 +1,75 @@
{
  "equipment": [],
  "quality_check_templates": [],
  "quality_checks": [],
  "batches": [
    {
      "id": "50000001-0000-4000-a000-000000000001",
      "tenant_id": "B0000000-0000-4000-a000-000000000001",
      "recipe_id": "30000000-0000-0000-0000-000000000001",
      "product_id": "20000000-0000-0000-0000-000000000001",
      "batch_number": "BATCH-B000-0001",
      "status": "completed",
      "quantity_produced": 50,
      "quantity_good": 50,
      "quantity_defective": 0,
      "production_date": "BASE_TS - 1d",
      "expiration_date": "BASE_TS + 2d",
      "production_line": "Linea 1",
      "shift": "morning",
      "produced_by": "ae38accc-1ad4-410d-adbc-a55630908924",
      "approved_by": "80765906-0074-4206-8f58-5867df1975fd",
      "created_at": "BASE_TS - 1d",
      "updated_at": "BASE_TS - 1d",
      "is_active": true,
      "ingredients": [
        {
          "ingredient_id": "10000000-0000-0000-0000-000000000001",
          "quantity_used": 25.0,
          "unit": "kg"
        }
      ],
      "product_name": "Baguette Tradicional",
      "planned_start_time": "BASE_TS",
      "planned_end_time": "BASE_TS + 4h",
      "actual_start_time": "BASE_TS - 1d",
      "actual_end_time": "BASE_TS - 1d + 4h",
      "planned_quantity": 50.0,
      "planned_duration_minutes": 240
    },
    {
      "id": "50000002-0000-4000-a000-000000000001",
      "tenant_id": "B0000000-0000-4000-a000-000000000001",
      "recipe_id": "30000000-0000-0000-0000-000000000002",
      "product_id": "20000000-0000-0000-0000-000000000002",
      "batch_number": "BATCH-B000-0002",
      "status": "completed",
      "quantity_produced": 60,
      "quantity_good": 60,
      "quantity_defective": 0,
      "production_date": "BASE_TS - 2d",
      "expiration_date": "BASE_TS + 1d",
      "production_line": "Linea 2",
      "shift": "morning",
      "produced_by": "ae38accc-1ad4-410d-adbc-a55630908924",
      "approved_by": "80765906-0074-4206-8f58-5867df1975fd",
      "created_at": "BASE_TS - 2d",
      "updated_at": "BASE_TS - 2d",
      "is_active": true,
      "ingredients": [
        {
          "ingredient_id": "10000000-0000-0000-0000-000000000001",
          "quantity_used": 30.0,
          "unit": "kg"
        }
      ],
      "product_name": "Croissant de Mantequilla",
      "planned_start_time": "BASE_TS",
      "planned_end_time": "BASE_TS + 4h",
      "actual_start_time": "BASE_TS - 1d",
      "actual_end_time": "BASE_TS - 1d + 4h",
      "planned_quantity": 50.0,
      "planned_duration_minutes": 240
    }
  ]
}
@@ -0,0 +1,4 @@
{
  "purchase_orders": [],
  "purchase_order_items": []
}
@@ -0,0 +1,44 @@
{
  "customers": [
    {
      "id": "60000000-0000-0000-0000-000000000001",
      "tenant_id": "B0000000-0000-4000-a000-000000000001",
      "customer_code": "CUST-001",
      "name": "Restaurante El Buen Yantar - Barcelona",
      "customer_type": "WHOLESALE",
      "contact_person": "Luis Gómez",
      "email": "compras@buenyantar.es",
      "phone": "+34 912 345 678",
      "address": "Calle Mayor, 45",
      "city": "Barcelona",
      "postal_code": "08013",
      "country": "España",
      "status": "ACTIVE",
      "total_orders": 45,
      "total_spent": 3250.75,
      "created_at": "BASE_TS",
      "notes": "Regular wholesale customer - weekly orders"
    },
    {
      "id": "60000000-0000-0000-0000-000000000002",
      "tenant_id": "B0000000-0000-4000-a000-000000000001",
      "customer_code": "CUST-002",
      "name": "Cafetería La Esquina - Barcelona",
      "customer_type": "RETAIL",
      "contact_person": "Marta Ruiz",
      "email": "cafeteria@laesquina.com",
      "phone": "+34 913 456 789",
      "address": "Plaza del Sol, 12",
      "city": "Barcelona",
      "postal_code": "08012",
      "country": "España",
      "status": "ACTIVE",
      "total_orders": 12,
      "total_spent": 850.2,
      "created_at": "BASE_TS",
      "notes": "Small retail customer - biweekly orders"
    }
  ],
  "customer_orders": [],
  "order_items": []
}
@@ -0,0 +1,529 @@
{
  "sales_data": [
    {
      "id": "c98c079b-58c0-4604-8dd6-3857ffad5d0a",
      "tenant_id": "B0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 16h 0m",
      "product_id": "20000000-0000-0000-0000-000000000001",
      "quantity_sold": 2,
      "unit_price": 0.9,
      "total_revenue": 1.8,
      "sales_channel": "in_store",
      "payment_method": "cash",
      "created_at": "BASE_TS - 16h 0m",
      "notes": "Venta local en Barcelona - Eixample",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "fefa3670-b349-42a1-9ff5-ef4f8b6d2b9f",
      "tenant_id": "B0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 15h 3m",
      "product_id": "20000000-0000-0000-0000-000000000002",
      "quantity_sold": 3,
      "unit_price": 1.39,
      "total_revenue": 4.16,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 15h 3m",
      "notes": "Venta local en Barcelona - Eixample",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "330fe4a8-519f-416b-82bf-b723bc46940a",
      "tenant_id": "B0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 14h 6m",
      "product_id": "20000000-0000-0000-0000-000000000003",
      "quantity_sold": 4,
      "unit_price": 3.74,
      "total_revenue": 14.96,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 14h 6m",
      "notes": "Venta local en Barcelona - Eixample",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "87c3e0d2-a2cd-4601-a761-b843439bfa37",
      "tenant_id": "B0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 13h 9m",
      "product_id": "20000000-0000-0000-0000-000000000004",
      "quantity_sold": 5,
      "unit_price": 1.45,
      "total_revenue": 7.26,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 13h 9m",
      "notes": "Venta local en Barcelona - Eixample",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "8e2c025d-d24c-4045-a661-bf15634d09e4",
      "tenant_id": "B0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 12h 12m",
      "product_id": "20000000-0000-0000-0000-000000000001",
      "quantity_sold": 6,
      "unit_price": 0.9,
      "total_revenue": 5.41,
      "sales_channel": "in_store",
      "payment_method": "cash",
      "created_at": "BASE_TS - 12h 12m",
      "notes": "Venta local en Barcelona - Eixample",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "7a38ebd1-6751-4200-9dd5-ff0d02a346eb",
      "tenant_id": "B0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 11h 15m",
      "product_id": "20000000-0000-0000-0000-000000000002",
      "quantity_sold": 2,
      "unit_price": 1.39,
      "total_revenue": 2.77,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 11h 15m",
      "notes": "Venta local en Barcelona - Eixample",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "5c230de0-d3b6-45c8-96eb-e3b08b6ff1d0",
      "tenant_id": "B0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 10h 18m",
      "product_id": "20000000-0000-0000-0000-000000000003",
      "quantity_sold": 3,
      "unit_price": 3.74,
      "total_revenue": 11.22,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 10h 18m",
      "notes": "Venta local en Barcelona - Eixample",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "942c135e-742d-4170-a77a-a890457c9c7f",
      "tenant_id": "B0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 9h 21m",
      "product_id": "20000000-0000-0000-0000-000000000004",
      "quantity_sold": 4,
      "unit_price": 1.45,
      "total_revenue": 5.81,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 9h 21m",
      "notes": "Venta local en Barcelona - Eixample",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "0952fb16-4269-457f-97a9-f673f79a1046",
      "tenant_id": "B0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 8h 24m",
      "product_id": "20000000-0000-0000-0000-000000000001",
      "quantity_sold": 5,
      "unit_price": 0.9,
      "total_revenue": 4.51,
      "sales_channel": "in_store",
      "payment_method": "cash",
      "created_at": "BASE_TS - 8h 24m",
      "notes": "Venta local en Barcelona - Eixample",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "d1f2dff4-e324-4631-b65e-1a5bb06e49a0",
      "tenant_id": "B0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 7h 27m",
      "product_id": "20000000-0000-0000-0000-000000000002",
      "quantity_sold": 6,
      "unit_price": 1.39,
      "total_revenue": 8.32,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 7h 27m",
      "notes": "Venta local en Barcelona - Eixample",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "863d7175-d174-4401-a0eb-b1e1b13f5e3f",
      "tenant_id": "B0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 6h 30m",
      "product_id": "20000000-0000-0000-0000-000000000003",
      "quantity_sold": 2,
      "unit_price": 3.74,
      "total_revenue": 7.48,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 6h 30m",
      "notes": "Venta local en Barcelona - Eixample",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "19ff40eb-eb0e-435d-9e79-62a882875d2d",
      "tenant_id": "B0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 5h 33m",
      "product_id": "20000000-0000-0000-0000-000000000004",
      "quantity_sold": 3,
      "unit_price": 1.45,
      "total_revenue": 4.36,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 5h 33m",
      "notes": "Venta local en Barcelona - Eixample",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "486d90b8-370e-4e4f-993c-173be441e459",
      "tenant_id": "B0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 16h 36m",
      "product_id": "20000000-0000-0000-0000-000000000001",
      "quantity_sold": 4,
      "unit_price": 0.9,
      "total_revenue": 3.61,
      "sales_channel": "in_store",
      "payment_method": "cash",
      "created_at": "BASE_TS - 16h 36m",
      "notes": "Venta local en Barcelona - Eixample",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "2c289f03-4a5c-4636-8292-cce140baed66",
      "tenant_id": "B0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 15h 39m",
      "product_id": "20000000-0000-0000-0000-000000000002",
      "quantity_sold": 5,
      "unit_price": 1.39,
      "total_revenue": 6.93,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 15h 39m",
      "notes": "Venta local en Barcelona - Eixample",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "ed80ba2c-3765-4270-bbc5-5d04769d586f",
      "tenant_id": "B0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 14h 42m",
      "product_id": "20000000-0000-0000-0000-000000000003",
      "quantity_sold": 6,
      "unit_price": 3.74,
      "total_revenue": 22.44,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 14h 42m",
      "notes": "Venta local en Barcelona - Eixample",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "7715cee7-d6d9-4731-ac97-14df72c1d9ad",
      "tenant_id": "B0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 13h 45m",
      "product_id": "20000000-0000-0000-0000-000000000004",
      "quantity_sold": 2,
      "unit_price": 1.45,
      "total_revenue": 2.9,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 13h 45m",
      "notes": "Venta local en Barcelona - Eixample",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "326a2a4a-69b7-4c2d-9770-70ff34ada0b3",
      "tenant_id": "B0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 12h 48m",
      "product_id": "20000000-0000-0000-0000-000000000001",
      "quantity_sold": 3,
      "unit_price": 0.9,
      "total_revenue": 2.71,
      "sales_channel": "in_store",
      "payment_method": "cash",
      "created_at": "BASE_TS - 12h 48m",
      "notes": "Venta local en Barcelona - Eixample",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "6978a848-65de-4dc5-848b-4ecf2f684ef8",
      "tenant_id": "B0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 11h 51m",
      "product_id": "20000000-0000-0000-0000-000000000002",
      "quantity_sold": 4,
      "unit_price": 1.39,
      "total_revenue": 5.54,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 11h 51m",
      "notes": "Venta local en Barcelona - Eixample",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "a005099c-e795-40cc-bea4-2ed5f6cbbfdd",
      "tenant_id": "B0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 10h 54m",
      "product_id": "20000000-0000-0000-0000-000000000003",
      "quantity_sold": 5,
      "unit_price": 3.74,
      "total_revenue": 18.7,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 10h 54m",
      "notes": "Venta local en Barcelona - Eixample",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "6230d526-b341-4edc-b77c-0203a9d09f57",
      "tenant_id": "B0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 9h 57m",
      "product_id": "20000000-0000-0000-0000-000000000004",
      "quantity_sold": 6,
      "unit_price": 1.45,
      "total_revenue": 8.71,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 9h 57m",
      "notes": "Venta local en Barcelona - Eixample",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "89ce02df-c236-4e1b-801c-2a8da9615541",
      "tenant_id": "B0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 8h 60m",
      "product_id": "20000000-0000-0000-0000-000000000001",
      "quantity_sold": 2,
      "unit_price": 0.9,
      "total_revenue": 1.8,
      "sales_channel": "in_store",
      "payment_method": "cash",
      "created_at": "BASE_TS - 8h 60m",
      "notes": "Venta local en Barcelona - Eixample",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "5363a886-64bc-4c23-b592-9293a3021bce",
      "tenant_id": "B0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 7h 63m",
      "product_id": "20000000-0000-0000-0000-000000000002",
      "quantity_sold": 3,
      "unit_price": 1.39,
      "total_revenue": 4.16,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 7h 63m",
      "notes": "Venta local en Barcelona - Eixample",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "f078cbdb-d798-4b62-944b-dbe48d69917b",
      "tenant_id": "B0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 6h 66m",
      "product_id": "20000000-0000-0000-0000-000000000003",
      "quantity_sold": 4,
      "unit_price": 3.74,
      "total_revenue": 14.96,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 6h 66m",
      "notes": "Venta local en Barcelona - Eixample",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "9f07234b-4ac5-49f6-8c11-4854a6fef024",
      "tenant_id": "B0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 5h 69m",
      "product_id": "20000000-0000-0000-0000-000000000004",
      "quantity_sold": 5,
      "unit_price": 1.45,
      "total_revenue": 7.26,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 5h 69m",
      "notes": "Venta local en Barcelona - Eixample",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "27615e7d-814f-43a4-8c6d-2e8fb5735d20",
      "tenant_id": "B0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 16h 72m",
      "product_id": "20000000-0000-0000-0000-000000000001",
      "quantity_sold": 6,
      "unit_price": 0.9,
      "total_revenue": 5.41,
      "sales_channel": "in_store",
      "payment_method": "cash",
      "created_at": "BASE_TS - 16h 72m",
      "notes": "Venta local en Barcelona - Eixample",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "6d391efa-8d6a-4912-b80f-8dd870280c37",
      "tenant_id": "B0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 15h 75m",
      "product_id": "20000000-0000-0000-0000-000000000002",
      "quantity_sold": 2,
      "unit_price": 1.39,
      "total_revenue": 2.77,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 15h 75m",
      "notes": "Venta local en Barcelona - Eixample",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "6ac6070b-5bda-4b6b-8bc6-fd03f9119fe5",
      "tenant_id": "B0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 14h 78m",
      "product_id": "20000000-0000-0000-0000-000000000003",
      "quantity_sold": 3,
      "unit_price": 3.74,
      "total_revenue": 11.22,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 14h 78m",
      "notes": "Venta local en Barcelona - Eixample",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "c37a3242-f986-4fda-9461-1ae38783e0f1",
      "tenant_id": "B0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 13h 81m",
      "product_id": "20000000-0000-0000-0000-000000000004",
      "quantity_sold": 4,
      "unit_price": 1.45,
      "total_revenue": 5.81,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 13h 81m",
      "notes": "Venta local en Barcelona - Eixample",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "9879bc0d-1708-4525-af11-cac205f81ce9",
      "tenant_id": "B0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 12h 84m",
      "product_id": "20000000-0000-0000-0000-000000000001",
      "quantity_sold": 5,
      "unit_price": 0.9,
      "total_revenue": 4.51,
      "sales_channel": "in_store",
      "payment_method": "cash",
      "created_at": "BASE_TS - 12h 84m",
      "notes": "Venta local en Barcelona - Eixample",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "c2eb8c92-5262-4a7c-b4a7-ef68b5a7ee82",
      "tenant_id": "B0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 11h 87m",
      "product_id": "20000000-0000-0000-0000-000000000002",
      "quantity_sold": 6,
      "unit_price": 1.39,
      "total_revenue": 8.32,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 11h 87m",
      "notes": "Venta local en Barcelona - Eixample",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "9cf70fb9-c6be-43c3-b076-5cd51ff94d75",
      "tenant_id": "B0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 10h 90m",
      "product_id": "20000000-0000-0000-0000-000000000003",
      "quantity_sold": 2,
      "unit_price": 3.74,
      "total_revenue": 7.48,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 10h 90m",
      "notes": "Venta local en Barcelona - Eixample",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "b57716d8-2a99-4f63-bdf3-eb07876d3502",
      "tenant_id": "B0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 9h 93m",
      "product_id": "20000000-0000-0000-0000-000000000004",
      "quantity_sold": 3,
      "unit_price": 1.45,
      "total_revenue": 4.36,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 9h 93m",
      "notes": "Venta local en Barcelona - Eixample",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "4c296e49-6b77-41ae-8a48-7341fd43a4b3",
      "tenant_id": "B0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 8h 96m",
      "product_id": "20000000-0000-0000-0000-000000000001",
      "quantity_sold": 4,
      "unit_price": 0.9,
      "total_revenue": 3.61,
      "sales_channel": "in_store",
      "payment_method": "cash",
      "created_at": "BASE_TS - 8h 96m",
      "notes": "Venta local en Barcelona - Eixample",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "a4a17323-86b9-44d7-843b-6cd542c0c0ed",
      "tenant_id": "B0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 7h 99m",
      "product_id": "20000000-0000-0000-0000-000000000002",
      "quantity_sold": 5,
      "unit_price": 1.39,
      "total_revenue": 6.93,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 7h 99m",
      "notes": "Venta local en Barcelona - Eixample",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "027b8eb5-da87-4c2b-b951-8934c1f0153f",
      "tenant_id": "B0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 6h 102m",
      "product_id": "20000000-0000-0000-0000-000000000003",
      "quantity_sold": 6,
      "unit_price": 3.74,
      "total_revenue": 22.44,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 6h 102m",
      "notes": "Venta local en Barcelona - Eixample",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    }
  ]
}
@@ -0,0 +1,4 @@
{
  "orchestration_run": null,
  "alerts": []
}
@@ -0,0 +1,24 @@
{
  "location": {
    "id": "C0000000-0000-4000-a000-000000000001",
    "parent_tenant_id": "80000000-0000-4000-a000-000000000001",
    "name": "Valencia - Ruzafa",
    "location_code": "VLC",
    "city": "Valencia",
    "zone": "Ruzafa",
    "address": "Calle de Sueca, 25",
    "postal_code": "46006",
    "country": "España",
    "latitude": 39.4623,
    "longitude": -0.3645,
    "status": "ACTIVE",
    "opening_hours": "07:00-21:00",
    "daily_capacity": 2000,
    "storage_capacity_kg": 1200,
    "created_at": "2024-06-01T00:00:00Z",
    "enterprise_location": true,
    "location_type": "retail",
    "staff_count": 10,
    "description": "Trendy artisan neighborhood with focus on quality"
  }
}
@@ -0,0 +1,24 @@
{
  "users": [
    {
      "id": "f60e7eaf-dc10-5751-a76e-413e92bc0067",
      "tenant_id": "C0000000-0000-4000-a000-000000000001",
      "name": "Gerente Valencia - Ruzafa",
      "email": "gerente.c0000000-0000-4000-a000-000000000001@panaderiaartesana.es",
      "role": "admin",
      "is_active": true,
      "created_at": "BASE_TS - 180d",
      "updated_at": "BASE_TS - 180d"
    },
    {
      "id": "7902d30b-6098-5100-b790-7786198605a8",
      "tenant_id": "C0000000-0000-4000-a000-000000000001",
      "name": "Empleado Valencia - Ruzafa",
      "email": "empleado.c0000000-0000-4000-a000-000000000001@panaderiaartesana.es",
      "role": "member",
      "is_active": true,
      "created_at": "BASE_TS - 150d",
      "updated_at": "BASE_TS - 150d"
    }
  ]
}
@@ -0,0 +1,242 @@
{
  "stock": [
    {
      "id": "66ee65db-e2a2-4483-9d53-9faf36f75e29",
      "tenant_id": "C0000000-0000-4000-a000-000000000001",
      "ingredient_id": "10000000-0000-0000-0000-000000000001",
      "production_stage": "raw_ingredient",
      "quality_status": "APPROVED",
      "expiration_date": "BASE_TS + 1d 6h",
      "supplier_id": "40000000-0000-0000-0000-000000000001",
      "batch_number": "VLC-PRO-20250116-001",
      "created_at": "BASE_TS - 6h",
      "current_quantity": 24.0,
      "reserved_quantity": 0.0,
      "available_quantity": 24.0,
      "storage_location": "Valencia - Ruzafa - Display Area",
      "updated_at": "BASE_TS - 6h",
      "is_available": true,
      "is_expired": false
    },
    {
      "id": "f84ee2f1-7dc4-409c-a1bd-4b246771988c",
      "tenant_id": "C0000000-0000-4000-a000-000000000001",
      "ingredient_id": "10000000-0000-0000-0000-000000000002",
      "production_stage": "raw_ingredient",
      "quality_status": "APPROVED",
      "expiration_date": "BASE_TS + 1d 6h",
      "supplier_id": "40000000-0000-0000-0000-000000000001",
      "batch_number": "VLC-PRO-20250116-002",
      "created_at": "BASE_TS - 6h",
      "current_quantity": 27.0,
      "reserved_quantity": 0.0,
      "available_quantity": 27.0,
      "storage_location": "Valencia - Ruzafa - Display Area",
      "updated_at": "BASE_TS - 6h",
      "is_available": true,
      "is_expired": false
    },
    {
      "id": "7f663dc0-07bf-4762-bb47-4c112810fd87",
      "tenant_id": "C0000000-0000-4000-a000-000000000001",
      "ingredient_id": "10000000-0000-0000-0000-000000000003",
      "production_stage": "raw_ingredient",
      "quality_status": "APPROVED",
      "expiration_date": "BASE_TS + 1d 6h",
      "supplier_id": "40000000-0000-0000-0000-000000000001",
      "batch_number": "VLC-PRO-20250116-003",
      "created_at": "BASE_TS - 6h",
      "current_quantity": 30.0,
      "reserved_quantity": 0.0,
      "available_quantity": 30.0,
      "storage_location": "Valencia - Ruzafa - Display Area",
      "updated_at": "BASE_TS - 6h",
      "is_available": true,
      "is_expired": false
    },
    {
      "id": "5ade7edd-b8a7-4a0a-843a-a99f91121806",
      "tenant_id": "C0000000-0000-4000-a000-000000000001",
      "ingredient_id": "10000000-0000-0000-0000-000000000004",
      "production_stage": "raw_ingredient",
      "quality_status": "APPROVED",
      "expiration_date": "BASE_TS + 1d 6h",
      "supplier_id": "40000000-0000-0000-0000-000000000001",
      "batch_number": "VLC-PRO-20250116-004",
      "created_at": "BASE_TS - 6h",
      "current_quantity": 33.0,
      "reserved_quantity": 0.0,
      "available_quantity": 33.0,
      "storage_location": "Valencia - Ruzafa - Display Area",
      "updated_at": "BASE_TS - 6h",
      "is_available": true,
      "is_expired": false
    }
  ],
  "ingredients": [
    {
      "id": "10000000-0000-0000-0000-000000000001",
      "tenant_id": "C0000000-0000-4000-a000-000000000001",
      "name": "Harina de Trigo T55",
      "sku": "HAR-T55-ENT-001",
      "barcode": null,
      "product_type": "INGREDIENT",
      "ingredient_category": "FLOUR",
      "product_category": "BREAD",
      "subcategory": null,
      "description": "Harina de trigo refinada tipo 55, ideal para panes tradicionales y bollería",
      "brand": "Molinos San José - Enterprise Grade",
      "unit_of_measure": "KILOGRAMS",
      "package_size": null,
      "average_cost": 0.78,
      "last_purchase_price": null,
      "standard_cost": null,
      "low_stock_threshold": 700.0,
      "reorder_point": 1050.0,
      "reorder_quantity": null,
      "max_stock_level": null,
      "shelf_life_days": null,
      "display_life_hours": null,
      "best_before_hours": null,
      "storage_instructions": null,
      "central_baker_product_code": null,
      "delivery_days": null,
      "minimum_order_quantity": null,
      "pack_size": null,
      "is_active": true,
      "is_perishable": false,
      "allergen_info": [
        "gluten"
      ],
      "nutritional_info": null,
      "produced_locally": false,
      "recipe_id": null,
      "created_at": "BASE_TS",
      "updated_at": "BASE_TS",
      "created_by": "ae38accc-1ad4-410d-adbc-a55630908924"
    },
    {
      "id": "10000000-0000-0000-0000-000000000002",
      "tenant_id": "C0000000-0000-4000-a000-000000000001",
      "name": "Harina de Trigo T65",
      "sku": "HAR-T65-ENT-002",
      "barcode": null,
      "product_type": "INGREDIENT",
      "ingredient_category": "FLOUR",
      "product_category": "BREAD",
      "subcategory": null,
      "description": "Harina de trigo semi-integral tipo 65, perfecta para panes rústicos",
      "brand": "Molinos San José - Enterprise Grade",
      "unit_of_measure": "KILOGRAMS",
      "package_size": null,
      "average_cost": 0.87,
      "last_purchase_price": null,
      "standard_cost": null,
      "low_stock_threshold": 560.0,
      "reorder_point": 840.0,
      "reorder_quantity": null,
      "max_stock_level": null,
      "shelf_life_days": null,
      "display_life_hours": null,
      "best_before_hours": null,
      "storage_instructions": null,
      "central_baker_product_code": null,
      "delivery_days": null,
      "minimum_order_quantity": null,
      "pack_size": null,
      "is_active": true,
      "is_perishable": false,
      "allergen_info": [
        "gluten"
      ],
      "nutritional_info": null,
      "produced_locally": false,
      "recipe_id": null,
      "created_at": "BASE_TS",
      "updated_at": "BASE_TS",
      "created_by": "ae38accc-1ad4-410d-adbc-a55630908924"
    },
    {
      "id": "10000000-0000-0000-0000-000000000003",
      "tenant_id": "C0000000-0000-4000-a000-000000000001",
      "name": "Harina de Fuerza W300",
      "sku": "HAR-FUE-003",
      "barcode": null,
      "product_type": "INGREDIENT",
      "ingredient_category": "FLOUR",
      "product_category": "BREAD",
      "subcategory": null,
      "description": "Harina de gran fuerza W300, ideal para masas con alta hidratación",
      "brand": "Harinas Premium - Enterprise Grade",
      "unit_of_measure": "KILOGRAMS",
      "package_size": null,
      "average_cost": 1.06,
      "last_purchase_price": null,
      "standard_cost": null,
      "low_stock_threshold": 350.0,
      "reorder_point": 560.0,
      "reorder_quantity": null,
      "max_stock_level": null,
      "shelf_life_days": null,
      "display_life_hours": null,
      "best_before_hours": null,
      "storage_instructions": null,
      "central_baker_product_code": null,
      "delivery_days": null,
      "minimum_order_quantity": null,
      "pack_size": null,
      "is_active": true,
      "is_perishable": false,
      "allergen_info": [
        "gluten"
      ],
      "nutritional_info": null,
      "produced_locally": false,
      "recipe_id": null,
      "created_at": "BASE_TS",
      "updated_at": "BASE_TS",
      "created_by": "ae38accc-1ad4-410d-adbc-a55630908924"
    },
    {
      "id": "10000000-0000-0000-0000-000000000004",
      "tenant_id": "C0000000-0000-4000-a000-000000000001",
      "name": "Harina Integral de Trigo",
      "sku": "HAR-INT-004",
      "barcode": null,
      "product_type": "INGREDIENT",
      "ingredient_category": "FLOUR",
      "product_category": "BREAD",
      "subcategory": null,
      "description": "Harina integral 100% con salvado, rica en fibra",
      "brand": "Bio Cereales - Enterprise Grade",
      "unit_of_measure": "KILOGRAMS",
      "package_size": null,
      "average_cost": 1.1,
      "last_purchase_price": null,
      "standard_cost": null,
      "low_stock_threshold": 420.0,
      "reorder_point": 630.0,
      "reorder_quantity": null,
      "max_stock_level": null,
      "shelf_life_days": null,
      "display_life_hours": null,
      "best_before_hours": null,
      "storage_instructions": null,
      "central_baker_product_code": null,
      "delivery_days": null,
      "minimum_order_quantity": null,
      "pack_size": null,
      "is_active": true,
      "is_perishable": false,
      "allergen_info": [
        "gluten"
      ],
      "nutritional_info": null,
      "produced_locally": false,
      "recipe_id": null,
      "created_at": "BASE_TS",
      "updated_at": "BASE_TS",
      "created_by": "ae38accc-1ad4-410d-adbc-a55630908924"
    }
  ]
}
@@ -0,0 +1,5 @@
{
  "recipes": [],
  "recipe_ingredients": [],
  "recipe_instructions": []
}
@@ -0,0 +1,3 @@
{
  "suppliers": []
}
@@ -0,0 +1,75 @@
{
  "equipment": [],
  "quality_check_templates": [],
  "quality_checks": [],
  "batches": [
    {
      "id": "50000001-0000-4000-a000-000000000001",
      "tenant_id": "C0000000-0000-4000-a000-000000000001",
      "recipe_id": "30000000-0000-0000-0000-000000000001",
      "product_id": "20000000-0000-0000-0000-000000000001",
      "batch_number": "BATCH-C000-0001",
      "status": "completed",
      "quantity_produced": 50,
      "quantity_good": 50,
      "quantity_defective": 0,
      "production_date": "BASE_TS - 1d",
      "expiration_date": "BASE_TS + 2d",
      "production_line": "Linea 1",
      "shift": "morning",
      "produced_by": "ae38accc-1ad4-410d-adbc-a55630908924",
      "approved_by": "80765906-0074-4206-8f58-5867df1975fd",
      "created_at": "BASE_TS - 1d",
      "updated_at": "BASE_TS - 1d",
      "is_active": true,
      "ingredients": [
        {
          "ingredient_id": "10000000-0000-0000-0000-000000000001",
          "quantity_used": 25.0,
          "unit": "kg"
        }
      ],
      "product_name": "Baguette Tradicional",
      "planned_start_time": "BASE_TS",
      "planned_end_time": "BASE_TS + 4h",
      "actual_start_time": "BASE_TS - 1d",
      "actual_end_time": "BASE_TS - 1d + 4h",
      "planned_quantity": 50.0,
      "planned_duration_minutes": 240
    },
    {
      "id": "50000002-0000-4000-a000-000000000001",
      "tenant_id": "C0000000-0000-4000-a000-000000000001",
      "recipe_id": "30000000-0000-0000-0000-000000000002",
      "product_id": "20000000-0000-0000-0000-000000000002",
      "batch_number": "BATCH-C000-0002",
      "status": "completed",
      "quantity_produced": 60,
      "quantity_good": 60,
      "quantity_defective": 0,
      "production_date": "BASE_TS - 2d",
      "expiration_date": "BASE_TS + 1d",
      "production_line": "Linea 2",
      "shift": "morning",
      "produced_by": "ae38accc-1ad4-410d-adbc-a55630908924",
      "approved_by": "80765906-0074-4206-8f58-5867df1975fd",
      "created_at": "BASE_TS - 2d",
      "updated_at": "BASE_TS - 2d",
      "is_active": true,
      "ingredients": [
        {
          "ingredient_id": "10000000-0000-0000-0000-000000000001",
          "quantity_used": 30.0,
          "unit": "kg"
        }
      ],
      "product_name": "Croissant de Mantequilla",
      "planned_start_time": "BASE_TS",
      "planned_end_time": "BASE_TS + 4h",
      "actual_start_time": "BASE_TS - 1d",
      "actual_end_time": "BASE_TS - 1d + 4h",
      "planned_quantity": 50.0,
      "planned_duration_minutes": 240
    }
  ]
}
@@ -0,0 +1,4 @@
{
  "purchase_orders": [],
  "purchase_order_items": []
}
@@ -0,0 +1,44 @@
{
  "customers": [
    {
      "id": "60000000-0000-0000-0000-000000000001",
      "tenant_id": "C0000000-0000-4000-a000-000000000001",
      "customer_code": "CUST-001",
      "name": "Restaurante El Buen Yantar - Valencia",
      "customer_type": "WHOLESALE",
      "contact_person": "Luis Gómez",
      "email": "compras@buenyantar.es",
      "phone": "+34 912 345 678",
      "address": "Calle Mayor, 45",
      "city": "Valencia",
      "postal_code": "46013",
      "country": "España",
      "status": "ACTIVE",
      "total_orders": 45,
      "total_spent": 3250.75,
      "created_at": "BASE_TS",
      "notes": "Regular wholesale customer - weekly orders"
    },
    {
      "id": "60000000-0000-0000-0000-000000000002",
      "tenant_id": "C0000000-0000-4000-a000-000000000001",
      "customer_code": "CUST-002",
      "name": "Cafetería La Esquina - Valencia",
      "customer_type": "RETAIL",
      "contact_person": "Marta Ruiz",
      "email": "cafeteria@laesquina.com",
      "phone": "+34 913 456 789",
      "address": "Plaza del Sol, 12",
      "city": "Valencia",
      "postal_code": "46012",
      "country": "España",
      "status": "ACTIVE",
      "total_orders": 12,
      "total_spent": 850.2,
      "created_at": "BASE_TS",
      "notes": "Small retail customer - biweekly orders"
    }
  ],
  "customer_orders": [],
  "order_items": []
}
@@ -0,0 +1,304 @@
{
  "sales_data": [
    {
      "id": "3cdfda6a-37c2-485e-99f3-39ee905bd5ee",
      "tenant_id": "C0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 16h 0m",
      "product_id": "20000000-0000-0000-0000-000000000001",
      "quantity_sold": 2,
      "unit_price": 0.9,
      "total_revenue": 1.8,
      "sales_channel": "in_store",
      "payment_method": "cash",
      "created_at": "BASE_TS - 16h 0m",
      "notes": "Venta local en Valencia - Ruzafa",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "07ae1a79-867c-49e4-a320-09410a08e359",
      "tenant_id": "C0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 15h 3m",
      "product_id": "20000000-0000-0000-0000-000000000002",
      "quantity_sold": 3,
      "unit_price": 1.39,
      "total_revenue": 4.16,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 15h 3m",
      "notes": "Venta local en Valencia - Ruzafa",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "0cef9b51-ef2e-40ff-a488-568d82f5c6e6",
      "tenant_id": "C0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 14h 6m",
      "product_id": "20000000-0000-0000-0000-000000000003",
      "quantity_sold": 4,
      "unit_price": 3.74,
      "total_revenue": 14.96,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 14h 6m",
      "notes": "Venta local en Valencia - Ruzafa",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "0a9af98d-2fd6-47da-bf85-a7c2ef365afb",
      "tenant_id": "C0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 13h 9m",
      "product_id": "20000000-0000-0000-0000-000000000004",
      "quantity_sold": 5,
      "unit_price": 1.45,
      "total_revenue": 7.26,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 13h 9m",
      "notes": "Venta local en Valencia - Ruzafa",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "73165b4b-fd89-424f-9e1c-3ecc216f8d60",
      "tenant_id": "C0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 12h 12m",
      "product_id": "20000000-0000-0000-0000-000000000001",
      "quantity_sold": 6,
      "unit_price": 0.9,
      "total_revenue": 5.41,
      "sales_channel": "in_store",
      "payment_method": "cash",
      "created_at": "BASE_TS - 12h 12m",
      "notes": "Venta local en Valencia - Ruzafa",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "0f8537bd-afe4-43c3-bea6-eea73e19c2e9",
      "tenant_id": "C0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 11h 15m",
      "product_id": "20000000-0000-0000-0000-000000000002",
      "quantity_sold": 2,
      "unit_price": 1.39,
      "total_revenue": 2.77,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 11h 15m",
      "notes": "Venta local en Valencia - Ruzafa",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "f6981442-7321-453c-a49c-1f3d729c6ad8",
      "tenant_id": "C0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 10h 18m",
      "product_id": "20000000-0000-0000-0000-000000000003",
      "quantity_sold": 3,
      "unit_price": 3.74,
      "total_revenue": 11.22,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 10h 18m",
      "notes": "Venta local en Valencia - Ruzafa",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "8e733da7-28ca-496d-8bc7-310ed6ccfbd2",
      "tenant_id": "C0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 9h 21m",
      "product_id": "20000000-0000-0000-0000-000000000004",
      "quantity_sold": 4,
      "unit_price": 1.45,
      "total_revenue": 5.81,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 9h 21m",
      "notes": "Venta local en Valencia - Ruzafa",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "f757e392-0f3e-453d-a6c8-2baad7dc91e8",
      "tenant_id": "C0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 8h 24m",
      "product_id": "20000000-0000-0000-0000-000000000001",
      "quantity_sold": 5,
      "unit_price": 0.9,
      "total_revenue": 4.51,
      "sales_channel": "in_store",
      "payment_method": "cash",
      "created_at": "BASE_TS - 8h 24m",
      "notes": "Venta local en Valencia - Ruzafa",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "90c194a8-926b-4a32-8e38-65824578b0c0",
      "tenant_id": "C0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 7h 27m",
      "product_id": "20000000-0000-0000-0000-000000000002",
      "quantity_sold": 6,
      "unit_price": 1.39,
      "total_revenue": 8.32,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 7h 27m",
      "notes": "Venta local en Valencia - Ruzafa",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "8874a2ce-e6b8-4b65-a5ee-4ab8f5b726c6",
      "tenant_id": "C0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 6h 30m",
      "product_id": "20000000-0000-0000-0000-000000000003",
      "quantity_sold": 2,
      "unit_price": 3.74,
      "total_revenue": 7.48,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 6h 30m",
      "notes": "Venta local en Valencia - Ruzafa",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "3fc14bee-6819-4b94-ab2d-3f4bd6b72c87",
      "tenant_id": "C0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 5h 33m",
      "product_id": "20000000-0000-0000-0000-000000000004",
      "quantity_sold": 3,
      "unit_price": 1.45,
      "total_revenue": 4.36,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 5h 33m",
      "notes": "Venta local en Valencia - Ruzafa",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "89421fb3-e5c6-4e9d-94b2-a660999b63b6",
      "tenant_id": "C0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 16h 36m",
      "product_id": "20000000-0000-0000-0000-000000000001",
      "quantity_sold": 4,
      "unit_price": 0.9,
      "total_revenue": 3.61,
      "sales_channel": "in_store",
      "payment_method": "cash",
      "created_at": "BASE_TS - 16h 36m",
      "notes": "Venta local en Valencia - Ruzafa",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "d804e2d4-5ac6-43d1-8438-f70b6cf18ff2",
      "tenant_id": "C0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 15h 39m",
      "product_id": "20000000-0000-0000-0000-000000000002",
      "quantity_sold": 5,
      "unit_price": 1.39,
      "total_revenue": 6.93,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 15h 39m",
      "notes": "Venta local en Valencia - Ruzafa",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "662934e8-084b-4a7f-ac5f-31ef65abb042",
      "tenant_id": "C0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 14h 42m",
      "product_id": "20000000-0000-0000-0000-000000000003",
      "quantity_sold": 6,
      "unit_price": 3.74,
      "total_revenue": 22.44,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 14h 42m",
      "notes": "Venta local en Valencia - Ruzafa",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "e7b8712b-0d00-44cc-981d-66af15603bd9",
      "tenant_id": "C0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 13h 45m",
      "product_id": "20000000-0000-0000-0000-000000000004",
      "quantity_sold": 2,
      "unit_price": 1.45,
      "total_revenue": 2.9,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 13h 45m",
      "notes": "Venta local en Valencia - Ruzafa",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "f501da6f-1a09-4c47-a2e7-61061ba96a1c",
      "tenant_id": "C0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 12h 48m",
      "product_id": "20000000-0000-0000-0000-000000000001",
      "quantity_sold": 3,
      "unit_price": 0.9,
      "total_revenue": 2.71,
      "sales_channel": "in_store",
      "payment_method": "cash",
      "created_at": "BASE_TS - 12h 48m",
      "notes": "Venta local en Valencia - Ruzafa",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "140e74cf-882e-48a0-b083-06a266067147",
      "tenant_id": "C0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 11h 51m",
      "product_id": "20000000-0000-0000-0000-000000000002",
      "quantity_sold": 4,
      "unit_price": 1.39,
      "total_revenue": 5.54,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 11h 51m",
      "notes": "Venta local en Valencia - Ruzafa",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "f4f940e6-83a5-4399-9fb3-8ad72ba11140",
      "tenant_id": "C0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 10h 54m",
      "product_id": "20000000-0000-0000-0000-000000000003",
      "quantity_sold": 5,
      "unit_price": 3.74,
      "total_revenue": 18.7,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 10h 54m",
      "notes": "Venta local en Valencia - Ruzafa",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "e4f17a2d-87b8-4f7f-901d-463341e3919b",
      "tenant_id": "C0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 9h 57m",
      "product_id": "20000000-0000-0000-0000-000000000004",
      "quantity_sold": 6,
      "unit_price": 1.45,
      "total_revenue": 8.71,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 9h 57m",
      "notes": "Venta local en Valencia - Ruzafa",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    }
  ]
}
@@ -0,0 +1,4 @@
{
  "orchestration_run": null,
  "alerts": []
}
@@ -0,0 +1,24 @@
{
  "location": {
    "id": "D0000000-0000-4000-a000-000000000001",
    "parent_tenant_id": "80000000-0000-4000-a000-000000000001",
    "name": "Seville - Triana",
    "location_code": "SEV",
    "city": "Seville",
    "zone": "Triana",
    "address": "Calle Betis, 15",
    "postal_code": "41010",
    "country": "España",
    "latitude": 37.3828,
    "longitude": -6.0026,
    "status": "ACTIVE",
    "opening_hours": "07:00-21:00",
    "daily_capacity": 1800,
    "storage_capacity_kg": 1000,
    "created_at": "2024-06-01T00:00:00Z",
    "enterprise_location": true,
    "location_type": "retail",
    "staff_count": 9,
    "description": "Traditional Andalusian location with local specialties"
  }
}
@@ -0,0 +1,24 @@
{
  "users": [
    {
      "id": "0b06b4a6-4d5b-5f62-8a66-76a2a7c4510d",
      "tenant_id": "D0000000-0000-4000-a000-000000000001",
      "name": "Gerente Seville - Triana",
      "email": "gerente.d0000000-0000-4000-a000-000000000001@panaderiaartesana.es",
      "role": "admin",
      "is_active": true,
      "created_at": "BASE_TS - 180d",
      "updated_at": "BASE_TS - 180d"
    },
    {
      "id": "281b76ff-3b06-557d-b2a5-3757d874a85f",
      "tenant_id": "D0000000-0000-4000-a000-000000000001",
      "name": "Empleado Seville - Triana",
      "email": "empleado.d0000000-0000-4000-a000-000000000001@panaderiaartesana.es",
      "role": "member",
      "is_active": true,
      "created_at": "BASE_TS - 150d",
      "updated_at": "BASE_TS - 150d"
    }
  ]
}
@@ -0,0 +1,242 @@
{
  "stock": [
    {
      "id": "11bf4708-93b9-4249-a582-32d366ee1e13",
      "tenant_id": "D0000000-0000-4000-a000-000000000001",
      "ingredient_id": "10000000-0000-0000-0000-000000000001",
      "production_stage": "raw_ingredient",
      "quality_status": "APPROVED",
      "expiration_date": "BASE_TS + 1d 6h",
      "supplier_id": "40000000-0000-0000-0000-000000000001",
      "batch_number": "SEV-PRO-20250116-001",
      "created_at": "BASE_TS - 6h",
      "current_quantity": 28.0,
      "reserved_quantity": 0.0,
      "available_quantity": 28.0,
      "storage_location": "Seville - Triana - Display Area",
      "updated_at": "BASE_TS - 6h",
      "is_available": true,
      "is_expired": false
    },
    {
      "id": "b806a1fd-aa88-40cd-aac5-7cf075029b39",
      "tenant_id": "D0000000-0000-4000-a000-000000000001",
      "ingredient_id": "10000000-0000-0000-0000-000000000002",
      "production_stage": "raw_ingredient",
      "quality_status": "APPROVED",
      "expiration_date": "BASE_TS + 1d 6h",
      "supplier_id": "40000000-0000-0000-0000-000000000001",
      "batch_number": "SEV-PRO-20250116-002",
      "created_at": "BASE_TS - 6h",
      "current_quantity": 31.5,
      "reserved_quantity": 0.0,
      "available_quantity": 31.5,
      "storage_location": "Seville - Triana - Display Area",
      "updated_at": "BASE_TS - 6h",
      "is_available": true,
      "is_expired": false
    },
    {
      "id": "4f9f63ff-979f-4bf3-bff0-2a287504614c",
      "tenant_id": "D0000000-0000-4000-a000-000000000001",
      "ingredient_id": "10000000-0000-0000-0000-000000000003",
      "production_stage": "raw_ingredient",
      "quality_status": "APPROVED",
      "expiration_date": "BASE_TS + 1d 6h",
      "supplier_id": "40000000-0000-0000-0000-000000000001",
      "batch_number": "SEV-PRO-20250116-003",
      "created_at": "BASE_TS - 6h",
      "current_quantity": 35.0,
      "reserved_quantity": 0.0,
      "available_quantity": 35.0,
      "storage_location": "Seville - Triana - Display Area",
      "updated_at": "BASE_TS - 6h",
      "is_available": true,
      "is_expired": false
    },
    {
      "id": "518e55d1-8d99-4634-9bbc-9edf61ec3a93",
      "tenant_id": "D0000000-0000-4000-a000-000000000001",
      "ingredient_id": "10000000-0000-0000-0000-000000000004",
      "production_stage": "raw_ingredient",
      "quality_status": "APPROVED",
      "expiration_date": "BASE_TS + 1d 6h",
      "supplier_id": "40000000-0000-0000-0000-000000000001",
      "batch_number": "SEV-PRO-20250116-004",
      "created_at": "BASE_TS - 6h",
      "current_quantity": 38.5,
      "reserved_quantity": 0.0,
      "available_quantity": 38.5,
      "storage_location": "Seville - Triana - Display Area",
      "updated_at": "BASE_TS - 6h",
      "is_available": true,
      "is_expired": false
    }
  ],
  "ingredients": [
    {
      "id": "10000000-0000-0000-0000-000000000001",
      "tenant_id": "D0000000-0000-4000-a000-000000000001",
      "name": "Harina de Trigo T55",
      "sku": "HAR-T55-ENT-001",
      "barcode": null,
      "product_type": "INGREDIENT",
      "ingredient_category": "FLOUR",
      "product_category": "BREAD",
      "subcategory": null,
      "description": "Harina de trigo refinada tipo 55, ideal para panes tradicionales y bollería",
      "brand": "Molinos San José - Enterprise Grade",
      "unit_of_measure": "KILOGRAMS",
      "package_size": null,
      "average_cost": 0.78,
      "last_purchase_price": null,
      "standard_cost": null,
      "low_stock_threshold": 700.0,
      "reorder_point": 1050.0,
      "reorder_quantity": null,
      "max_stock_level": null,
      "shelf_life_days": null,
      "display_life_hours": null,
      "best_before_hours": null,
      "storage_instructions": null,
      "central_baker_product_code": null,
      "delivery_days": null,
      "minimum_order_quantity": null,
      "pack_size": null,
      "is_active": true,
      "is_perishable": false,
      "allergen_info": [
        "gluten"
      ],
      "nutritional_info": null,
      "produced_locally": false,
      "recipe_id": null,
      "created_at": "BASE_TS",
      "updated_at": "BASE_TS",
      "created_by": "ae38accc-1ad4-410d-adbc-a55630908924"
    },
    {
      "id": "10000000-0000-0000-0000-000000000002",
      "tenant_id": "D0000000-0000-4000-a000-000000000001",
      "name": "Harina de Trigo T65",
      "sku": "HAR-T65-ENT-002",
      "barcode": null,
      "product_type": "INGREDIENT",
      "ingredient_category": "FLOUR",
      "product_category": "BREAD",
      "subcategory": null,
      "description": "Harina de trigo semi-integral tipo 65, perfecta para panes rústicos",
      "brand": "Molinos San José - Enterprise Grade",
      "unit_of_measure": "KILOGRAMS",
      "package_size": null,
      "average_cost": 0.87,
      "last_purchase_price": null,
      "standard_cost": null,
      "low_stock_threshold": 560.0,
      "reorder_point": 840.0,
      "reorder_quantity": null,
      "max_stock_level": null,
      "shelf_life_days": null,
      "display_life_hours": null,
      "best_before_hours": null,
      "storage_instructions": null,
      "central_baker_product_code": null,
      "delivery_days": null,
      "minimum_order_quantity": null,
      "pack_size": null,
      "is_active": true,
      "is_perishable": false,
      "allergen_info": [
        "gluten"
      ],
      "nutritional_info": null,
      "produced_locally": false,
      "recipe_id": null,
      "created_at": "BASE_TS",
      "updated_at": "BASE_TS",
      "created_by": "ae38accc-1ad4-410d-adbc-a55630908924"
    },
    {
      "id": "10000000-0000-0000-0000-000000000003",
      "tenant_id": "D0000000-0000-4000-a000-000000000001",
      "name": "Harina de Fuerza W300",
      "sku": "HAR-FUE-003",
      "barcode": null,
      "product_type": "INGREDIENT",
      "ingredient_category": "FLOUR",
      "product_category": "BREAD",
      "subcategory": null,
      "description": "Harina de gran fuerza W300, ideal para masas con alta hidratación",
      "brand": "Harinas Premium - Enterprise Grade",
      "unit_of_measure": "KILOGRAMS",
      "package_size": null,
      "average_cost": 1.06,
      "last_purchase_price": null,
      "standard_cost": null,
      "low_stock_threshold": 350.0,
      "reorder_point": 560.0,
      "reorder_quantity": null,
      "max_stock_level": null,
      "shelf_life_days": null,
      "display_life_hours": null,
      "best_before_hours": null,
      "storage_instructions": null,
      "central_baker_product_code": null,
      "delivery_days": null,
      "minimum_order_quantity": null,
      "pack_size": null,
      "is_active": true,
      "is_perishable": false,
      "allergen_info": [
        "gluten"
      ],
      "nutritional_info": null,
      "produced_locally": false,
      "recipe_id": null,
      "created_at": "BASE_TS",
      "updated_at": "BASE_TS",
      "created_by": "ae38accc-1ad4-410d-adbc-a55630908924"
    },
    {
      "id": "10000000-0000-0000-0000-000000000004",
      "tenant_id": "D0000000-0000-4000-a000-000000000001",
      "name": "Harina Integral de Trigo",
      "sku": "HAR-INT-004",
      "barcode": null,
      "product_type": "INGREDIENT",
      "ingredient_category": "FLOUR",
      "product_category": "BREAD",
      "subcategory": null,
      "description": "Harina integral 100% con salvado, rica en fibra",
      "brand": "Bio Cereales - Enterprise Grade",
      "unit_of_measure": "KILOGRAMS",
      "package_size": null,
      "average_cost": 1.1,
      "last_purchase_price": null,
      "standard_cost": null,
      "low_stock_threshold": 420.0,
      "reorder_point": 630.0,
      "reorder_quantity": null,
      "max_stock_level": null,
      "shelf_life_days": null,
      "display_life_hours": null,
      "best_before_hours": null,
      "storage_instructions": null,
      "central_baker_product_code": null,
      "delivery_days": null,
      "minimum_order_quantity": null,
      "pack_size": null,
      "is_active": true,
      "is_perishable": false,
      "allergen_info": [
        "gluten"
      ],
      "nutritional_info": null,
      "produced_locally": false,
      "recipe_id": null,
      "created_at": "BASE_TS",
      "updated_at": "BASE_TS",
      "created_by": "ae38accc-1ad4-410d-adbc-a55630908924"
    }
  ]
}
@@ -0,0 +1,5 @@
{
  "recipes": [],
  "recipe_ingredients": [],
  "recipe_instructions": []
}
@@ -0,0 +1,3 @@
{
  "suppliers": []
}
@@ -0,0 +1,75 @@
{
  "equipment": [],
  "quality_check_templates": [],
  "quality_checks": [],
  "batches": [
    {
      "id": "50000001-0000-4000-a000-000000000001",
      "tenant_id": "D0000000-0000-4000-a000-000000000001",
      "recipe_id": "30000000-0000-0000-0000-000000000001",
      "product_id": "20000000-0000-0000-0000-000000000001",
      "batch_number": "BATCH-D000-0001",
      "status": "completed",
      "quantity_produced": 50,
      "quantity_good": 50,
      "quantity_defective": 0,
      "production_date": "BASE_TS - 1d",
      "expiration_date": "BASE_TS + 2d",
      "production_line": "Linea 1",
      "shift": "morning",
      "produced_by": "ae38accc-1ad4-410d-adbc-a55630908924",
      "approved_by": "80765906-0074-4206-8f58-5867df1975fd",
      "created_at": "BASE_TS - 1d",
      "updated_at": "BASE_TS - 1d",
      "is_active": true,
      "ingredients": [
        {
          "ingredient_id": "10000000-0000-0000-0000-000000000001",
          "quantity_used": 25.0,
          "unit": "kg"
        }
      ],
      "product_name": "Baguette Tradicional",
      "planned_start_time": "BASE_TS",
      "planned_end_time": "BASE_TS + 4h",
      "actual_start_time": "BASE_TS - 1d",
      "actual_end_time": "BASE_TS - 1d + 4h",
      "planned_quantity": 50.0,
      "planned_duration_minutes": 240
    },
    {
      "id": "50000002-0000-4000-a000-000000000001",
      "tenant_id": "D0000000-0000-4000-a000-000000000001",
      "recipe_id": "30000000-0000-0000-0000-000000000002",
      "product_id": "20000000-0000-0000-0000-000000000002",
      "batch_number": "BATCH-D000-0002",
      "status": "completed",
      "quantity_produced": 60,
      "quantity_good": 60,
      "quantity_defective": 0,
      "production_date": "BASE_TS - 2d",
      "expiration_date": "BASE_TS + 1d",
      "production_line": "Linea 2",
      "shift": "morning",
      "produced_by": "ae38accc-1ad4-410d-adbc-a55630908924",
      "approved_by": "80765906-0074-4206-8f58-5867df1975fd",
      "created_at": "BASE_TS - 2d",
      "updated_at": "BASE_TS - 2d",
      "is_active": true,
      "ingredients": [
        {
          "ingredient_id": "10000000-0000-0000-0000-000000000001",
          "quantity_used": 30.0,
          "unit": "kg"
        }
      ],
      "product_name": "Croissant de Mantequilla",
      "planned_start_time": "BASE_TS",
      "planned_end_time": "BASE_TS + 4h",
      "actual_start_time": "BASE_TS - 1d",
      "actual_end_time": "BASE_TS - 1d + 4h",
      "planned_quantity": 50.0,
      "planned_duration_minutes": 240
    }
  ]
}
@@ -0,0 +1,4 @@
{
  "purchase_orders": [],
  "purchase_order_items": []
}
@@ -0,0 +1,44 @@
{
  "customers": [
    {
      "id": "60000000-0000-0000-0000-000000000001",
      "tenant_id": "D0000000-0000-4000-a000-000000000001",
      "customer_code": "CUST-001",
      "name": "Restaurante El Buen Yantar - Seville",
      "customer_type": "WHOLESALE",
      "contact_person": "Luis Gómez",
      "email": "compras@buenyantar.es",
      "phone": "+34 912 345 678",
      "address": "Calle Mayor, 45",
      "city": "Seville",
      "postal_code": "41013",
      "country": "España",
      "status": "ACTIVE",
      "total_orders": 45,
      "total_spent": 3250.75,
      "created_at": "BASE_TS",
      "notes": "Regular wholesale customer - weekly orders"
    },
    {
      "id": "60000000-0000-0000-0000-000000000002",
      "tenant_id": "D0000000-0000-4000-a000-000000000001",
      "customer_code": "CUST-002",
      "name": "Cafetería La Esquina - Seville",
      "customer_type": "RETAIL",
      "contact_person": "Marta Ruiz",
      "email": "cafeteria@laesquina.com",
      "phone": "+34 913 456 789",
      "address": "Plaza del Sol, 12",
      "city": "Seville",
      "postal_code": "41012",
      "country": "España",
      "status": "ACTIVE",
      "total_orders": 12,
      "total_spent": 850.2,
      "created_at": "BASE_TS",
      "notes": "Small retail customer - biweekly orders"
    }
  ],
  "customer_orders": [],
  "order_items": []
}
@@ -0,0 +1,274 @@
{
  "sales_data": [
    {
      "id": "0a141dbd-fd05-4686-8996-a9e122b83440",
      "tenant_id": "D0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 16h 0m",
      "product_id": "20000000-0000-0000-0000-000000000001",
      "quantity_sold": 2,
      "unit_price": 0.9,
      "total_revenue": 1.8,
      "sales_channel": "in_store",
      "payment_method": "cash",
      "created_at": "BASE_TS - 16h 0m",
      "notes": "Venta local en Seville - Triana",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "ee377244-f94f-4679-b6dd-eecdd554b6ef",
      "tenant_id": "D0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 15h 3m",
      "product_id": "20000000-0000-0000-0000-000000000002",
      "quantity_sold": 3,
      "unit_price": 1.39,
      "total_revenue": 4.16,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 15h 3m",
      "notes": "Venta local en Seville - Triana",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "6ecade43-3bb3-4ce1-ab16-0705c215d9bd",
      "tenant_id": "D0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 14h 6m",
      "product_id": "20000000-0000-0000-0000-000000000003",
      "quantity_sold": 4,
      "unit_price": 3.74,
      "total_revenue": 14.96,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 14h 6m",
      "notes": "Venta local en Seville - Triana",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "b9a3d1b9-90a7-4efb-bc5d-8a3e7b9c5fdd",
      "tenant_id": "D0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 13h 9m",
      "product_id": "20000000-0000-0000-0000-000000000004",
      "quantity_sold": 5,
      "unit_price": 1.45,
      "total_revenue": 7.26,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 13h 9m",
      "notes": "Venta local en Seville - Triana",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "1287736a-08d8-4d77-8de3-55b82427dc5e",
      "tenant_id": "D0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 12h 12m",
      "product_id": "20000000-0000-0000-0000-000000000001",
      "quantity_sold": 6,
      "unit_price": 0.9,
      "total_revenue": 5.41,
      "sales_channel": "in_store",
      "payment_method": "cash",
      "created_at": "BASE_TS - 12h 12m",
      "notes": "Venta local en Seville - Triana",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "e6bcfc7d-00b5-4af5-8783-2f9e47fadfb8",
      "tenant_id": "D0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 11h 15m",
      "product_id": "20000000-0000-0000-0000-000000000002",
      "quantity_sold": 2,
      "unit_price": 1.39,
      "total_revenue": 2.77,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 11h 15m",
      "notes": "Venta local en Seville - Triana",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "3ca59ae9-750d-4f4a-a8ad-d9d6b334ec51",
      "tenant_id": "D0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 10h 18m",
      "product_id": "20000000-0000-0000-0000-000000000003",
      "quantity_sold": 3,
      "unit_price": 3.74,
      "total_revenue": 11.22,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 10h 18m",
      "notes": "Venta local en Seville - Triana",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "fdd72396-5243-4bc7-a2f4-f0fb0531098d",
      "tenant_id": "D0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 9h 21m",
      "product_id": "20000000-0000-0000-0000-000000000004",
      "quantity_sold": 4,
      "unit_price": 1.45,
      "total_revenue": 5.81,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 9h 21m",
      "notes": "Venta local en Seville - Triana",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "7aed7d6f-51c7-472a-9a60-730de1b59a4a",
      "tenant_id": "D0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 8h 24m",
      "product_id": "20000000-0000-0000-0000-000000000001",
      "quantity_sold": 5,
      "unit_price": 0.9,
      "total_revenue": 4.51,
      "sales_channel": "in_store",
      "payment_method": "cash",
      "created_at": "BASE_TS - 8h 24m",
      "notes": "Venta local en Seville - Triana",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "59f0511c-b8f6-4163-b0a0-3689cd12d0c9",
      "tenant_id": "D0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 7h 27m",
      "product_id": "20000000-0000-0000-0000-000000000002",
      "quantity_sold": 6,
      "unit_price": 1.39,
      "total_revenue": 8.32,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 7h 27m",
      "notes": "Venta local en Seville - Triana",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "9667e561-46c9-459b-aaaa-cb54167a59f6",
      "tenant_id": "D0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 6h 30m",
      "product_id": "20000000-0000-0000-0000-000000000003",
      "quantity_sold": 2,
      "unit_price": 3.74,
      "total_revenue": 7.48,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 6h 30m",
      "notes": "Venta local en Seville - Triana",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "22d24b17-d63d-45d4-92ba-36087ff1eb8b",
      "tenant_id": "D0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 5h 33m",
      "product_id": "20000000-0000-0000-0000-000000000004",
      "quantity_sold": 3,
      "unit_price": 1.45,
      "total_revenue": 4.36,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 5h 33m",
      "notes": "Venta local en Seville - Triana",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "644a08ba-c78e-4e18-9f32-6cf89a0c3087",
      "tenant_id": "D0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 16h 36m",
      "product_id": "20000000-0000-0000-0000-000000000001",
      "quantity_sold": 4,
      "unit_price": 0.9,
      "total_revenue": 3.61,
      "sales_channel": "in_store",
      "payment_method": "cash",
      "created_at": "BASE_TS - 16h 36m",
      "notes": "Venta local en Seville - Triana",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "2c0d30b4-d038-4106-aa94-547e2544e103",
      "tenant_id": "D0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 15h 39m",
      "product_id": "20000000-0000-0000-0000-000000000002",
      "quantity_sold": 5,
      "unit_price": 1.39,
      "total_revenue": 6.93,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 15h 39m",
      "notes": "Venta local en Seville - Triana",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "e6a10396-acdb-4ed7-9b77-f9e8b124e071",
      "tenant_id": "D0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 14h 42m",
      "product_id": "20000000-0000-0000-0000-000000000003",
      "quantity_sold": 6,
      "unit_price": 3.74,
      "total_revenue": 22.44,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 14h 42m",
      "notes": "Venta local en Seville - Triana",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "fcddb2ce-a3b4-43b6-b6bb-775a3a9b82e2",
      "tenant_id": "D0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 13h 45m",
      "product_id": "20000000-0000-0000-0000-000000000004",
      "quantity_sold": 2,
      "unit_price": 1.45,
      "total_revenue": 2.9,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 13h 45m",
      "notes": "Venta local en Seville - Triana",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "a3e4973c-6a34-4bfb-b32b-26860de7b5d8",
      "tenant_id": "D0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 12h 48m",
      "product_id": "20000000-0000-0000-0000-000000000001",
      "quantity_sold": 3,
      "unit_price": 0.9,
      "total_revenue": 2.71,
      "sales_channel": "in_store",
      "payment_method": "cash",
      "created_at": "BASE_TS - 12h 48m",
      "notes": "Venta local en Seville - Triana",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "7ea27a36-819f-475d-a621-915e282c4502",
      "tenant_id": "D0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 11h 51m",
      "product_id": "20000000-0000-0000-0000-000000000002",
      "quantity_sold": 4,
      "unit_price": 1.39,
      "total_revenue": 5.54,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 11h 51m",
      "notes": "Venta local en Seville - Triana",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    }
  ]
}
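Each per-location sales fixture is a flat list of records keyed by product_id, so per-product revenue roll-ups need no joins. A small illustrative Python sketch (the fixture path is a stand-in; the child-location file paths are not visible in this part of the diff):

import json
from collections import defaultdict

SALES_FIXTURE = "path/to/child-location-sales.json"  # stand-in path, not a real file in this commit

def revenue_by_product(path: str) -> dict[str, float]:
    """Sum total_revenue per product_id over one location's sales fixture."""
    with open(path, encoding="utf-8") as fh:
        records = json.load(fh)["sales_data"]
    totals: dict[str, float] = defaultdict(float)
    for sale in records:
        totals[sale["product_id"]] += sale["total_revenue"]
    return dict(totals)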
@@ -0,0 +1,4 @@
{
  "orchestration_run": null,
  "alerts": []
}
@@ -0,0 +1,24 @@
{
  "location": {
    "id": "E0000000-0000-4000-a000-000000000001",
    "parent_tenant_id": "80000000-0000-4000-a000-000000000001",
    "name": "Bilbao - Casco Viejo",
    "location_code": "BIL",
    "city": "Bilbao",
    "zone": "Casco Viejo",
    "address": "Calle Somera, 8",
    "postal_code": "48005",
    "country": "España",
    "latitude": 43.2567,
    "longitude": -2.9272,
    "status": "ACTIVE",
    "opening_hours": "07:00-21:00",
    "daily_capacity": 1500,
    "storage_capacity_kg": 900,
    "created_at": "2024-06-01T00:00:00Z",
    "enterprise_location": true,
    "location_type": "retail",
    "staff_count": 8,
    "description": "Basque region location with focus on quality and local culture"
  }
}
@@ -0,0 +1,24 @@
{
  "users": [
    {
      "id": "944f50dd-b6d8-57a1-af87-20bfc1052c75",
      "tenant_id": "E0000000-0000-4000-a000-000000000001",
      "name": "Gerente Bilbao - Casco Viejo",
      "email": "gerente.e0000000-0000-4000-a000-000000000001@panaderiaartesana.es",
      "role": "admin",
      "is_active": true,
      "created_at": "BASE_TS - 180d",
      "updated_at": "BASE_TS - 180d"
    },
    {
      "id": "26e92f43-d03c-5fd7-99da-c54b319f8cb3",
      "tenant_id": "E0000000-0000-4000-a000-000000000001",
      "name": "Empleado Bilbao - Casco Viejo",
      "email": "empleado.e0000000-0000-4000-a000-000000000001@panaderiaartesana.es",
      "role": "member",
      "is_active": true,
      "created_at": "BASE_TS - 150d",
      "updated_at": "BASE_TS - 150d"
    }
  ]
}
@@ -0,0 +1,242 @@
{
  "stock": [
    {
      "id": "e85b30cf-832f-4491-a646-156dd52e9e39",
      "tenant_id": "E0000000-0000-4000-a000-000000000001",
      "ingredient_id": "10000000-0000-0000-0000-000000000001",
      "production_stage": "raw_ingredient",
      "quality_status": "APPROVED",
      "expiration_date": "BASE_TS + 1d 6h",
      "supplier_id": "40000000-0000-0000-0000-000000000001",
      "batch_number": "BIL-PRO-20250116-001",
      "created_at": "BASE_TS - 6h",
      "current_quantity": 20.0,
      "reserved_quantity": 0.0,
      "available_quantity": 20.0,
      "storage_location": "Bilbao - Casco Viejo - Display Area",
      "updated_at": "BASE_TS - 6h",
      "is_available": true,
      "is_expired": false
    },
    {
      "id": "d117af21-52d9-4015-aa85-4ff260f5c88c",
      "tenant_id": "E0000000-0000-4000-a000-000000000001",
      "ingredient_id": "10000000-0000-0000-0000-000000000002",
      "production_stage": "raw_ingredient",
      "quality_status": "APPROVED",
      "expiration_date": "BASE_TS + 1d 6h",
      "supplier_id": "40000000-0000-0000-0000-000000000001",
      "batch_number": "BIL-PRO-20250116-002",
      "created_at": "BASE_TS - 6h",
      "current_quantity": 22.5,
      "reserved_quantity": 0.0,
      "available_quantity": 22.5,
      "storage_location": "Bilbao - Casco Viejo - Display Area",
      "updated_at": "BASE_TS - 6h",
      "is_available": true,
      "is_expired": false
    },
    {
      "id": "c3c5ffa9-33bc-4a5d-9cfe-981541799ed5",
      "tenant_id": "E0000000-0000-4000-a000-000000000001",
      "ingredient_id": "10000000-0000-0000-0000-000000000003",
      "production_stage": "raw_ingredient",
      "quality_status": "APPROVED",
      "expiration_date": "BASE_TS + 1d 6h",
      "supplier_id": "40000000-0000-0000-0000-000000000001",
      "batch_number": "BIL-PRO-20250116-003",
      "created_at": "BASE_TS - 6h",
      "current_quantity": 25.0,
      "reserved_quantity": 0.0,
      "available_quantity": 25.0,
      "storage_location": "Bilbao - Casco Viejo - Display Area",
      "updated_at": "BASE_TS - 6h",
      "is_available": true,
      "is_expired": false
    },
    {
      "id": "269aaab9-06c3-4bdc-8277-5a3c659f4346",
      "tenant_id": "E0000000-0000-4000-a000-000000000001",
      "ingredient_id": "10000000-0000-0000-0000-000000000004",
      "production_stage": "raw_ingredient",
      "quality_status": "APPROVED",
      "expiration_date": "BASE_TS + 1d 6h",
      "supplier_id": "40000000-0000-0000-0000-000000000001",
      "batch_number": "BIL-PRO-20250116-004",
      "created_at": "BASE_TS - 6h",
      "current_quantity": 27.5,
      "reserved_quantity": 0.0,
      "available_quantity": 27.5,
      "storage_location": "Bilbao - Casco Viejo - Display Area",
      "updated_at": "BASE_TS - 6h",
      "is_available": true,
      "is_expired": false
    }
  ],
  "ingredients": [
    {
      "id": "10000000-0000-0000-0000-000000000001",
      "tenant_id": "E0000000-0000-4000-a000-000000000001",
      "name": "Harina de Trigo T55",
      "sku": "HAR-T55-ENT-001",
      "barcode": null,
      "product_type": "INGREDIENT",
      "ingredient_category": "FLOUR",
      "product_category": "BREAD",
      "subcategory": null,
      "description": "Harina de trigo refinada tipo 55, ideal para panes tradicionales y bollería",
      "brand": "Molinos San José - Enterprise Grade",
      "unit_of_measure": "KILOGRAMS",
      "package_size": null,
      "average_cost": 0.78,
      "last_purchase_price": null,
      "standard_cost": null,
      "low_stock_threshold": 700.0,
      "reorder_point": 1050.0,
      "reorder_quantity": null,
      "max_stock_level": null,
      "shelf_life_days": null,
      "display_life_hours": null,
      "best_before_hours": null,
      "storage_instructions": null,
      "central_baker_product_code": null,
      "delivery_days": null,
      "minimum_order_quantity": null,
      "pack_size": null,
      "is_active": true,
      "is_perishable": false,
      "allergen_info": [
        "gluten"
      ],
      "nutritional_info": null,
      "produced_locally": false,
      "recipe_id": null,
      "created_at": "BASE_TS",
      "updated_at": "BASE_TS",
      "created_by": "ae38accc-1ad4-410d-adbc-a55630908924"
    },
    {
      "id": "10000000-0000-0000-0000-000000000002",
      "tenant_id": "E0000000-0000-4000-a000-000000000001",
      "name": "Harina de Trigo T65",
      "sku": "HAR-T65-ENT-002",
      "barcode": null,
      "product_type": "INGREDIENT",
      "ingredient_category": "FLOUR",
      "product_category": "BREAD",
      "subcategory": null,
      "description": "Harina de trigo semi-integral tipo 65, perfecta para panes rústicos",
      "brand": "Molinos San José - Enterprise Grade",
      "unit_of_measure": "KILOGRAMS",
      "package_size": null,
      "average_cost": 0.87,
      "last_purchase_price": null,
      "standard_cost": null,
      "low_stock_threshold": 560.0,
      "reorder_point": 840.0,
      "reorder_quantity": null,
      "max_stock_level": null,
      "shelf_life_days": null,
      "display_life_hours": null,
      "best_before_hours": null,
      "storage_instructions": null,
      "central_baker_product_code": null,
      "delivery_days": null,
      "minimum_order_quantity": null,
      "pack_size": null,
      "is_active": true,
      "is_perishable": false,
      "allergen_info": [
        "gluten"
      ],
      "nutritional_info": null,
      "produced_locally": false,
      "recipe_id": null,
      "created_at": "BASE_TS",
      "updated_at": "BASE_TS",
      "created_by": "ae38accc-1ad4-410d-adbc-a55630908924"
    },
    {
      "id": "10000000-0000-0000-0000-000000000003",
      "tenant_id": "E0000000-0000-4000-a000-000000000001",
      "name": "Harina de Fuerza W300",
      "sku": "HAR-FUE-003",
      "barcode": null,
      "product_type": "INGREDIENT",
      "ingredient_category": "FLOUR",
      "product_category": "BREAD",
      "subcategory": null,
      "description": "Harina de gran fuerza W300, ideal para masas con alta hidratación",
      "brand": "Harinas Premium - Enterprise Grade",
      "unit_of_measure": "KILOGRAMS",
      "package_size": null,
      "average_cost": 1.06,
      "last_purchase_price": null,
      "standard_cost": null,
      "low_stock_threshold": 350.0,
      "reorder_point": 560.0,
      "reorder_quantity": null,
      "max_stock_level": null,
      "shelf_life_days": null,
      "display_life_hours": null,
      "best_before_hours": null,
      "storage_instructions": null,
      "central_baker_product_code": null,
      "delivery_days": null,
      "minimum_order_quantity": null,
      "pack_size": null,
      "is_active": true,
      "is_perishable": false,
      "allergen_info": [
        "gluten"
      ],
      "nutritional_info": null,
      "produced_locally": false,
      "recipe_id": null,
      "created_at": "BASE_TS",
      "updated_at": "BASE_TS",
      "created_by": "ae38accc-1ad4-410d-adbc-a55630908924"
    },
    {
      "id": "10000000-0000-0000-0000-000000000004",
      "tenant_id": "E0000000-0000-4000-a000-000000000001",
      "name": "Harina Integral de Trigo",
      "sku": "HAR-INT-004",
      "barcode": null,
      "product_type": "INGREDIENT",
      "ingredient_category": "FLOUR",
      "product_category": "BREAD",
      "subcategory": null,
      "description": "Harina integral 100% con salvado, rica en fibra",
      "brand": "Bio Cereales - Enterprise Grade",
      "unit_of_measure": "KILOGRAMS",
      "package_size": null,
      "average_cost": 1.1,
      "last_purchase_price": null,
      "standard_cost": null,
      "low_stock_threshold": 420.0,
      "reorder_point": 630.0,
      "reorder_quantity": null,
      "max_stock_level": null,
      "shelf_life_days": null,
      "display_life_hours": null,
      "best_before_hours": null,
      "storage_instructions": null,
      "central_baker_product_code": null,
      "delivery_days": null,
      "minimum_order_quantity": null,
      "pack_size": null,
      "is_active": true,
      "is_perishable": false,
      "allergen_info": [
        "gluten"
      ],
      "nutritional_info": null,
      "produced_locally": false,
      "recipe_id": null,
      "created_at": "BASE_TS",
      "updated_at": "BASE_TS",
      "created_by": "ae38accc-1ad4-410d-adbc-a55630908924"
    }
  ]
}
@@ -0,0 +1,5 @@
{
  "recipes": [],
  "recipe_ingredients": [],
  "recipe_instructions": []
}
@@ -0,0 +1,3 @@
{
  "suppliers": []
}
@@ -0,0 +1,75 @@
{
  "equipment": [],
  "quality_check_templates": [],
  "quality_checks": [],
  "batches": [
    {
      "id": "50000001-0000-4000-a000-000000000001",
      "tenant_id": "E0000000-0000-4000-a000-000000000001",
      "recipe_id": "30000000-0000-0000-0000-000000000001",
      "product_id": "20000000-0000-0000-0000-000000000001",
      "batch_number": "BATCH-E000-0001",
      "status": "completed",
      "quantity_produced": 50,
      "quantity_good": 50,
      "quantity_defective": 0,
      "production_date": "BASE_TS - 1d",
      "expiration_date": "BASE_TS + 2d",
      "production_line": "Linea 1",
      "shift": "morning",
      "produced_by": "ae38accc-1ad4-410d-adbc-a55630908924",
      "approved_by": "80765906-0074-4206-8f58-5867df1975fd",
      "created_at": "BASE_TS - 1d",
      "updated_at": "BASE_TS - 1d",
      "is_active": true,
      "ingredients": [
        {
          "ingredient_id": "10000000-0000-0000-0000-000000000001",
          "quantity_used": 25.0,
          "unit": "kg"
        }
      ],
      "product_name": "Baguette Tradicional",
      "planned_start_time": "BASE_TS",
      "planned_end_time": "BASE_TS + 4h",
      "actual_start_time": "BASE_TS - 1d",
      "actual_end_time": "BASE_TS - 1d + 4h",
      "planned_quantity": 50.0,
      "planned_duration_minutes": 240
    },
    {
      "id": "50000002-0000-4000-a000-000000000001",
      "tenant_id": "E0000000-0000-4000-a000-000000000001",
      "recipe_id": "30000000-0000-0000-0000-000000000002",
      "product_id": "20000000-0000-0000-0000-000000000002",
      "batch_number": "BATCH-E000-0002",
      "status": "completed",
      "quantity_produced": 60,
      "quantity_good": 60,
      "quantity_defective": 0,
      "production_date": "BASE_TS - 2d",
      "expiration_date": "BASE_TS + 1d",
      "production_line": "Linea 2",
      "shift": "morning",
      "produced_by": "ae38accc-1ad4-410d-adbc-a55630908924",
      "approved_by": "80765906-0074-4206-8f58-5867df1975fd",
      "created_at": "BASE_TS - 2d",
      "updated_at": "BASE_TS - 2d",
      "is_active": true,
      "ingredients": [
        {
          "ingredient_id": "10000000-0000-0000-0000-000000000001",
          "quantity_used": 30.0,
          "unit": "kg"
        }
      ],
      "product_name": "Croissant de Mantequilla",
      "planned_start_time": "BASE_TS",
      "planned_end_time": "BASE_TS + 4h",
      "actual_start_time": "BASE_TS - 1d",
      "actual_end_time": "BASE_TS - 1d + 4h",
      "planned_quantity": 50.0,
      "planned_duration_minutes": 240
    }
  ]
}
@@ -0,0 +1,4 @@
{
  "purchase_orders": [],
  "purchase_order_items": []
}
@@ -0,0 +1,44 @@
{
  "customers": [
    {
      "id": "60000000-0000-0000-0000-000000000001",
      "tenant_id": "E0000000-0000-4000-a000-000000000001",
      "customer_code": "CUST-001",
      "name": "Restaurante El Buen Yantar - Bilbao",
      "customer_type": "WHOLESALE",
      "contact_person": "Luis Gómez",
      "email": "compras@buenyantar.es",
      "phone": "+34 912 345 678",
      "address": "Calle Mayor, 45",
      "city": "Bilbao",
      "postal_code": "48013",
      "country": "España",
      "status": "ACTIVE",
      "total_orders": 45,
      "total_spent": 3250.75,
      "created_at": "BASE_TS",
      "notes": "Regular wholesale customer - weekly orders"
    },
    {
      "id": "60000000-0000-0000-0000-000000000002",
      "tenant_id": "E0000000-0000-4000-a000-000000000001",
      "customer_code": "CUST-002",
      "name": "Cafetería La Esquina - Bilbao",
      "customer_type": "RETAIL",
      "contact_person": "Marta Ruiz",
      "email": "cafeteria@laesquina.com",
      "phone": "+34 913 456 789",
      "address": "Plaza del Sol, 12",
      "city": "Bilbao",
      "postal_code": "48012",
      "country": "España",
      "status": "ACTIVE",
      "total_orders": 12,
      "total_spent": 850.2,
      "created_at": "BASE_TS",
      "notes": "Small retail customer - biweekly orders"
    }
  ],
  "customer_orders": [],
  "order_items": []
}
@@ -0,0 +1,229 @@
{
  "sales_data": [
    {
      "id": "6b021d81-0f78-4dda-af68-6ddbc721c06a",
      "tenant_id": "E0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 16h 0m",
      "product_id": "20000000-0000-0000-0000-000000000001",
      "quantity_sold": 2,
      "unit_price": 0.9,
      "total_revenue": 1.8,
      "sales_channel": "in_store",
      "payment_method": "cash",
      "created_at": "BASE_TS - 16h 0m",
      "notes": "Venta local en Bilbao - Casco Viejo",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "bee94849-b27c-4741-b896-491af67f24db",
      "tenant_id": "E0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 15h 3m",
      "product_id": "20000000-0000-0000-0000-000000000002",
      "quantity_sold": 3,
      "unit_price": 1.39,
      "total_revenue": 4.16,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 15h 3m",
      "notes": "Venta local en Bilbao - Casco Viejo",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "8e97a063-4ca4-4fc5-b2fc-9dd94ce04678",
      "tenant_id": "E0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 14h 6m",
      "product_id": "20000000-0000-0000-0000-000000000003",
      "quantity_sold": 4,
      "unit_price": 3.74,
      "total_revenue": 14.96,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 14h 6m",
      "notes": "Venta local en Bilbao - Casco Viejo",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "fb310fd1-27f9-4821-9d34-d1eeef8356dc",
      "tenant_id": "E0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 13h 9m",
      "product_id": "20000000-0000-0000-0000-000000000004",
      "quantity_sold": 5,
      "unit_price": 1.45,
      "total_revenue": 7.26,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 13h 9m",
      "notes": "Venta local en Bilbao - Casco Viejo",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "682a5f61-fdba-4bd5-8c1f-7e173a690521",
      "tenant_id": "E0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 12h 12m",
      "product_id": "20000000-0000-0000-0000-000000000001",
      "quantity_sold": 6,
      "unit_price": 0.9,
      "total_revenue": 5.41,
      "sales_channel": "in_store",
      "payment_method": "cash",
      "created_at": "BASE_TS - 12h 12m",
      "notes": "Venta local en Bilbao - Casco Viejo",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "b20f6751-39a1-4329-a5c9-29a71403cc4b",
      "tenant_id": "E0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 11h 15m",
      "product_id": "20000000-0000-0000-0000-000000000002",
      "quantity_sold": 2,
      "unit_price": 1.39,
      "total_revenue": 2.77,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 11h 15m",
      "notes": "Venta local en Bilbao - Casco Viejo",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "1b0f403f-e2cc-4bd7-8349-7d646dfb435b",
      "tenant_id": "E0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 10h 18m",
      "product_id": "20000000-0000-0000-0000-000000000003",
      "quantity_sold": 3,
      "unit_price": 3.74,
      "total_revenue": 11.22,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 10h 18m",
      "notes": "Venta local en Bilbao - Casco Viejo",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "6ac8c158-ead3-4fe3-8dc6-010ddc9803c9",
      "tenant_id": "E0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 9h 21m",
      "product_id": "20000000-0000-0000-0000-000000000004",
      "quantity_sold": 4,
      "unit_price": 1.45,
      "total_revenue": 5.81,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 9h 21m",
      "notes": "Venta local en Bilbao - Casco Viejo",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "e2cf5f55-71eb-489d-ae01-307cd08a0d6c",
      "tenant_id": "E0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 8h 24m",
      "product_id": "20000000-0000-0000-0000-000000000001",
      "quantity_sold": 5,
      "unit_price": 0.9,
      "total_revenue": 4.51,
      "sales_channel": "in_store",
      "payment_method": "cash",
      "created_at": "BASE_TS - 8h 24m",
      "notes": "Venta local en Bilbao - Casco Viejo",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "58fb2270-4375-4d9c-9b6d-7a8fa38c2d22",
      "tenant_id": "E0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 7h 27m",
      "product_id": "20000000-0000-0000-0000-000000000002",
      "quantity_sold": 6,
      "unit_price": 1.39,
      "total_revenue": 8.32,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 7h 27m",
      "notes": "Venta local en Bilbao - Casco Viejo",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "a354222f-8635-491a-a0da-b81e887bb205",
      "tenant_id": "E0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 6h 30m",
      "product_id": "20000000-0000-0000-0000-000000000003",
      "quantity_sold": 2,
      "unit_price": 3.74,
      "total_revenue": 7.48,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 6h 30m",
      "notes": "Venta local en Bilbao - Casco Viejo",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "ada5c135-4d10-431d-8f92-a7828c8ef6d4",
      "tenant_id": "E0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 5h 33m",
      "product_id": "20000000-0000-0000-0000-000000000004",
      "quantity_sold": 3,
      "unit_price": 1.45,
      "total_revenue": 4.36,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 5h 33m",
      "notes": "Venta local en Bilbao - Casco Viejo",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "9c01f80b-546d-4195-abb7-2a864b6c3720",
      "tenant_id": "E0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 16h 36m",
      "product_id": "20000000-0000-0000-0000-000000000001",
      "quantity_sold": 4,
      "unit_price": 0.9,
      "total_revenue": 3.61,
      "sales_channel": "in_store",
      "payment_method": "cash",
      "created_at": "BASE_TS - 16h 36m",
      "notes": "Venta local en Bilbao - Casco Viejo",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "5054f48f-53c5-40ff-bee6-1138c6185803",
      "tenant_id": "E0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 15h 39m",
      "product_id": "20000000-0000-0000-0000-000000000002",
      "quantity_sold": 5,
      "unit_price": 1.39,
      "total_revenue": 6.93,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 15h 39m",
      "notes": "Venta local en Bilbao - Casco Viejo",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    },
    {
      "id": "60849ef7-7214-46cd-91ef-2c05f902cc6f",
      "tenant_id": "E0000000-0000-4000-a000-000000000001",
      "sale_date": "BASE_TS - 14h 42m",
      "product_id": "20000000-0000-0000-0000-000000000003",
      "quantity_sold": 6,
      "unit_price": 3.74,
      "total_revenue": 22.44,
      "sales_channel": "in_store",
      "payment_method": "card",
      "created_at": "BASE_TS - 14h 42m",
      "notes": "Venta local en Bilbao - Casco Viejo",
      "enterprise_location_sale": true,
      "date": "BASE_TS"
    }
  ]
}
@@ -0,0 +1,4 @@
{
  "orchestration_run": null,
  "alerts": []
}
114
shared/demo/fixtures/enterprise/parent/01-tenant.json
Normal file
@@ -0,0 +1,114 @@
{
  "tenant": {
    "id": "80000000-0000-4000-a000-000000000001",
    "name": "Panadería Artesana España - Central",
    "subscription_tier": "enterprise",
    "tenant_type": "parent",
    "email": "central@panaderiaartesana.es",
    "subdomain": "artesana-central",
    "description": "Central production facility and parent tenant for Panadería Artesana España multi-location bakery chain",
    "is_active": true,
    "created_at": "2024-01-01T00:00:00Z",
    "updated_at": "2024-01-01T00:00:00Z"
  },
  "owner": {
    "id": "d2e3f4a5-b6c7-48d9-e0f1-a2b3c4d5e6f7",
    "name": "Director",
    "email": "director@panaderiaartesana.es",
    "role": "owner"
  },
  "subscription": {
    "id": "80000000-0000-0000-0000-000000000001",
    "tenant_id": "80000000-0000-4000-a000-000000000001",
    "plan": "enterprise",
    "status": "active",
    "monthly_price": 1999.0,
    "billing_cycle": "monthly",
    "max_users": 50,
    "max_locations": 20,
    "max_products": 5000,
    "features": {
      "multi_location_management": true,
      "centralized_inventory": true,
      "centralized_production": true,
      "bulk_procurement": true,
      "advanced_analytics": true,
      "custom_reporting": true,
      "api_access": true,
      "priority_support": true,
      "cross_location_optimization": true,
      "distribution_management": true
    },
    "trial_ends_at": "BASE_TS+60d",
    "next_billing_date": "BASE_TS+30d",
    "stripe_subscription_id": null,
    "stripe_customer_id": null,
    "cancelled_at": null,
    "cancellation_effective_date": null,
    "created_at": "BASE_TS-90d",
    "updated_at": "BASE_TS-1d",
    "is_tenant_linked": true
  },
  "children": [
    {
      "id": "A0000000-0000-4000-a000-000000000001",
      "name": "Madrid - Salamanca",
      "base_tenant_id": "A0000000-0000-4000-a000-000000000001",
      "location": {
        "city": "Madrid",
        "zone": "Salamanca",
        "latitude": 40.4284,
        "longitude": -3.6847
      },
      "description": "Premium location in upscale Salamanca district"
    },
    {
      "id": "B0000000-0000-4000-a000-000000000001",
      "name": "Barcelona - Eixample",
      "base_tenant_id": "B0000000-0000-4000-a000-000000000001",
      "location": {
        "city": "Barcelona",
        "zone": "Eixample",
        "latitude": 41.3947,
        "longitude": 2.1616
      },
      "description": "High-volume tourist and local area in central Barcelona"
    },
    {
      "id": "C0000000-0000-4000-a000-000000000001",
      "name": "Valencia - Ruzafa",
      "base_tenant_id": "C0000000-0000-4000-a000-000000000001",
      "location": {
        "city": "Valencia",
        "zone": "Ruzafa",
        "latitude": 39.4623,
        "longitude": -0.3645
      },
      "description": "Trendy artisan neighborhood with focus on quality"
    },
    {
      "id": "D0000000-0000-4000-a000-000000000001",
      "name": "Seville - Triana",
      "base_tenant_id": "D0000000-0000-4000-a000-000000000001",
      "location": {
        "city": "Seville",
        "zone": "Triana",
        "latitude": 37.3828,
        "longitude": -6.0026
      },
      "description": "Traditional Andalusian location with local specialties"
    },
    {
      "id": "E0000000-0000-4000-a000-000000000001",
      "name": "Bilbao - Casco Viejo",
      "base_tenant_id": "E0000000-0000-4000-a000-000000000001",
      "location": {
        "city": "Bilbao",
        "zone": "Casco Viejo",
        "latitude": 43.2567,
        "longitude": -2.9272
      },
      "description": "Basque region location with focus on quality and local culture"
    }
  ]
}
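In this parent fixture each entry in "children" carries an "id" equal to its "base_tenant_id", and the per-location files above point back via "parent_tenant_id". A quick consistency-check sketch over that linkage (illustrative only; the function is not part of the shipped loader):

import json

PARENT_FIXTURE = "shared/demo/fixtures/enterprise/parent/01-tenant.json"

def check_children(path: str) -> None:
    """Verify each child's id matches its base_tenant_id and differs from the parent."""
    with open(path, encoding="utf-8") as fh:
        data = json.load(fh)
    parent_id = data["tenant"]["id"]
    for child in data["children"]:
        assert child["id"] == child["base_tenant_id"], child["name"]
        assert child["id"] != parent_id, child["name"]
    print(f"{len(data['children'])} child locations linked to {parent_id}")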
274
shared/demo/fixtures/enterprise/parent/02-auth.json
Normal file
@@ -0,0 +1,274 @@
{
  "users": [
    {
      "id": "d2e3f4a5-b6c7-48d9-e0f1-a2b3c4d5e6f7",
      "tenant_id": "80000000-0000-4000-a000-000000000001",
      "name": "Director",
      "full_name": "Director",
      "email": "director@panaderiaartesana.es",
      "role": "owner",
      "is_active": true,
      "created_at": "BASE_TS - 365d",
      "updated_at": "BASE_TS - 365d"
    },
    {
      "id": "ae38accc-1ad4-410d-adbc-a55630908924",
      "tenant_id": "80000000-0000-4000-a000-000000000001",
      "name": "Produccion",
      "email": "produccion@panaderiaartesana.es",
      "role": "admin",
      "is_active": true,
      "created_at": "BASE_TS - 300d",
      "updated_at": "BASE_TS - 300d"
    },
    {
      "id": "9d04ab32-8b7f-4f71-b88f-d7bf1452a010",
      "tenant_id": "80000000-0000-4000-a000-000000000001",
      "name": "Compras",
      "email": "compras@panaderiaartesana.es",
      "role": "admin",
      "is_active": true,
      "created_at": "BASE_TS - 280d",
      "updated_at": "BASE_TS - 280d"
    },
    {
      "id": "80765906-0074-4206-8f58-5867df1975fd",
      "tenant_id": "80000000-0000-4000-a000-000000000001",
      "email": "calidad@panaderiaartesana.es",
      "first_name": "José",
      "last_name": "Martínez",
      "role": "admin",
      "department": "quality",
      "position": "Responsable de Calidad",
      "phone": "+34 916 123 459",
      "is_active": true,
      "created_at": "BASE_TS - 250d",
      "permissions": [
        "quality_control",
        "batch_approve",
        "quality_reports"
      ],
      "name": "José Martínez",
      "updated_at": "BASE_TS - 250d"
    },
    {
      "id": "f6c54d0f-5899-4952-ad94-7a492c07167a",
      "tenant_id": "80000000-0000-4000-a000-000000000001",
      "email": "logistica@panaderiaartesana.es",
      "first_name": "Laura",
      "last_name": "López",
      "role": "admin",
      "department": "logistics",
      "position": "Coordinadora de Logística",
      "phone": "+34 916 123 460",
      "is_active": true,
      "created_at": "BASE_TS - 230d",
      "permissions": [
        "distribution_manage",
        "inventory_view",
        "order_manage"
      ],
      "name": "Laura López",
      "updated_at": "BASE_TS - 230d"
    },
    {
      "id": "77621701-e794-48d9-87d7-dc8db905efc0",
      "tenant_id": "80000000-0000-4000-a000-000000000001",
      "email": "maestro1@panaderiaartesana.es",
      "first_name": "Antonio",
      "last_name": "Sánchez",
      "role": "admin",
      "department": "production",
      "position": "Maestro Panadero Principal",
      "phone": "+34 916 123 461",
      "is_active": true,
      "created_at": "BASE_TS - 320d",
      "permissions": [
        "recipe_manage",
        "production_manage",
        "training"
      ],
      "name": "Antonio Sánchez",
      "updated_at": "BASE_TS - 320d"
    },
    {
      "id": "f21dadbf-a37e-4f53-86e6-b5f34a0c792f",
      "tenant_id": "80000000-0000-4000-a000-000000000001",
      "email": "maestro2@panaderiaartesana.es",
      "first_name": "Isabel",
      "last_name": "Ruiz",
      "role": "admin",
      "department": "production",
      "position": "Maestra Panadera Senior",
      "phone": "+34 916 123 462",
      "is_active": true,
      "created_at": "BASE_TS - 280d",
      "permissions": [
        "recipe_manage",
        "production_manage",
        "training"
      ],
      "name": "Isabel Ruiz",
      "updated_at": "BASE_TS - 280d"
    },
    {
      "id": "701cb9d2-6049-4bb9-8d3a-1b3bd3aae45f",
      "tenant_id": "80000000-0000-4000-a000-000000000001",
      "email": "almacen1@panaderiaartesana.es",
      "first_name": "Francisco",
      "last_name": "Moreno",
      "role": "admin",
      "department": "warehouse",
      "position": "Supervisor de Almacén",
      "phone": "+34 916 123 463",
      "is_active": true,
      "created_at": "BASE_TS - 200d",
      "permissions": [
        "inventory_manage",
        "stock_receive",
        "stock_transfer"
      ],
      "name": "Francisco Moreno",
      "updated_at": "BASE_TS - 200d"
    },
    {
      "id": "a98bbee4-96fa-4840-9eb7-1f35c6e83a36",
      "tenant_id": "80000000-0000-4000-a000-000000000001",
      "email": "almacen2@panaderiaartesana.es",
      "first_name": "Carmen",
      "last_name": "Jiménez",
      "role": "admin",
      "department": "warehouse",
      "position": "Supervisora de Almacén Turno Noche",
      "phone": "+34 916 123 464",
      "is_active": true,
      "created_at": "BASE_TS - 180d",
      "permissions": [
        "inventory_manage",
        "stock_receive",
        "stock_transfer"
      ],
      "name": "Carmen Jiménez",
      "updated_at": "BASE_TS - 180d"
    },
    {
      "id": "022fba62-ff2a-4a38-b345-42228e11f04a",
      "tenant_id": "80000000-0000-4000-a000-000000000001",
      "email": "analisis@panaderiaartesana.es",
      "first_name": "David",
      "last_name": "González",
      "role": "admin",
      "department": "operations",
      "position": "Analista de Operaciones",
      "phone": "+34 916 123 465",
      "is_active": true,
      "created_at": "BASE_TS - 150d",
      "permissions": [
        "reports_view",
        "analytics_view",
        "forecasting_view"
      ],
      "name": "David González",
      "updated_at": "BASE_TS - 150d"
    },
    {
      "id": "ba2ce42e-efd7-46a6-aa09-d9f9afc1c63f",
      "tenant_id": "80000000-0000-4000-a000-000000000001",
      "email": "mantenimiento@panaderiaartesana.es",
      "first_name": "Pedro",
      "last_name": "Díaz",
      "role": "admin",
      "department": "maintenance",
      "position": "Técnico de Mantenimiento",
      "phone": "+34 916 123 466",
      "is_active": true,
      "created_at": "BASE_TS - 200d",
      "permissions": [
        "equipment_view",
        "maintenance_log"
      ],
      "name": "Pedro Díaz",
      "updated_at": "BASE_TS - 200d"
    },
    {
      "id": "ba8ca79b-b81e-4fe9-b064-e58a34bf0fa3",
      "tenant_id": "80000000-0000-4000-a000-000000000001",
      "email": "turno.dia@panaderiaartesana.es",
      "first_name": "Rosa",
      "last_name": "Navarro",
      "role": "admin",
      "department": "production",
      "position": "Supervisora Turno Día",
      "phone": "+34 916 123 467",
      "is_active": true,
      "created_at": "BASE_TS - 180d",
      "permissions": [
        "production_view",
        "batch_create",
        "staff_manage"
      ],
      "name": "Rosa Navarro",
      "updated_at": "BASE_TS - 180d"
    },
    {
      "id": "75e92fec-e052-4e90-bd96-804eed44926c",
      "tenant_id": "80000000-0000-4000-a000-000000000001",
      "email": "turno.tarde@panaderiaartesana.es",
      "first_name": "Manuel",
      "last_name": "Torres",
      "role": "admin",
      "department": "production",
      "position": "Supervisor Turno Tarde",
      "phone": "+34 916 123 468",
      "is_active": true,
      "created_at": "BASE_TS - 160d",
      "permissions": [
        "production_view",
        "batch_create",
        "staff_manage"
      ],
      "name": "Manuel Torres",
      "updated_at": "BASE_TS - 160d"
    },
    {
      "id": "6fec3a43-f83d-47c3-b760-54105fcbf7f1",
      "tenant_id": "80000000-0000-4000-a000-000000000001",
      "email": "turno.noche@panaderiaartesana.es",
      "first_name": "Lucía",
      "last_name": "Romero",
      "role": "admin",
      "department": "production",
      "position": "Supervisora Turno Noche",
      "phone": "+34 916 123 469",
      "is_active": true,
      "created_at": "BASE_TS - 140d",
      "permissions": [
        "production_view",
        "batch_create",
        "staff_manage"
      ],
      "name": "Lucía Romero",
      "updated_at": "BASE_TS - 140d"
    },
    {
      "id": "743fd2c8-58b8-4431-a49f-085e0c284ff0",
      "tenant_id": "80000000-0000-4000-a000-000000000001",
      "email": "it@panaderiaartesana.es",
      "first_name": "Javier",
      "last_name": "Vargas",
      "role": "admin",
      "department": "it",
      "position": "Administrador de Sistemas",
      "phone": "+34 916 123 470",
      "is_active": true,
      "created_at": "BASE_TS - 200d",
      "permissions": [
        "system_admin",
        "user_manage",
        "settings_manage"
      ],
      "name": "Javier Vargas",
      "updated_at": "BASE_TS - 200d"
    }
  ]
}
15531
shared/demo/fixtures/enterprise/parent/03-inventory.json
Normal file
File diff suppressed because it is too large
848
shared/demo/fixtures/enterprise/parent/04-recipes.json
Normal file
@@ -0,0 +1,848 @@
{
|
||||
"recipes": [
|
||||
{
|
||||
"id": "30000000-0000-0000-0000-000000000001",
|
||||
"tenant_id": "80000000-0000-4000-a000-000000000001",
|
||||
"name": "Baguette Francesa Tradicional",
|
||||
"recipe_code": null,
|
||||
"version": "1.0",
|
||||
"finished_product_id": "20000000-0000-0000-0000-000000000001",
|
||||
"description": "Baguette francesa tradicional con corteza crujiente y miga alveolada. Perfecta para acompa\u00f1ar cualquier comida.",
|
||||
"category": "Panes",
|
||||
"cuisine_type": "Francesa",
|
||||
"difficulty_level": 2,
|
||||
"yield_quantity": 10.0,
|
||||
"yield_unit": "units",
|
||||
"prep_time_minutes": 20,
|
||||
"cook_time_minutes": 25,
|
||||
"total_time_minutes": 165,
|
||||
"rest_time_minutes": 120,
|
||||
"estimated_cost_per_unit": null,
|
||||
"last_calculated_cost": null,
|
||||
"cost_calculation_date": null,
|
||||
"target_margin_percentage": null,
|
||||
"suggested_selling_price": null,
|
||||
"instructions": {
|
||||
"steps": [
|
||||
{
|
||||
"step": 1,
|
||||
"title": "Amasado",
|
||||
"description": "Mezclar harina, agua, sal y levadura. Amasar durante 15 minutos hasta obtener una masa lisa y el\u00e1stica.",
|
||||
"duration_minutes": 15
|
||||
},
|
||||
{
|
||||
"step": 2,
|
||||
"title": "Primera Fermentaci\u00f3n",
|
||||
"description": "Dejar reposar la masa en un recipiente tapado durante 60 minutos a temperatura ambiente (22-24\u00b0C).",
|
||||
"duration_minutes": 60
|
||||
},
|
||||
{
|
||||
"step": 3,
|
||||
"title": "Divisi\u00f3n y Formado",
|
||||
"description": "Dividir la masa en 10 piezas de 250g cada una. Formar las baguettes d\u00e1ndoles la forma alargada caracter\u00edstica.",
|
||||
"duration_minutes": 20
|
||||
},
|
||||
{
|
||||
"step": 4,
|
||||
"title": "Segunda Fermentaci\u00f3n",
|
||||
"description": "Colocar las baguettes en un lienzo enharinado y dejar fermentar 60 minutos m\u00e1s.",
|
||||
"duration_minutes": 60
|
||||
},
|
||||
{
|
||||
"step": 5,
|
||||
"title": "Gre\u00f1ado y Horneado",
|
||||
"description": "Hacer cortes diagonales en la superficie con una cuchilla. Hornear a 240\u00b0C con vapor inicial durante 25 minutos.",
|
||||
"duration_minutes": 25
|
||||
}
|
||||
]
|
||||
},
|
||||
"preparation_notes": "Es crucial usar vapor al inicio del horneado para lograr una corteza crujiente. La temperatura del agua debe estar entre 18-20\u00b0C.",
|
||||
"storage_instructions": "Consumir el mismo d\u00eda de producci\u00f3n. Se puede congelar despu\u00e9s del horneado.",
|
||||
"serves_count": null,
|
||||
"nutritional_info": null,
|
||||
"allergen_info": null,
|
||||
"dietary_tags": null,
|
||||
"batch_size_multiplier": 1.0,
|
||||
"minimum_batch_size": null,
|
||||
"maximum_batch_size": null,
|
||||
"optimal_production_temperature": null,
|
||||
"optimal_humidity": null,
|
||||
"quality_check_configuration": null,
|
||||
"status": "ACTIVE",
|
||||
"is_seasonal": false,
|
||||
"season_start_month": null,
|
||||
"season_end_month": null,
|
||||
"is_signature_item": true,
|
||||
"created_at": "BASE_TS",
|
||||
"updated_at": "BASE_TS",
|
||||
"created_by": "c1a2b3c4-d5e6-47a8-b9c0-d1e2f3a4b5c6",
|
||||
"updated_by": "c1a2b3c4-d5e6-47a8-b9c0-d1e2f3a4b5c6",
|
||||
"enterprise_standard": true,
|
||||
"centrally_produced": true
|
||||
},
|
||||
{
|
||||
"id": "30000000-0000-0000-0000-000000000002",
|
||||
"tenant_id": "80000000-0000-4000-a000-000000000001",
|
||||
"name": "Croissant de Mantequilla Artesanal",
|
||||
"recipe_code": null,
|
||||
"version": "1.0",
|
||||
"finished_product_id": "20000000-0000-0000-0000-000000000002",
|
||||
"description": "Croissant de mantequilla con laminado perfecto y textura hojaldrada. Elaboraci\u00f3n artesanal con mantequilla de alta calidad.",
|
||||
"category": "Boller\u00eda",
|
||||
"cuisine_type": "Francesa",
|
||||
"difficulty_level": 4,
|
||||
"yield_quantity": 12.0,
|
||||
"yield_unit": "units",
|
||||
"prep_time_minutes": 45,
|
||||
"cook_time_minutes": 18,
|
||||
"total_time_minutes": 333,
|
||||
"rest_time_minutes": 270,
|
||||
"estimated_cost_per_unit": null,
|
||||
"last_calculated_cost": null,
|
||||
"cost_calculation_date": null,
|
||||
"target_margin_percentage": null,
|
||||
"suggested_selling_price": null,
|
||||
"instructions": {
|
||||
"steps": [
|
||||
{
|
||||
"step": 1,
|
||||
"title": "Preparaci\u00f3n de la Masa Base",
|
||||
"description": "Mezclar todos los ingredientes excepto la mantequilla de laminado. Amasar hasta obtener una masa homog\u00e9nea.",
|
||||
"duration_minutes": 20
|
||||
},
|
||||
{
|
||||
"step": 2,
|
||||
"title": "Reposo en Fr\u00edo",
|
||||
"description": "Envolver la masa en film y refrigerar durante 2 horas.",
|
||||
"duration_minutes": 120
|
||||
},
|
||||
{
|
||||
"step": 3,
|
||||
"title": "Laminado",
|
||||
"description": "Extender la masa en rect\u00e1ngulo. Colocar la mantequilla en el centro y hacer 3 dobleces sencillos con 30 minutos de reposo entre cada uno.",
|
||||
"duration_minutes": 90
|
||||
},
|
||||
{
|
||||
"step": 4,
|
||||
"title": "Formado",
|
||||
"description": "Extender a 3mm de grosor, cortar tri\u00e1ngulos y enrollar para formar los croissants.",
|
||||
"duration_minutes": 25
|
||||
},
|
||||
{
|
||||
"step": 5,
|
||||
"title": "Fermentaci\u00f3n Final",
|
||||
"description": "Dejar fermentar a 26\u00b0C durante 2-3 horas hasta que dupliquen su volumen.",
|
||||
"duration_minutes": 150
|
||||
},
|
||||
{
|
||||
"step": 6,
|
||||
"title": "Horneado",
|
||||
"description": "Pintar con huevo batido y hornear a 200\u00b0C durante 18 minutos hasta dorar.",
|
||||
"duration_minutes": 18
|
||||
}
|
||||
]
|
||||
},
|
||||
"preparation_notes": "La mantequilla para laminar debe estar a 15-16\u00b0C, flexible pero no blanda. Trabajar en ambiente fresco.",
|
||||
"storage_instructions": "Consumir el d\u00eda de producci\u00f3n. Se puede congelar la masa formada antes de la fermentaci\u00f3n final.",
|
||||
"serves_count": null,
|
||||
"nutritional_info": null,
|
||||
"allergen_info": null,
|
||||
"dietary_tags": null,
|
||||
"batch_size_multiplier": 1.0,
|
||||
"minimum_batch_size": null,
|
||||
"maximum_batch_size": null,
|
||||
"optimal_production_temperature": null,
|
||||
"optimal_humidity": null,
|
||||
"quality_check_configuration": null,
|
||||
"status": "ACTIVE",
|
||||
"is_seasonal": false,
|
||||
"season_start_month": null,
|
||||
"season_end_month": null,
|
||||
"is_signature_item": true,
|
||||
"created_at": "BASE_TS",
|
||||
"updated_at": "BASE_TS",
|
||||
"created_by": "c1a2b3c4-d5e6-47a8-b9c0-d1e2f3a4b5c6",
|
||||
"updated_by": "c1a2b3c4-d5e6-47a8-b9c0-d1e2f3a4b5c6",
|
||||
"enterprise_standard": true,
|
||||
"centrally_produced": true
|
||||
},
|
||||
    {
      "id": "30000000-0000-0000-0000-000000000003",
      "tenant_id": "80000000-0000-4000-a000-000000000001",
      "name": "Pan de Pueblo con Masa Madre",
      "recipe_code": null,
      "version": "1.0",
      "finished_product_id": "20000000-0000-0000-0000-000000000003",
      "description": "Hogaza de pan r\u00fastico elaborada con masa madre natural. Corteza gruesa y miga densa con sabor ligeramente \u00e1cido.",
      "category": "Panes Artesanales",
      "cuisine_type": "Espa\u00f1ola",
      "difficulty_level": 3,
      "yield_quantity": 4.0,
      "yield_unit": "units",
      "prep_time_minutes": 30,
      "cook_time_minutes": 45,
      "total_time_minutes": 435,
      "rest_time_minutes": 360,
      "estimated_cost_per_unit": null,
      "last_calculated_cost": null,
      "cost_calculation_date": null,
      "target_margin_percentage": null,
      "suggested_selling_price": null,
      "instructions": {
        "steps": [
          {
            "step": 1,
            "title": "Autolisis",
            "description": "Mezclar harinas y agua, dejar reposar 30 minutos para desarrollar el gluten.",
            "duration_minutes": 30
          },
          {
            "step": 2,
            "title": "Incorporaci\u00f3n de Masa Madre y Sal",
            "description": "A\u00f1adir la masa madre y la sal. Amasar suavemente hasta integrar completamente.",
            "duration_minutes": 15
          },
          {
            "step": 3,
            "title": "Fermentaci\u00f3n en Bloque con Pliegues",
            "description": "Realizar 4 series de pliegues cada 30 minutos durante las primeras 2 horas. Luego dejar reposar 2 horas m\u00e1s.",
            "duration_minutes": 240
          },
          {
            "step": 4,
            "title": "Divisi\u00f3n y Preformado",
            "description": "Dividir en 4 piezas de 800g. Preformar en bolas y dejar reposar 30 minutos.",
            "duration_minutes": 30
          },
          {
            "step": 5,
            "title": "Formado Final",
            "description": "Formar las hogazas d\u00e1ndoles tensi\u00f3n superficial. Colocar en banneton o lienzo enharinado.",
            "duration_minutes": 15
          },
          {
            "step": 6,
            "title": "Fermentaci\u00f3n Final",
            "description": "Dejar fermentar a temperatura ambiente durante 2 horas o en fr\u00edo durante la noche.",
            "duration_minutes": 120
          },
          {
            "step": 7,
            "title": "Horneado",
            "description": "Hacer cortes en la superficie. Hornear a 230\u00b0C con vapor inicial durante 45 minutos.",
            "duration_minutes": 45
          }
        ]
      },
      "preparation_notes": "La masa madre debe estar activa y en su punto \u00f3ptimo. La temperatura final de la masa debe ser 24-25\u00b0C.",
      "storage_instructions": "Se conserva hasta 5-7 d\u00edas en bolsa de papel. Mejora al segundo d\u00eda.",
      "serves_count": null,
      "nutritional_info": null,
      "allergen_info": null,
      "dietary_tags": null,
      "batch_size_multiplier": 1.0,
      "minimum_batch_size": null,
      "maximum_batch_size": null,
      "optimal_production_temperature": null,
      "optimal_humidity": null,
      "quality_check_configuration": null,
      "status": "ACTIVE",
      "is_seasonal": false,
      "season_start_month": null,
      "season_end_month": null,
      "is_signature_item": true,
      "created_at": "BASE_TS",
      "updated_at": "BASE_TS",
      "created_by": "c1a2b3c4-d5e6-47a8-b9c0-d1e2f3a4b5c6",
      "updated_by": "c1a2b3c4-d5e6-47a8-b9c0-d1e2f3a4b5c6",
      "enterprise_standard": true,
      "centrally_produced": true
    },
    {
      "id": "30000000-0000-0000-0000-000000000004",
      "tenant_id": "80000000-0000-4000-a000-000000000001",
      "name": "Napolitana de Chocolate",
      "recipe_code": null,
      "version": "1.0",
      "finished_product_id": "20000000-0000-0000-0000-000000000004",
      "description": "Boller\u00eda de hojaldre rectangular rellena de chocolate. Cl\u00e1sico de las panader\u00edas espa\u00f1olas.",
      "category": "Boller\u00eda",
      "cuisine_type": "Espa\u00f1ola",
      "difficulty_level": 3,
      "yield_quantity": 16.0,
      "yield_unit": "units",
      "prep_time_minutes": 40,
      "cook_time_minutes": 15,
      "total_time_minutes": 325,
      "rest_time_minutes": 270,
      "estimated_cost_per_unit": null,
      "last_calculated_cost": null,
      "cost_calculation_date": null,
      "target_margin_percentage": null,
      "suggested_selling_price": null,
      "instructions": {
        "steps": [
          {
            "step": 1,
            "title": "Masa Base y Laminado",
            "description": "Preparar masa de hojaldre siguiendo el mismo proceso que los croissants.",
            "duration_minutes": 180
          },
          {
            "step": 2,
            "title": "Corte y Formado",
            "description": "Extender la masa y cortar rect\u00e1ngulos de 10x15cm. Colocar barritas de chocolate en el centro.",
            "duration_minutes": 20
          },
          {
            "step": 3,
            "title": "Sellado",
            "description": "Doblar la masa sobre s\u00ed misma para cubrir el chocolate. Sellar bien los bordes.",
            "duration_minutes": 20
          },
          {
            "step": 4,
            "title": "Fermentaci\u00f3n",
            "description": "Dejar fermentar a 26\u00b0C durante 90 minutos.",
            "duration_minutes": 90
          },
          {
            "step": 5,
            "title": "Horneado",
            "description": "Pintar con huevo y hornear a 190\u00b0C durante 15 minutos.",
            "duration_minutes": 15
          }
        ]
      },
      "preparation_notes": "El chocolate debe ser de buena calidad para un mejor resultado. No sobrecargar de chocolate.",
      "storage_instructions": "Consumir preferiblemente el d\u00eda de producci\u00f3n.",
      "serves_count": null,
      "nutritional_info": null,
      "allergen_info": null,
      "dietary_tags": null,
      "batch_size_multiplier": 1.0,
      "minimum_batch_size": null,
      "maximum_batch_size": null,
      "optimal_production_temperature": null,
      "optimal_humidity": null,
      "quality_check_configuration": null,
      "status": "ACTIVE",
      "is_seasonal": false,
      "season_start_month": null,
      "season_end_month": null,
      "is_signature_item": false,
      "created_at": "BASE_TS",
      "updated_at": "BASE_TS",
      "created_by": "c1a2b3c4-d5e6-47a8-b9c0-d1e2f3a4b5c6",
      "updated_by": "c1a2b3c4-d5e6-47a8-b9c0-d1e2f3a4b5c6",
      "enterprise_standard": true,
      "centrally_produced": true
    }
  ],
  "recipe_ingredients": [
    {
      "id": "473debdb-ab7c-4a79-9b41-985715695710",
      "tenant_id": "80000000-0000-4000-a000-000000000001",
      "recipe_id": "30000000-0000-0000-0000-000000000001",
      "ingredient_id": "10000000-0000-0000-0000-000000000001",
      "quantity": 1000.0,
      "unit": "g",
      "quantity_in_base_unit": null,
      "alternative_quantity": null,
      "alternative_unit": null,
      "preparation_method": "tamizada",
      "ingredient_notes": null,
      "is_optional": false,
      "ingredient_order": 1,
      "ingredient_group": "Secos",
      "substitution_options": null,
      "substitution_ratio": null,
      "unit_cost": null,
      "total_cost": null,
      "cost_updated_at": null
    },
    {
      "id": "545c7899-d893-41f4-a839-963235f128cd",
      "tenant_id": "80000000-0000-4000-a000-000000000001",
      "recipe_id": "30000000-0000-0000-0000-000000000001",
      "ingredient_id": "10000000-0000-0000-0000-000000000033",
      "quantity": 650.0,
      "unit": "ml",
      "quantity_in_base_unit": null,
      "alternative_quantity": null,
      "alternative_unit": null,
      "preparation_method": "temperatura ambiente",
      "ingredient_notes": null,
      "is_optional": false,
      "ingredient_order": 2,
      "ingredient_group": "L\u00edquidos",
      "substitution_options": null,
      "substitution_ratio": null,
      "unit_cost": null,
      "total_cost": null,
      "cost_updated_at": null
    },
    {
      "id": "4e9ec9cc-6339-4191-bad5-c52b604106c9",
      "tenant_id": "80000000-0000-4000-a000-000000000001",
      "recipe_id": "30000000-0000-0000-0000-000000000001",
      "ingredient_id": "10000000-0000-0000-0000-000000000031",
      "quantity": 20.0,
      "unit": "g",
      "quantity_in_base_unit": null,
      "alternative_quantity": null,
      "alternative_unit": null,
      "preparation_method": null,
      "ingredient_notes": null,
      "is_optional": false,
      "ingredient_order": 3,
      "ingredient_group": "Secos",
      "substitution_options": null,
      "substitution_ratio": null,
      "unit_cost": null,
      "total_cost": null,
      "cost_updated_at": null
    },
    {
      "id": "f89b85f2-e18e-451a-8048-668bcfb6bc51",
      "tenant_id": "80000000-0000-4000-a000-000000000001",
      "recipe_id": "30000000-0000-0000-0000-000000000001",
      "ingredient_id": "10000000-0000-0000-0000-000000000021",
      "quantity": 15.0,
      "unit": "g",
      "quantity_in_base_unit": null,
      "alternative_quantity": null,
      "alternative_unit": null,
      "preparation_method": "desmenuzada",
      "ingredient_notes": null,
      "is_optional": false,
      "ingredient_order": 4,
      "ingredient_group": "Fermentos",
      "substitution_options": null,
      "substitution_ratio": null,
      "unit_cost": null,
      "total_cost": null,
      "cost_updated_at": null
    },
    {
      "id": "5e25c0c8-17b9-4db1-b099-8dc459def206",
      "tenant_id": "80000000-0000-4000-a000-000000000001",
      "recipe_id": "30000000-0000-0000-0000-000000000002",
      "ingredient_id": "10000000-0000-0000-0000-000000000001",
      "quantity": 500.0,
      "unit": "g",
      "quantity_in_base_unit": null,
      "alternative_quantity": null,
      "alternative_unit": null,
      "preparation_method": null,
      "ingredient_notes": null,
      "is_optional": false,
      "ingredient_order": 1,
      "ingredient_group": "Masa base",
      "substitution_options": null,
      "substitution_ratio": null,
      "unit_cost": null,
      "total_cost": null,
      "cost_updated_at": null
    },
    {
      "id": "89a9872d-4bf4-469f-8c84-37f7bf0c9a92",
      "tenant_id": "80000000-0000-4000-a000-000000000001",
      "recipe_id": "30000000-0000-0000-0000-000000000002",
      "ingredient_id": "10000000-0000-0000-0000-000000000012",
      "quantity": 120.0,
      "unit": "ml",
      "quantity_in_base_unit": null,
      "alternative_quantity": null,
      "alternative_unit": null,
      "preparation_method": "tibia",
      "ingredient_notes": null,
      "is_optional": false,
      "ingredient_order": 2,
      "ingredient_group": "Masa base",
      "substitution_options": null,
      "substitution_ratio": null,
      "unit_cost": null,
      "total_cost": null,
      "cost_updated_at": null
    },
    {
      "id": "1843a05b-d3dd-4963-afa1-1c76fcd6922f",
      "tenant_id": "80000000-0000-4000-a000-000000000001",
      "recipe_id": "30000000-0000-0000-0000-000000000002",
      "ingredient_id": "10000000-0000-0000-0000-000000000033",
      "quantity": 80.0,
      "unit": "ml",
      "quantity_in_base_unit": null,
      "alternative_quantity": null,
      "alternative_unit": null,
      "preparation_method": null,
      "ingredient_notes": null,
      "is_optional": false,
      "ingredient_order": 3,
      "ingredient_group": "Masa base",
      "substitution_options": null,
      "substitution_ratio": null,
      "unit_cost": null,
      "total_cost": null,
      "cost_updated_at": null
    },
    {
      "id": "af984d98-3b75-458f-8fdd-02699dc33e9d",
      "tenant_id": "80000000-0000-4000-a000-000000000001",
      "recipe_id": "30000000-0000-0000-0000-000000000002",
      "ingredient_id": "10000000-0000-0000-0000-000000000032",
      "quantity": 50.0,
      "unit": "g",
      "quantity_in_base_unit": null,
      "alternative_quantity": null,
      "alternative_unit": null,
      "preparation_method": null,
      "ingredient_notes": null,
      "is_optional": false,
      "ingredient_order": 4,
      "ingredient_group": "Masa base",
      "substitution_options": null,
      "substitution_ratio": null,
      "unit_cost": null,
      "total_cost": null,
      "cost_updated_at": null
    },
    {
      "id": "b09b738f-d24c-4dde-be76-6b88ea99511e",
      "tenant_id": "80000000-0000-4000-a000-000000000001",
      "recipe_id": "30000000-0000-0000-0000-000000000002",
      "ingredient_id": "10000000-0000-0000-0000-000000000031",
      "quantity": 10.0,
      "unit": "g",
      "quantity_in_base_unit": null,
      "alternative_quantity": null,
      "alternative_unit": null,
      "preparation_method": null,
      "ingredient_notes": null,
      "is_optional": false,
      "ingredient_order": 5,
      "ingredient_group": "Masa base",
      "substitution_options": null,
      "substitution_ratio": null,
      "unit_cost": null,
      "total_cost": null,
      "cost_updated_at": null
    },
    {
      "id": "119496cd-e7e3-40a4-b298-09a434b679fc",
      "tenant_id": "80000000-0000-4000-a000-000000000001",
      "recipe_id": "30000000-0000-0000-0000-000000000002",
      "ingredient_id": "10000000-0000-0000-0000-000000000021",
      "quantity": 20.0,
      "unit": "g",
      "quantity_in_base_unit": null,
      "alternative_quantity": null,
      "alternative_unit": null,
      "preparation_method": null,
      "ingredient_notes": null,
      "is_optional": false,
      "ingredient_order": 6,
      "ingredient_group": "Masa base",
      "substitution_options": null,
      "substitution_ratio": null,
      "unit_cost": null,
      "total_cost": null,
      "cost_updated_at": null
    },
    {
      "id": "c8fe9422-3000-42b4-a74a-cb00b6277130",
      "tenant_id": "80000000-0000-4000-a000-000000000001",
      "recipe_id": "30000000-0000-0000-0000-000000000002",
      "ingredient_id": "10000000-0000-0000-0000-000000000011",
      "quantity": 25.0,
      "unit": "g",
      "quantity_in_base_unit": null,
      "alternative_quantity": null,
      "alternative_unit": null,
      "preparation_method": "en la masa",
      "ingredient_notes": null,
      "is_optional": false,
      "ingredient_order": 7,
      "ingredient_group": "Masa base",
      "substitution_options": null,
      "substitution_ratio": null,
      "unit_cost": null,
      "total_cost": null,
      "cost_updated_at": null
    },
    {
      "id": "107a15bf-d2df-4e25-95fe-aee64febf112",
      "tenant_id": "80000000-0000-4000-a000-000000000001",
      "recipe_id": "30000000-0000-0000-0000-000000000002",
      "ingredient_id": "10000000-0000-0000-0000-000000000011",
      "quantity": 250.0,
      "unit": "g",
      "quantity_in_base_unit": null,
      "alternative_quantity": null,
      "alternative_unit": null,
      "preparation_method": "para laminar (15-16\u00b0C)",
      "ingredient_notes": null,
      "is_optional": false,
      "ingredient_order": 8,
      "ingredient_group": "Laminado",
      "substitution_options": null,
      "substitution_ratio": null,
      "unit_cost": null,
      "total_cost": null,
      "cost_updated_at": null
    },
    {
      "id": "242e8508-3adf-4b11-b482-33d740bd5397",
      "tenant_id": "80000000-0000-4000-a000-000000000001",
      "recipe_id": "30000000-0000-0000-0000-000000000003",
      "ingredient_id": "10000000-0000-0000-0000-000000000002",
      "quantity": 800.0,
      "unit": "g",
      "quantity_in_base_unit": null,
      "alternative_quantity": null,
      "alternative_unit": null,
      "preparation_method": null,
      "ingredient_notes": null,
      "is_optional": false,
      "ingredient_order": 1,
      "ingredient_group": "Harinas",
      "substitution_options": null,
      "substitution_ratio": null,
      "unit_cost": null,
      "total_cost": null,
      "cost_updated_at": null
    },
    {
      "id": "ea701cf5-0c6b-45aa-9519-e4dc42a40662",
      "tenant_id": "80000000-0000-4000-a000-000000000001",
      "recipe_id": "30000000-0000-0000-0000-000000000003",
      "ingredient_id": "10000000-0000-0000-0000-000000000004",
      "quantity": 200.0,
      "unit": "g",
      "quantity_in_base_unit": null,
      "alternative_quantity": null,
      "alternative_unit": null,
      "preparation_method": null,
      "ingredient_notes": null,
      "is_optional": false,
      "ingredient_order": 2,
      "ingredient_group": "Harinas",
      "substitution_options": null,
      "substitution_ratio": null,
      "unit_cost": null,
      "total_cost": null,
      "cost_updated_at": null
    },
    {
      "id": "3a4f0c61-8451-42fe-b3bc-4b0f4527af87",
      "tenant_id": "80000000-0000-4000-a000-000000000001",
      "recipe_id": "30000000-0000-0000-0000-000000000003",
      "ingredient_id": "10000000-0000-0000-0000-000000000023",
      "quantity": 300.0,
      "unit": "g",
      "quantity_in_base_unit": null,
      "alternative_quantity": null,
      "alternative_unit": null,
      "preparation_method": "activa y alimentada",
      "ingredient_notes": null,
      "is_optional": false,
      "ingredient_order": 3,
      "ingredient_group": "Fermentos",
      "substitution_options": null,
      "substitution_ratio": null,
      "unit_cost": null,
      "total_cost": null,
      "cost_updated_at": null
    },
    {
      "id": "ededf3a3-b58a-4f10-8d12-324aa3400349",
      "tenant_id": "80000000-0000-4000-a000-000000000001",
      "recipe_id": "30000000-0000-0000-0000-000000000003",
      "ingredient_id": "10000000-0000-0000-0000-000000000033",
      "quantity": 650.0,
      "unit": "ml",
      "quantity_in_base_unit": null,
      "alternative_quantity": null,
      "alternative_unit": null,
      "preparation_method": "temperatura ambiente",
      "ingredient_notes": null,
      "is_optional": false,
      "ingredient_order": 4,
      "ingredient_group": "L\u00edquidos",
      "substitution_options": null,
      "substitution_ratio": null,
      "unit_cost": null,
      "total_cost": null,
      "cost_updated_at": null
    },
    {
      "id": "93177be7-24e5-4e97-8d46-df373d6a04bc",
      "tenant_id": "80000000-0000-4000-a000-000000000001",
      "recipe_id": "30000000-0000-0000-0000-000000000003",
      "ingredient_id": "10000000-0000-0000-0000-000000000031",
      "quantity": 22.0,
      "unit": "g",
      "quantity_in_base_unit": null,
      "alternative_quantity": null,
      "alternative_unit": null,
      "preparation_method": null,
      "ingredient_notes": null,
      "is_optional": false,
      "ingredient_order": 5,
      "ingredient_group": "Condimentos",
      "substitution_options": null,
      "substitution_ratio": null,
      "unit_cost": null,
      "total_cost": null,
      "cost_updated_at": null
    },
    {
      "id": "c27a4fbb-d451-4a14-b0e2-09e5cbd07bad",
      "tenant_id": "80000000-0000-4000-a000-000000000001",
      "recipe_id": "30000000-0000-0000-0000-000000000004",
      "ingredient_id": "10000000-0000-0000-0000-000000000001",
      "quantity": 500.0,
      "unit": "g",
      "quantity_in_base_unit": null,
      "alternative_quantity": null,
      "alternative_unit": null,
      "preparation_method": null,
      "ingredient_notes": null,
      "is_optional": false,
      "ingredient_order": 1,
      "ingredient_group": "Masa",
      "substitution_options": null,
      "substitution_ratio": null,
      "unit_cost": null,
      "total_cost": null,
      "cost_updated_at": null
    },
    {
      "id": "dd8a0784-ead1-483b-b183-21c71b692a7d",
      "tenant_id": "80000000-0000-4000-a000-000000000001",
      "recipe_id": "30000000-0000-0000-0000-000000000004",
      "ingredient_id": "10000000-0000-0000-0000-000000000011",
      "quantity": 300.0,
      "unit": "g",
      "quantity_in_base_unit": null,
      "alternative_quantity": null,
      "alternative_unit": null,
      "preparation_method": null,
      "ingredient_notes": null,
      "is_optional": false,
      "ingredient_order": 2,
      "ingredient_group": "Laminado",
      "substitution_options": null,
      "substitution_ratio": null,
      "unit_cost": null,
      "total_cost": null,
      "cost_updated_at": null
    },
    {
      "id": "6434bf78-48e5-469d-a8d0-6f4dbe5c69ca",
      "tenant_id": "80000000-0000-4000-a000-000000000001",
      "recipe_id": "30000000-0000-0000-0000-000000000004",
      "ingredient_id": "10000000-0000-0000-0000-000000000041",
      "quantity": 200.0,
      "unit": "g",
      "quantity_in_base_unit": null,
      "alternative_quantity": null,
      "alternative_unit": null,
      "preparation_method": "en barritas",
      "ingredient_notes": null,
      "is_optional": false,
      "ingredient_order": 3,
      "ingredient_group": "Relleno",
      "substitution_options": null,
      "substitution_ratio": null,
      "unit_cost": null,
      "total_cost": null,
      "cost_updated_at": null
    },
    {
      "id": "38661b92-03e9-4dcd-ac90-86832eee9455",
      "tenant_id": "80000000-0000-4000-a000-000000000001",
      "recipe_id": "30000000-0000-0000-0000-000000000004",
      "ingredient_id": "10000000-0000-0000-0000-000000000032",
      "quantity": 60.0,
      "unit": "g",
      "quantity_in_base_unit": null,
      "alternative_quantity": null,
      "alternative_unit": null,
      "preparation_method": null,
      "ingredient_notes": null,
      "is_optional": false,
      "ingredient_order": 4,
      "ingredient_group": "Masa",
      "substitution_options": null,
      "substitution_ratio": null,
      "unit_cost": null,
      "total_cost": null,
      "cost_updated_at": null
    },
    {
      "id": "1ff5fb46-3361-4978-b248-a6b3bb6592f7",
      "tenant_id": "80000000-0000-4000-a000-000000000001",
      "recipe_id": "30000000-0000-0000-0000-000000000004",
      "ingredient_id": "10000000-0000-0000-0000-000000000031",
      "quantity": 10.0,
      "unit": "g",
      "quantity_in_base_unit": null,
      "alternative_quantity": null,
      "alternative_unit": null,
      "preparation_method": null,
      "ingredient_notes": null,
      "is_optional": false,
      "ingredient_order": 5,
      "ingredient_group": "Masa",
      "substitution_options": null,
      "substitution_ratio": null,
      "unit_cost": null,
      "total_cost": null,
      "cost_updated_at": null
    },
    {
      "id": "aa85612a-f99b-4c8b-a100-08ae4a9898a5",
      "tenant_id": "80000000-0000-4000-a000-000000000001",
      "recipe_id": "30000000-0000-0000-0000-000000000004",
      "ingredient_id": "10000000-0000-0000-0000-000000000021",
      "quantity": 15.0,
      "unit": "g",
      "quantity_in_base_unit": null,
      "alternative_quantity": null,
      "alternative_unit": null,
      "preparation_method": null,
      "ingredient_notes": null,
      "is_optional": false,
      "ingredient_order": 6,
      "ingredient_group": "Masa",
      "substitution_options": null,
      "substitution_ratio": null,
      "unit_cost": null,
      "total_cost": null,
      "cost_updated_at": null
    },
    {
      "id": "33c542c8-bf36-4041-957f-765bf28cc68a",
      "tenant_id": "80000000-0000-4000-a000-000000000001",
      "recipe_id": "30000000-0000-0000-0000-000000000004",
      "ingredient_id": "10000000-0000-0000-0000-000000000012",
      "quantity": 150.0,
      "unit": "ml",
      "quantity_in_base_unit": null,
      "alternative_quantity": null,
      "alternative_unit": null,
      "preparation_method": null,
      "ingredient_notes": null,
      "is_optional": false,
      "ingredient_order": 7,
      "ingredient_group": "Masa",
      "substitution_options": null,
      "substitution_ratio": null,
      "unit_cost": null,
      "total_cost": null,
      "cost_updated_at": null
    }
  ]
}