Improve the frontend 3
@@ -504,6 +504,112 @@ class InventoryServiceClient(BaseServiceClient):
                         error=str(e), tenant_id=tenant_id)
            return None

    # ================================================================
    # BATCH OPERATIONS (NEW - for Orchestrator optimization)
    # ================================================================

    async def get_ingredients_batch(
        self,
        tenant_id: str,
        ingredient_ids: List[UUID]
    ) -> Dict[str, Any]:
        """
        Fetch multiple ingredients in a single request.

        This method reduces N API calls to 1, significantly improving
        performance when fetching data for multiple ingredients.

        Args:
            tenant_id: Tenant ID
            ingredient_ids: List of ingredient IDs to fetch

        Returns:
            Dict with 'ingredients', 'found_count', and 'missing_ids'
        """
        try:
            if not ingredient_ids:
                return {
                    'ingredients': [],
                    'found_count': 0,
                    'missing_ids': []
                }

            # Convert UUIDs to strings for JSON serialization
            ids_str = [str(id) for id in ingredient_ids]

            result = await self.post(
                "inventory/operations/ingredients/batch",
                data={"ingredient_ids": ids_str},
                tenant_id=tenant_id
            )

            if result:
                logger.info(
                    "Retrieved ingredients in batch",
                    requested=len(ingredient_ids),
                    found=result.get('found_count', 0),
                    tenant_id=tenant_id
                )

            return result or {'ingredients': [], 'found_count': 0, 'missing_ids': ids_str}

        except Exception as e:
            logger.error(
                "Error fetching ingredients in batch",
                error=str(e),
                count=len(ingredient_ids),
                tenant_id=tenant_id
            )
            return {'ingredients': [], 'found_count': 0, 'missing_ids': [str(id) for id in ingredient_ids]}

    async def get_stock_levels_batch(
        self,
        tenant_id: str,
        ingredient_ids: List[UUID]
    ) -> Dict[str, float]:
        """
        Fetch stock levels for multiple ingredients in a single request.

        Args:
            tenant_id: Tenant ID
            ingredient_ids: List of ingredient IDs

        Returns:
            Dict mapping ingredient_id (str) to stock level (float)
        """
        try:
            if not ingredient_ids:
                return {}

            # Convert UUIDs to strings for JSON serialization
            ids_str = [str(id) for id in ingredient_ids]

            result = await self.post(
                "inventory/operations/stock-levels/batch",
                data={"ingredient_ids": ids_str},
                tenant_id=tenant_id
            )

            stock_levels = result.get('stock_levels', {}) if result else {}

            logger.info(
                "Retrieved stock levels in batch",
                requested=len(ingredient_ids),
                found=len(stock_levels),
                tenant_id=tenant_id
            )

            return stock_levels

        except Exception as e:
            logger.error(
                "Error fetching stock levels in batch",
                error=str(e),
                count=len(ingredient_ids),
                tenant_id=tenant_id
            )
            return {}
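
The point of these batch endpoints is to collapse a per-ingredient fetch loop into a single round-trip. A minimal orchestrator-side sketch, assuming an already configured `InventoryServiceClient`; the IDs are random placeholders and the commented single-item call is illustrative, not a real method:

```python
import asyncio
from uuid import uuid4

async def load_ingredients(client, tenant_id: str) -> dict:
    ingredient_ids = [uuid4() for _ in range(50)]  # placeholder IDs

    # Before: 50 sequential requests, one per ingredient (illustrative only)
    # ingredients = [await client.get_ingredient(tenant_id, i) for i in ingredient_ids]

    # After: one request for all 50
    batch = await client.get_ingredients_batch(tenant_id, ingredient_ids)
    if batch['missing_ids']:
        print(f"{len(batch['missing_ids'])} of {len(ingredient_ids)} not found")
    # Assumes each returned ingredient dict carries an 'id' field
    return {ing['id']: ing for ing in batch['ingredients']}
```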
    # ================================================================
    # UTILITY METHODS
    # ================================================================

shared/clients/procurement_client.py  (new file, 486 lines)
@@ -0,0 +1,486 @@
"""
Procurement Service Client - ENHANCED VERSION
Adds support for advanced replenishment planning endpoints

NEW METHODS:
- generate_replenishment_plan()
- get_replenishment_plan()
- list_replenishment_plans()
- project_inventory()
- get_inventory_projections()
- calculate_safety_stock()
- evaluate_supplier_selection()
- get_supplier_allocations()
- get_replenishment_analytics()
"""

import structlog
from typing import Dict, Any, Optional, List
from uuid import UUID
from datetime import date
from shared.clients.base_service_client import BaseServiceClient
from shared.config.base import BaseServiceSettings

logger = structlog.get_logger()


class ProcurementServiceClient(BaseServiceClient):
    """Enhanced client for communicating with the Procurement Service"""

    def __init__(self, config: BaseServiceSettings):
        super().__init__("procurement", config)

    def get_service_base_path(self) -> str:
        return "/api/v1"

    # ================================================================
    # ORIGINAL PROCUREMENT PLANNING (Kept for backward compatibility)
    # ================================================================

    async def auto_generate_procurement(
        self,
        tenant_id: str,
        forecast_data: Dict[str, Any],
        production_schedule_id: Optional[str] = None,
        target_date: Optional[str] = None,
        auto_create_pos: bool = False,
        auto_approve_pos: bool = False,
        inventory_data: Optional[Dict[str, Any]] = None,
        suppliers_data: Optional[Dict[str, Any]] = None,
        recipes_data: Optional[Dict[str, Any]] = None
    ) -> Optional[Dict[str, Any]]:
        """
        Auto-generate procurement plan from forecast data (called by orchestrator).

        NOW USES ENHANCED PLANNING INTERNALLY

        Args:
            tenant_id: Tenant ID
            forecast_data: Forecast data
            production_schedule_id: Optional production schedule ID
            target_date: Optional target date
            auto_create_pos: Auto-create purchase orders
            auto_approve_pos: Auto-approve purchase orders
            inventory_data: Optional inventory snapshot (NEW - to avoid duplicate fetching)
            suppliers_data: Optional suppliers snapshot (NEW - to avoid duplicate fetching)
            recipes_data: Optional recipes snapshot (NEW - to avoid duplicate fetching)
        """
        try:
            path = f"/tenants/{tenant_id}/procurement/auto-generate"
            payload = {
                "forecast_data": forecast_data,
                "production_schedule_id": production_schedule_id,
                "target_date": target_date,
                "auto_create_pos": auto_create_pos,
                "auto_approve_pos": auto_approve_pos
            }

            # NEW: Include cached data if provided
            if inventory_data:
                payload["inventory_data"] = inventory_data
            if suppliers_data:
                payload["suppliers_data"] = suppliers_data
            if recipes_data:
                payload["recipes_data"] = recipes_data

            logger.info("Calling auto_generate_procurement (enhanced)",
                        tenant_id=tenant_id,
                        has_forecast_data=bool(forecast_data))

            response = await self._post(path, json=payload)
            return response

        except Exception as e:
            logger.error("Error calling auto_generate_procurement",
                         tenant_id=tenant_id, error=str(e))
            return None

    # ================================================================
    # NEW: REPLENISHMENT PLANNING ENDPOINTS
    # ================================================================

    async def generate_replenishment_plan(
        self,
        tenant_id: str,
        requirements: List[Dict[str, Any]],
        forecast_id: Optional[str] = None,
        production_schedule_id: Optional[str] = None,
        projection_horizon_days: int = 7,
        service_level: float = 0.95,
        buffer_days: int = 1
    ) -> Optional[Dict[str, Any]]:
        """
        Generate advanced replenishment plan with full planning algorithms.

        Args:
            tenant_id: Tenant ID
            requirements: List of ingredient requirements
            forecast_id: Optional forecast ID reference
            production_schedule_id: Optional production schedule ID reference
            projection_horizon_days: Days to project ahead (default 7)
            service_level: Target service level for safety stock (default 0.95)
            buffer_days: Buffer days for lead time (default 1)

        Returns:
            Dict with complete replenishment plan including:
            - plan_id: Plan ID
            - total_items: Total items in plan
            - urgent_items: Number of urgent items
            - high_risk_items: Number of high-risk items
            - items: List of plan items with full metadata
        """
        try:
            path = f"/tenants/{tenant_id}/replenishment-plans/generate"
            payload = {
                "tenant_id": tenant_id,
                "requirements": requirements,
                "forecast_id": forecast_id,
                "production_schedule_id": production_schedule_id,
                "projection_horizon_days": projection_horizon_days,
                "service_level": service_level,
                "buffer_days": buffer_days
            }

            logger.info("Generating replenishment plan",
                        tenant_id=tenant_id,
                        requirements_count=len(requirements))

            response = await self._post(path, json=payload)
            return response

        except Exception as e:
            logger.error("Error generating replenishment plan",
                         tenant_id=tenant_id, error=str(e))
            return None

    async def get_replenishment_plan(
        self,
        tenant_id: str,
        plan_id: str
    ) -> Optional[Dict[str, Any]]:
        """
        Get replenishment plan by ID.

        Args:
            tenant_id: Tenant ID
            plan_id: Plan ID

        Returns:
            Dict with complete plan details
        """
        try:
            path = f"/tenants/{tenant_id}/replenishment-plans/{plan_id}"

            logger.debug("Getting replenishment plan",
                         tenant_id=tenant_id, plan_id=plan_id)

            response = await self._get(path)
            return response

        except Exception as e:
            logger.error("Error getting replenishment plan",
                         tenant_id=tenant_id, plan_id=plan_id, error=str(e))
            return None

    async def list_replenishment_plans(
        self,
        tenant_id: str,
        skip: int = 0,
        limit: int = 100,
        status: Optional[str] = None
    ) -> Optional[List[Dict[str, Any]]]:
        """
        List replenishment plans for a tenant.

        Args:
            tenant_id: Tenant ID
            skip: Number of records to skip (pagination)
            limit: Maximum number of records to return
            status: Optional status filter

        Returns:
            List of plan summaries
        """
        try:
            path = f"/tenants/{tenant_id}/replenishment-plans"
            params = {"skip": skip, "limit": limit}
            if status:
                params["status"] = status

            logger.debug("Listing replenishment plans",
                         tenant_id=tenant_id, skip=skip, limit=limit)

            response = await self._get(path, params=params)
            return response

        except Exception as e:
            logger.error("Error listing replenishment plans",
                         tenant_id=tenant_id, error=str(e))
            return None

    # ================================================================
    # NEW: INVENTORY PROJECTION ENDPOINTS
    # ================================================================

    async def project_inventory(
        self,
        tenant_id: str,
        ingredient_id: str,
        ingredient_name: str,
        current_stock: float,
        unit_of_measure: str,
        daily_demand: List[Dict[str, Any]],
        scheduled_receipts: Optional[List[Dict[str, Any]]] = None,
        projection_horizon_days: int = 7
    ) -> Optional[Dict[str, Any]]:
        """
        Project inventory levels to identify future stockouts.

        Args:
            tenant_id: Tenant ID
            ingredient_id: Ingredient ID
            ingredient_name: Ingredient name
            current_stock: Current stock level
            unit_of_measure: Unit of measure
            daily_demand: List of daily demand forecasts
            scheduled_receipts: List of scheduled receipts (POs, production)
            projection_horizon_days: Days to project

        Returns:
            Dict with inventory projection including:
            - daily_projections: Day-by-day projection
            - stockout_days: Number of stockout days
            - stockout_risk: Risk level (low/medium/high/critical)
        """
        try:
            path = f"/tenants/{tenant_id}/replenishment-plans/inventory-projections/project"
            payload = {
                "ingredient_id": ingredient_id,
                "ingredient_name": ingredient_name,
                "current_stock": current_stock,
                "unit_of_measure": unit_of_measure,
                "daily_demand": daily_demand,
                "scheduled_receipts": scheduled_receipts or [],
                "projection_horizon_days": projection_horizon_days
            }

            logger.info("Projecting inventory",
                        tenant_id=tenant_id, ingredient_id=ingredient_id)

            response = await self._post(path, json=payload)
            return response

        except Exception as e:
            logger.error("Error projecting inventory",
                         tenant_id=tenant_id, error=str(e))
            return None

    async def get_inventory_projections(
        self,
        tenant_id: str,
        ingredient_id: Optional[str] = None,
        projection_date: Optional[str] = None,
        stockout_only: bool = False,
        skip: int = 0,
        limit: int = 100
    ) -> Optional[List[Dict[str, Any]]]:
        """
        Get inventory projections.

        Args:
            tenant_id: Tenant ID
            ingredient_id: Optional ingredient ID filter
            projection_date: Optional date filter
            stockout_only: Only return projections with stockouts
            skip: Pagination skip
            limit: Pagination limit

        Returns:
            List of inventory projections
        """
        try:
            path = f"/tenants/{tenant_id}/replenishment-plans/inventory-projections"
            params = {
                "skip": skip,
                "limit": limit,
                "stockout_only": stockout_only
            }
            if ingredient_id:
                params["ingredient_id"] = ingredient_id
            if projection_date:
                params["projection_date"] = projection_date

            response = await self._get(path, params=params)
            return response

        except Exception as e:
            logger.error("Error getting inventory projections",
                         tenant_id=tenant_id, error=str(e))
            return None

    # ================================================================
    # NEW: SAFETY STOCK CALCULATION
    # ================================================================

    async def calculate_safety_stock(
        self,
        tenant_id: str,
        ingredient_id: str,
        daily_demands: List[float],
        lead_time_days: int,
        service_level: float = 0.95
    ) -> Optional[Dict[str, Any]]:
        """
        Calculate dynamic safety stock.

        Args:
            tenant_id: Tenant ID
            ingredient_id: Ingredient ID
            daily_demands: Historical daily demands
            lead_time_days: Supplier lead time
            service_level: Target service level (0-1)

        Returns:
            Dict with safety stock calculation including:
            - safety_stock_quantity: Calculated safety stock
            - calculation_method: Method used
            - confidence: Confidence level
            - reasoning: Explanation
        """
        try:
            path = f"/tenants/{tenant_id}/replenishment-plans/safety-stock/calculate"
            payload = {
                "ingredient_id": ingredient_id,
                "daily_demands": daily_demands,
                "lead_time_days": lead_time_days,
                "service_level": service_level
            }

            response = await self._post(path, json=payload)
            return response

        except Exception as e:
            logger.error("Error calculating safety stock",
                         tenant_id=tenant_id, error=str(e))
            return None

    # ================================================================
    # NEW: SUPPLIER SELECTION
    # ================================================================

    async def evaluate_supplier_selection(
        self,
        tenant_id: str,
        ingredient_id: str,
        ingredient_name: str,
        required_quantity: float,
        supplier_options: List[Dict[str, Any]]
    ) -> Optional[Dict[str, Any]]:
        """
        Evaluate supplier options using multi-criteria analysis.

        Args:
            tenant_id: Tenant ID
            ingredient_id: Ingredient ID
            ingredient_name: Ingredient name
            required_quantity: Quantity needed
            supplier_options: List of supplier options with pricing, lead time, etc.

        Returns:
            Dict with supplier selection result including:
            - allocations: List of supplier allocations
            - total_cost: Total cost
            - selection_strategy: Strategy used (single/dual/multi)
            - diversification_applied: Whether diversification was applied
        """
        try:
            path = f"/tenants/{tenant_id}/replenishment-plans/supplier-selections/evaluate"
            payload = {
                "ingredient_id": ingredient_id,
                "ingredient_name": ingredient_name,
                "required_quantity": required_quantity,
                "supplier_options": supplier_options
            }

            response = await self._post(path, json=payload)
            return response

        except Exception as e:
            logger.error("Error evaluating supplier selection",
                         tenant_id=tenant_id, error=str(e))
            return None

    async def get_supplier_allocations(
        self,
        tenant_id: str,
        requirement_id: Optional[str] = None,
        supplier_id: Optional[str] = None,
        skip: int = 0,
        limit: int = 100
    ) -> Optional[List[Dict[str, Any]]]:
        """
        Get supplier allocations.

        Args:
            tenant_id: Tenant ID
            requirement_id: Optional requirement ID filter
            supplier_id: Optional supplier ID filter
            skip: Pagination skip
            limit: Pagination limit

        Returns:
            List of supplier allocations
        """
        try:
            path = f"/tenants/{tenant_id}/replenishment-plans/supplier-allocations"
            params = {"skip": skip, "limit": limit}
            if requirement_id:
                params["requirement_id"] = requirement_id
            if supplier_id:
                params["supplier_id"] = supplier_id

            response = await self._get(path, params=params)
            return response

        except Exception as e:
            logger.error("Error getting supplier allocations",
                         tenant_id=tenant_id, error=str(e))
            return None

    # ================================================================
    # NEW: ANALYTICS
    # ================================================================

    async def get_replenishment_analytics(
        self,
        tenant_id: str,
        start_date: Optional[str] = None,
        end_date: Optional[str] = None
    ) -> Optional[Dict[str, Any]]:
        """
        Get replenishment planning analytics.

        Args:
            tenant_id: Tenant ID
            start_date: Optional start date filter
            end_date: Optional end date filter

        Returns:
            Dict with analytics including:
            - total_plans: Total plans created
            - total_items_planned: Total items
            - urgent_items_percentage: % of urgent items
            - stockout_prevention_rate: Effectiveness metric
        """
        try:
            path = f"/tenants/{tenant_id}/replenishment-plans/analytics"
            params = {}
            if start_date:
                params["start_date"] = start_date
            if end_date:
                params["end_date"] = end_date

            response = await self._get(path, params=params)
            return response

        except Exception as e:
            logger.error("Error getting replenishment analytics",
                         tenant_id=tenant_id, error=str(e))
            return None
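
For callers, the main entry point above is `generate_replenishment_plan`. A usage sketch; the requirement shape and ingredient ID are assumptions (the service's request schema is authoritative), and note the client maps all errors to `None`:

```python
async def plan_replenishment(client: ProcurementServiceClient, tenant_id: str) -> None:
    requirements = [
        {   # assumed shape - check the procurement service schema
            "ingredient_id": "00000000-0000-0000-0000-000000000001",
            "quantity": 120.0,
            "unit_of_measure": "kg",
        },
    ]
    plan = await client.generate_replenishment_plan(
        tenant_id=tenant_id,
        requirements=requirements,
        projection_horizon_days=7,
        service_level=0.95,
        buffer_days=1,
    )
    if plan is None:  # errors are swallowed and surfaced as None
        raise RuntimeError("replenishment planning failed")
    print(plan["plan_id"], f"{plan['urgent_items']}/{plan['total_items']} urgent")
```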
@@ -26,6 +26,66 @@ class ProductionServiceClient(BaseServiceClient):
    # PRODUCTION PLANNING
    # ================================================================

    async def generate_schedule(
        self,
        tenant_id: str,
        forecast_data: Dict[str, Any],
        inventory_data: Optional[Dict[str, Any]] = None,
        recipes_data: Optional[Dict[str, Any]] = None,
        target_date: Optional[str] = None,
        planning_horizon_days: int = 1
    ) -> Optional[Dict[str, Any]]:
        """
        Generate production schedule (called by Orchestrator).

        Args:
            tenant_id: Tenant ID
            forecast_data: Forecast data from forecasting service
            inventory_data: Optional inventory snapshot (NEW - to avoid duplicate fetching)
            recipes_data: Optional recipes snapshot (NEW - to avoid duplicate fetching)
            target_date: Optional target date
            planning_horizon_days: Number of days to plan

        Returns:
            Dict with schedule_id, batches_created, etc.
        """
        try:
            request_data = {
                "forecast_data": forecast_data,
                "target_date": target_date,
                "planning_horizon_days": planning_horizon_days
            }

            # NEW: Include cached data if provided
            if inventory_data:
                request_data["inventory_data"] = inventory_data
            if recipes_data:
                request_data["recipes_data"] = recipes_data

            result = await self.post(
                "production/generate-schedule",
                data=request_data,
                tenant_id=tenant_id
            )

            if result:
                logger.info(
                    "Generated production schedule",
                    schedule_id=result.get('schedule_id'),
                    batches_created=result.get('batches_created', 0),
                    tenant_id=tenant_id
                )

            return result

        except Exception as e:
            logger.error(
                "Error generating production schedule",
                error=str(e),
                tenant_id=tenant_id
            )
            return None

    async def get_production_requirements(self, tenant_id: str, date: Optional[str] = None) -> Optional[Dict[str, Any]]:
        """Get production requirements for procurement planning"""
        try:
@@ -28,7 +28,7 @@ class SuppliersServiceClient(BaseServiceClient):
    async def get_supplier_by_id(self, tenant_id: str, supplier_id: str) -> Optional[Dict[str, Any]]:
        """Get supplier details by ID"""
        try:
-            result = await self.get(f"suppliers/list/{supplier_id}", tenant_id=tenant_id)
+            result = await self.get(f"suppliers/{supplier_id}", tenant_id=tenant_id)
            if result:
                logger.info("Retrieved supplier details from suppliers service",
                            supplier_id=supplier_id, tenant_id=tenant_id)
@@ -435,4 +435,4 @@ class SuppliersServiceClient(BaseServiceClient):
# Factory function for dependency injection
def create_suppliers_client(config: BaseServiceSettings) -> SuppliersServiceClient:
    """Create suppliers service client instance"""
-    return SuppliersServiceClient(config)
+    return SuppliersServiceClient(config)
@@ -235,6 +235,7 @@ class BaseServiceSettings(BaseSettings):
    NOMINATIM_SERVICE_URL: str = os.getenv("NOMINATIM_SERVICE_URL", "http://nominatim:8080")
    DEMO_SESSION_SERVICE_URL: str = os.getenv("DEMO_SESSION_SERVICE_URL", "http://demo-session-service:8000")
    ALERT_PROCESSOR_SERVICE_URL: str = os.getenv("ALERT_PROCESSOR_SERVICE_URL", "http://alert-processor-api:8010")
+    PROCUREMENT_SERVICE_URL: str = os.getenv("PROCUREMENT_SERVICE_URL", "http://procurement-service:8000")

    # HTTP Client Settings
    HTTP_TIMEOUT: int = int(os.getenv("HTTP_TIMEOUT", "30"))
@@ -1,9 +1,12 @@
"""
Enhanced Base Database Configuration for All Microservices
Provides DatabaseManager with connection pooling, health checks, and multi-database support

+Fixed: SSL configuration now uses connect_args instead of URL parameters to avoid asyncpg parameter parsing issues
"""

import os
+import ssl
from typing import Optional, Dict, Any, List
from sqlalchemy import create_engine, text
from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine, async_sessionmaker
@@ -43,14 +46,17 @@ class DatabaseManager:
        connect_timeout: int = 30,
        **engine_kwargs
    ):
-        # Add SSL parameters to database URL if PostgreSQL
-        if "postgresql" in database_url.lower() and "ssl" not in database_url.lower():
-            separator = "&" if "?" in database_url else "?"
-            # asyncpg uses 'ssl=require' or 'ssl=verify-full', not 'sslmode'
-            database_url = f"{database_url}{separator}ssl=require"
-            logger.info(f"SSL enforcement added to database URL for {service_name}")
-
        self.database_url = database_url

+        # Configure SSL for PostgreSQL via connect_args instead of URL parameters
+        # This avoids asyncpg parameter parsing issues
+        self.use_ssl = False
+        if "postgresql" in database_url.lower():
+            # Check if SSL is already configured in URL or should be enabled
+            if "ssl" not in database_url.lower() and "sslmode" not in database_url.lower():
+                # Enable SSL for production, but allow override via URL
+                self.use_ssl = True
+                logger.info(f"SSL will be enabled for PostgreSQL connection: {service_name}")
        self.service_name = service_name
        self.pool_size = pool_size
        self.max_overflow = max_overflow
@@ -58,13 +64,27 @@ class DatabaseManager:
        # Configure pool for async engines
        # Note: SQLAlchemy 2.0 async engines automatically use AsyncAdaptedQueuePool
        # We should NOT specify poolclass for async engines unless using StaticPool for SQLite

+        # Prepare connect_args for asyncpg
+        connect_args = {"timeout": connect_timeout}
+
+        # Add SSL configuration if needed (for asyncpg driver)
+        if self.use_ssl and "asyncpg" in database_url.lower():
+            # Create SSL context that doesn't verify certificates (for local development)
+            # In production, you should use a proper SSL context with certificate verification
+            ssl_context = ssl.create_default_context()
+            ssl_context.check_hostname = False
+            ssl_context.verify_mode = ssl.CERT_NONE
+            connect_args["ssl"] = ssl_context
+            logger.info(f"SSL enabled with relaxed verification for {service_name}")

        engine_config = {
            "echo": echo,
            "pool_pre_ping": pool_pre_ping,
            "pool_recycle": pool_recycle,
            "pool_size": pool_size,
            "max_overflow": max_overflow,
-            "connect_args": {"command_timeout": connect_timeout},
+            "connect_args": connect_args,
            **engine_kwargs
        }
@@ -342,12 +362,16 @@ def init_legacy_compatibility(database_url: str):
    """Initialize legacy global variables for backward compatibility"""
    global engine, AsyncSessionLocal

-    # Add SSL parameters to database URL if PostgreSQL
-    if "postgresql" in database_url.lower() and "ssl" not in database_url.lower():
-        separator = "&" if "?" in database_url else "?"
-        # asyncpg uses 'ssl=require' or 'ssl=verify-full', not 'sslmode'
-        database_url = f"{database_url}{separator}ssl=require"
-        logger.info("SSL enforcement added to legacy database URL")
+    # Configure SSL for PostgreSQL if needed
+    connect_args = {}
+    if "postgresql" in database_url.lower() and "asyncpg" in database_url.lower():
+        if "ssl" not in database_url.lower() and "sslmode" not in database_url.lower():
+            # Create SSL context that doesn't verify certificates (for local development)
+            ssl_context = ssl.create_default_context()
+            ssl_context.check_hostname = False
+            ssl_context.verify_mode = ssl.CERT_NONE
+            connect_args["ssl"] = ssl_context
+            logger.info("SSL enabled with relaxed verification for legacy database connection")

    engine = create_async_engine(
        database_url,
@@ -355,7 +379,8 @@ def init_legacy_compatibility(database_url: str):
        pool_pre_ping=True,
        pool_recycle=300,
        pool_size=20,
-        max_overflow=30
+        max_overflow=30,
+        connect_args=connect_args
    )

    AsyncSessionLocal = async_sessionmaker(
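
The relaxed context above deliberately skips certificate checks for local development, as its comments say. A sketch of the verifying variant for production, assuming a CA bundle path (the path shown is a placeholder):

```python
import ssl

def make_verifying_ssl_context(ca_file: str = "/etc/ssl/certs/db-ca.pem") -> ssl.SSLContext:
    # Verify the server certificate against a known CA and check the hostname
    ctx = ssl.create_default_context(cafile=ca_file)
    ctx.check_hostname = True
    ctx.verify_mode = ssl.CERT_REQUIRED
    return ctx

# connect_args["ssl"] = make_verifying_ssl_context()  # drop-in for the relaxed context
```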

shared/utils/circuit_breaker.py  (new file, 168 lines)
@@ -0,0 +1,168 @@
"""
Circuit Breaker Pattern Implementation

Prevents cascading failures by stopping requests to failing services
and allowing them time to recover.
"""

import asyncio
import time
from enum import Enum
from typing import Callable, Any, Optional
from datetime import datetime, timedelta
import logging

logger = logging.getLogger(__name__)


class CircuitState(str, Enum):
    """Circuit breaker states"""
    CLOSED = "closed"          # Normal operation
    OPEN = "open"              # Circuit is open, requests fail immediately
    HALF_OPEN = "half_open"    # Testing if service has recovered


class CircuitBreakerOpenError(Exception):
    """Raised when circuit breaker is open"""
    pass


class CircuitBreaker:
    """
    Circuit Breaker implementation for protecting service calls.

    States:
    - CLOSED: Normal operation, requests pass through
    - OPEN: Too many failures, requests fail immediately
    - HALF_OPEN: Testing recovery, limited requests allowed

    Args:
        failure_threshold: Number of failures before opening circuit
        timeout_duration: Seconds to wait before attempting recovery
        success_threshold: Successful calls needed in HALF_OPEN to close circuit
        expected_exceptions: Tuple of exceptions that count as failures
    """

    def __init__(
        self,
        failure_threshold: int = 5,
        timeout_duration: int = 60,
        success_threshold: int = 2,
        expected_exceptions: tuple = (Exception,)
    ):
        self.failure_threshold = failure_threshold
        self.timeout_duration = timeout_duration
        self.success_threshold = success_threshold
        self.expected_exceptions = expected_exceptions

        self._state = CircuitState.CLOSED
        self._failure_count = 0
        self._success_count = 0
        self._last_failure_time: Optional[datetime] = None
        self._next_attempt_time: Optional[datetime] = None

    @property
    def state(self) -> CircuitState:
        """Get current circuit state"""
        if self._state == CircuitState.OPEN and self._should_attempt_reset():
            self._state = CircuitState.HALF_OPEN
            self._success_count = 0
            logger.info("Circuit breaker entering HALF_OPEN state")
        return self._state

    def _should_attempt_reset(self) -> bool:
        """Check if enough time has passed to attempt reset"""
        if self._next_attempt_time is None:
            return False
        return datetime.now() >= self._next_attempt_time

    async def call(self, func: Callable, *args, **kwargs) -> Any:
        """
        Execute function with circuit breaker protection.

        Args:
            func: Function to execute
            *args: Positional arguments for func
            **kwargs: Keyword arguments for func

        Returns:
            Result of func execution

        Raises:
            CircuitBreakerOpenError: If circuit is open
            Exception: Original exception from func if circuit is closed
        """
        if self.state == CircuitState.OPEN:
            raise CircuitBreakerOpenError(
                f"Circuit breaker is OPEN. Next attempt at {self._next_attempt_time}"
            )

        try:
            # Execute the function
            if asyncio.iscoroutinefunction(func):
                result = await func(*args, **kwargs)
            else:
                result = func(*args, **kwargs)

            # Success
            self._on_success()
            return result

        except self.expected_exceptions as e:
            # Expected failure
            self._on_failure()
            raise

    def _on_success(self):
        """Handle successful call"""
        if self._state == CircuitState.HALF_OPEN:
            self._success_count += 1
            if self._success_count >= self.success_threshold:
                self._close_circuit()
        else:
            # In CLOSED state, reset failure count on success
            self._failure_count = 0

    def _on_failure(self):
        """Handle failed call"""
        self._failure_count += 1
        self._last_failure_time = datetime.now()

        if self._state == CircuitState.HALF_OPEN:
            # Failure in HALF_OPEN returns to OPEN
            self._open_circuit()
        elif self._failure_count >= self.failure_threshold:
            # Too many failures, open the circuit
            self._open_circuit()

    def _open_circuit(self):
        """Open the circuit"""
        self._state = CircuitState.OPEN
        self._next_attempt_time = datetime.now() + timedelta(seconds=self.timeout_duration)
        logger.warning(
            f"Circuit breaker opened after {self._failure_count} failures. "
            f"Next attempt at {self._next_attempt_time}"
        )

    def _close_circuit(self):
        """Close the circuit"""
        self._state = CircuitState.CLOSED
        self._failure_count = 0
        self._success_count = 0
        self._next_attempt_time = None
        logger.info("Circuit breaker closed after successful recovery")

    def reset(self):
        """Manually reset circuit breaker to CLOSED state"""
        self._close_circuit()
        logger.info("Circuit breaker manually reset")

    def get_stats(self) -> dict:
        """Get circuit breaker statistics"""
        return {
            "state": self.state.value,
            "failure_count": self._failure_count,
            "success_count": self._success_count,
            "last_failure_time": self._last_failure_time.isoformat() if self._last_failure_time else None,
            "next_attempt_time": self._next_attempt_time.isoformat() if self._next_attempt_time else None
        }
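
A usage sketch for the breaker, guarding an HTTP health check; `httpx` stands in for whichever async client the services actually use:

```python
import asyncio
import httpx  # assumed dependency for the example

breaker = CircuitBreaker(failure_threshold=3, timeout_duration=30,
                         expected_exceptions=(httpx.HTTPError,))

async def fetch_health(url: str) -> dict:
    async with httpx.AsyncClient() as client:
        resp = await client.get(url)
        resp.raise_for_status()
        return resp.json()

async def main():
    try:
        data = await breaker.call(fetch_health, "http://inventory-service:8000/health")
    except CircuitBreakerOpenError:
        data = None  # fail fast while the service recovers
    print(breaker.get_stats())

asyncio.run(main())
```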

shared/utils/optimization.py  (new file, 438 lines)
@@ -0,0 +1,438 @@
"""
Optimization Utilities

Provides optimization algorithms for procurement planning including
MOQ rounding, economic order quantity, and multi-objective optimization.
"""

import math
from decimal import Decimal
from typing import List, Tuple, Dict, Optional
from dataclasses import dataclass


@dataclass
class OrderOptimizationResult:
    """Result of order quantity optimization"""
    optimal_quantity: Decimal
    order_cost: Decimal
    holding_cost: Decimal
    total_cost: Decimal
    orders_per_year: float
    reasoning: str


def calculate_economic_order_quantity(
    annual_demand: float,
    ordering_cost: float,
    holding_cost_per_unit: float
) -> float:
    """
    Calculate Economic Order Quantity (EOQ).

    EOQ = sqrt((2 × D × S) / H)
    where:
    - D = Annual demand
    - S = Ordering cost per order
    - H = Holding cost per unit per year

    Args:
        annual_demand: Annual demand in units
        ordering_cost: Cost per order placement
        holding_cost_per_unit: Annual holding cost per unit

    Returns:
        Optimal order quantity
    """
    if annual_demand <= 0 or ordering_cost <= 0 or holding_cost_per_unit <= 0:
        return 0.0

    eoq = math.sqrt((2 * annual_demand * ordering_cost) / holding_cost_per_unit)
    return eoq
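
A quick worked check of the formula:

```python
# D = 10,000 units/yr, S = $50/order, H = $2/unit/yr
# EOQ = sqrt((2 * 10000 * 50) / 2) = sqrt(500000) ≈ 707.11
q = calculate_economic_order_quantity(10_000, 50.0, 2.0)
print(f"{q:.2f}")  # 707.11
# At the EOQ the two cost components balance:
# ordering: (10000 / 707.11) * 50 ≈ $707; holding: (707.11 / 2) * 2 ≈ $707
```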
def optimize_order_quantity(
    required_quantity: Decimal,
    annual_demand: float,
    ordering_cost: float = 50.0,
    holding_cost_rate: float = 0.25,
    unit_price: float = 1.0,
    min_order_qty: Optional[Decimal] = None,
    max_order_qty: Optional[Decimal] = None
) -> OrderOptimizationResult:
    """
    Optimize order quantity considering EOQ and constraints.

    Args:
        required_quantity: Quantity needed for current period
        annual_demand: Estimated annual demand
        ordering_cost: Fixed cost per order
        holding_cost_rate: Annual holding cost as % of unit price
        unit_price: Cost per unit
        min_order_qty: Minimum order quantity (MOQ)
        max_order_qty: Maximum order quantity (storage limit)

    Returns:
        OrderOptimizationResult with optimal quantity and costs
    """
    holding_cost_per_unit = unit_price * holding_cost_rate

    # Calculate EOQ
    eoq = calculate_economic_order_quantity(
        annual_demand,
        ordering_cost,
        holding_cost_per_unit
    )

    # Start with EOQ or required quantity, whichever is larger
    optimal_qty = max(float(required_quantity), eoq)

    reasoning = f"Base EOQ: {eoq:.2f}, Required: {required_quantity}"

    # Apply minimum order quantity (convert via str for exact Decimal comparison)
    if min_order_qty and Decimal(str(optimal_qty)) < min_order_qty:
        optimal_qty = float(min_order_qty)
        reasoning += f", Applied MOQ: {min_order_qty}"

    # Apply maximum order quantity
    if max_order_qty and Decimal(str(optimal_qty)) > max_order_qty:
        optimal_qty = float(max_order_qty)
        reasoning += f", Capped at max: {max_order_qty}"

    # Calculate costs
    orders_per_year = annual_demand / optimal_qty if optimal_qty > 0 else 0
    annual_ordering_cost = orders_per_year * ordering_cost
    annual_holding_cost = (optimal_qty / 2) * holding_cost_per_unit
    total_annual_cost = annual_ordering_cost + annual_holding_cost

    return OrderOptimizationResult(
        optimal_quantity=Decimal(str(optimal_qty)),
        order_cost=Decimal(str(annual_ordering_cost)),
        holding_cost=Decimal(str(annual_holding_cost)),
        total_cost=Decimal(str(total_annual_cost)),
        orders_per_year=orders_per_year,
        reasoning=reasoning
    )
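
How the constraints interact, with numbers picked so each clause fires visibly (here the EOQ of 1,000 exceeds the storage cap):

```python
from decimal import Decimal

result = optimize_order_quantity(
    required_quantity=Decimal("120"),
    annual_demand=10_000,
    ordering_cost=50.0,
    holding_cost_rate=0.25,
    unit_price=4.0,                 # H = 4.0 * 0.25 = $1/unit/yr -> EOQ = 1000
    min_order_qty=Decimal("250"),   # already satisfied
    max_order_qty=Decimal("800"),   # storage cap wins over the EOQ
)
print(result.optimal_quantity)   # 800.0
print(result.orders_per_year)    # 12.5
print(result.reasoning)          # "Base EOQ: 1000.00, Required: 120, Capped at max: 800"
```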
def round_to_moq(
    quantity: Decimal,
    moq: Decimal,
    round_up: bool = True
) -> Decimal:
    """
    Round quantity to meet minimum order quantity.

    Args:
        quantity: Desired quantity
        moq: Minimum order quantity
        round_up: If True, always round up to next MOQ multiple

    Returns:
        Rounded quantity
    """
    if quantity <= 0 or moq <= 0:
        return quantity

    if quantity < moq:
        return moq

    # Calculate how many MOQs are needed
    multiples = quantity / moq

    if round_up:
        return Decimal(math.ceil(float(multiples))) * moq
    else:
        return Decimal(round(float(multiples))) * moq
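
Behavior at the edges, for reference:

```python
from decimal import Decimal

print(round_to_moq(Decimal("7"), Decimal("25")))                  # 25 (below MOQ -> MOQ)
print(round_to_moq(Decimal("60"), Decimal("25")))                 # 75 (2.4 multiples, ceil -> 3)
print(round_to_moq(Decimal("60"), Decimal("25"), round_up=False)) # 50 (nearest multiple -> 2)
```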
def round_to_package_size(
    quantity: Decimal,
    package_size: Decimal,
    allow_partial: bool = False
) -> Decimal:
    """
    Round quantity to package size.

    Args:
        quantity: Desired quantity
        package_size: Size of one package
        allow_partial: If False, always round up to full packages

    Returns:
        Rounded quantity
    """
    if quantity <= 0 or package_size <= 0:
        return quantity

    if allow_partial:
        return quantity

    packages_needed = quantity / package_size
    return Decimal(math.ceil(float(packages_needed))) * package_size
def apply_price_tier_optimization(
    base_quantity: Decimal,
    unit_price: Decimal,
    price_tiers: List[Dict]
) -> Tuple[Decimal, Decimal, str]:
    """
    Optimize quantity to take advantage of price tiers.

    Args:
        base_quantity: Base quantity needed
        unit_price: Current unit price
        price_tiers: List of dicts with 'min_quantity' and 'unit_price'

    Returns:
        Tuple of (optimized_quantity, unit_price, reasoning)
    """
    if not price_tiers:
        return base_quantity, unit_price, "No price tiers available"

    # Sort tiers by min_quantity
    sorted_tiers = sorted(price_tiers, key=lambda x: x['min_quantity'])

    # Calculate cost at base quantity
    base_cost = base_quantity * unit_price

    # Find current tier
    current_tier_price = unit_price
    for tier in sorted_tiers:
        if base_quantity >= Decimal(str(tier['min_quantity'])):
            current_tier_price = Decimal(str(tier['unit_price']))

    # Check if moving to the next tier would save money
    best_quantity = base_quantity
    best_price = current_tier_price
    best_savings = Decimal('0')
    reasoning = f"Current tier price: ${current_tier_price}"

    for tier in sorted_tiers:
        tier_min_qty = Decimal(str(tier['min_quantity']))
        tier_price = Decimal(str(tier['unit_price']))

        if tier_min_qty > base_quantity:
            # Calculate cost at this tier
            tier_cost = tier_min_qty * tier_price

            # Calculate savings
            savings = base_cost - tier_cost

            if savings > best_savings:
                # Additional quantity needed
                additional_qty = tier_min_qty - base_quantity

                # Check if savings justify additional inventory
                # Simple heuristic: savings should be > 10% of additional cost
                additional_cost = additional_qty * tier_price
                if savings > additional_cost * Decimal('0.1'):
                    best_quantity = tier_min_qty
                    best_price = tier_price
                    best_savings = savings
                    reasoning = f"Upgraded to tier {tier_min_qty}+ for ${savings:.2f} savings"

    return best_quantity, best_price, reasoning
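
A worked run of the tier logic: at 80 units the buyer pays $2.00 each ($160); buying into the 100-unit tier at $1.50 costs $150, and the $10 saving clears the 10%-of-additional-cost heuristic, so the quantity is bumped:

```python
from decimal import Decimal

tiers = [
    {"min_quantity": 50, "unit_price": 1.80},
    {"min_quantity": 100, "unit_price": 1.50},
]
qty, price, why = apply_price_tier_optimization(
    base_quantity=Decimal("80"), unit_price=Decimal("2.00"), price_tiers=tiers
)
print(qty, price)  # 100 1.5
print(why)         # "Upgraded to tier 100+ for $10.00 savings"
```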
def aggregate_requirements_for_moq(
    requirements: List[Dict],
    moq: Decimal
) -> List[Dict]:
    """
    Aggregate multiple requirements to meet MOQ efficiently.

    Args:
        requirements: List of requirement dicts with 'quantity' and 'date'
        moq: Minimum order quantity

    Returns:
        List of aggregated orders
    """
    if not requirements:
        return []

    # Sort requirements by date
    sorted_reqs = sorted(requirements, key=lambda x: x['date'])

    orders = []
    current_batch = []
    current_total = Decimal('0')

    for req in sorted_reqs:
        req_qty = Decimal(str(req['quantity']))

        # Check if adding this requirement would exceed reasonable aggregation
        # (e.g., don't aggregate more than 30 days' worth)
        if current_batch:
            days_span = (req['date'] - current_batch[0]['date']).days
            if days_span > 30:
                # Finalize current batch
                if current_total > 0:
                    orders.append({
                        'quantity': round_to_moq(current_total, moq),
                        'date': current_batch[0]['date'],
                        'requirements': current_batch.copy()
                    })
                current_batch = []
                current_total = Decimal('0')

        current_batch.append(req)
        current_total += req_qty

        # If we've met MOQ, finalize this batch
        if current_total >= moq:
            orders.append({
                'quantity': round_to_moq(current_total, moq),
                'date': current_batch[0]['date'],
                'requirements': current_batch.copy()
            })
            current_batch = []
            current_total = Decimal('0')

    # Handle remaining requirements
    if current_batch:
        orders.append({
            'quantity': round_to_moq(current_total, moq),
            'date': current_batch[0]['date'],
            'requirements': current_batch
        })

    return orders
def calculate_order_splitting(
    total_quantity: Decimal,
    suppliers: List[Dict],
    max_supplier_capacity: Optional[Decimal] = None
) -> List[Dict]:
    """
    Split a large order across multiple suppliers.

    Args:
        total_quantity: Total quantity needed
        suppliers: List of supplier dicts with 'id', 'capacity', 'reliability'
        max_supplier_capacity: Maximum any single supplier should provide

    Returns:
        List of allocations with 'supplier_id' and 'quantity'
    """
    if not suppliers:
        return []

    # Sort suppliers by reliability (descending)
    sorted_suppliers = sorted(
        suppliers,
        key=lambda x: x.get('reliability', 0.5),
        reverse=True
    )

    allocations = []
    remaining = total_quantity

    for supplier in sorted_suppliers:
        if remaining <= 0:
            break

        supplier_capacity = Decimal(str(supplier.get('capacity', float('inf'))))

        # Apply max capacity constraint
        if max_supplier_capacity:
            supplier_capacity = min(supplier_capacity, max_supplier_capacity)

        # Allocate to this supplier
        allocated = min(remaining, supplier_capacity)

        allocations.append({
            'supplier_id': supplier['id'],
            'quantity': allocated,
            'reliability': supplier.get('reliability', 0.5)
        })

        remaining -= allocated

    # If quantity still remains, distribute it across suppliers
    if remaining > 0:
        # Distribute remaining proportionally to reliability
        total_reliability = sum(s.get('reliability', 0.5) for s in sorted_suppliers)

        for i, supplier in enumerate(sorted_suppliers):
            if total_reliability > 0:
                proportion = supplier.get('reliability', 0.5) / total_reliability
                additional = remaining * Decimal(str(proportion))

                allocations[i]['quantity'] += additional

    return allocations
def calculate_buffer_stock(
    lead_time_days: int,
    daily_demand: float,
    demand_variability: float,
    service_level: float = 0.95
) -> Decimal:
    """
    Calculate buffer stock based on demand variability.

    Buffer Stock = Z × σ × √(lead_time)
    where:
    - Z = service level z-score
    - σ = demand standard deviation
    - lead_time = lead time in days

    Args:
        lead_time_days: Supplier lead time in days
        daily_demand: Average daily demand
        demand_variability: Coefficient of variation (CV = σ/μ)
        service_level: Target service level (0-1)

    Returns:
        Buffer stock quantity
    """
    if lead_time_days <= 0 or daily_demand <= 0:
        return Decimal('0')

    # Z-scores for common service levels
    z_scores = {
        0.90: 1.28,
        0.95: 1.65,
        0.975: 1.96,
        0.99: 2.33,
        0.995: 2.58
    }

    # Get z-score for service level
    z_score = z_scores.get(service_level, 1.65)  # Default to 95%

    # Calculate standard deviation
    stddev = daily_demand * demand_variability

    # Buffer stock formula
    buffer = z_score * stddev * math.sqrt(lead_time_days)

    return Decimal(str(buffer))
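
A worked check: demand of 40/day with CV 0.25 gives σ = 10; over a 9-day lead time √9 = 3, so at a 95% service level the buffer is 1.65 × 10 × 3 = 49.5:

```python
b = calculate_buffer_stock(lead_time_days=9, daily_demand=40.0,
                           demand_variability=0.25, service_level=0.95)
print(b)  # 49.5
```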
def calculate_reorder_point(
    daily_demand: float,
    lead_time_days: int,
    safety_stock: Decimal
) -> Decimal:
    """
    Calculate reorder point.

    Reorder Point = (Daily Demand × Lead Time) + Safety Stock

    Args:
        daily_demand: Average daily demand
        lead_time_days: Supplier lead time in days
        safety_stock: Safety stock quantity

    Returns:
        Reorder point
    """
    lead_time_demand = Decimal(str(daily_demand * lead_time_days))
    return lead_time_demand + safety_stock

shared/utils/saga_pattern.py  (new file, 293 lines)
@@ -0,0 +1,293 @@
|
||||
"""
|
||||
Saga Pattern Implementation
|
||||
|
||||
Provides distributed transaction coordination with compensation logic
|
||||
for microservices architecture.
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
import uuid
|
||||
from typing import Callable, List, Dict, Any, Optional, Tuple
|
||||
from dataclasses import dataclass, field
|
||||
from datetime import datetime
|
||||
from enum import Enum
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class SagaStepStatus(str, Enum):
|
||||
"""Status of a saga step"""
|
||||
PENDING = "pending"
|
||||
IN_PROGRESS = "in_progress"
|
||||
COMPLETED = "completed"
|
||||
FAILED = "failed"
|
||||
COMPENSATING = "compensating"
|
||||
COMPENSATED = "compensated"
|
||||
|
||||
|
||||
class SagaStatus(str, Enum):
|
||||
"""Overall saga status"""
|
||||
PENDING = "pending"
|
||||
IN_PROGRESS = "in_progress"
|
||||
COMPLETED = "completed"
|
||||
FAILED = "failed"
|
||||
COMPENSATING = "compensating"
|
||||
COMPENSATED = "compensated"
|
||||
|
||||
|
||||
@dataclass
|
||||
class SagaStep:
|
||||
"""
|
||||
A single step in a saga with compensation logic.
|
||||
|
||||
Args:
|
||||
name: Human-readable step name
|
||||
action: Async function to execute
|
||||
compensation: Async function to undo the action
|
||||
action_args: Arguments for the action function
|
||||
action_kwargs: Keyword arguments for the action function
|
||||
"""
|
||||
name: str
|
||||
action: Callable
|
||||
compensation: Optional[Callable] = None
|
||||
action_args: tuple = field(default_factory=tuple)
|
||||
action_kwargs: dict = field(default_factory=dict)
|
||||
|
||||
# Runtime state
|
||||
status: SagaStepStatus = SagaStepStatus.PENDING
|
||||
result: Any = None
|
||||
error: Optional[Exception] = None
|
||||
started_at: Optional[datetime] = None
|
||||
completed_at: Optional[datetime] = None
|
||||
|
||||
|
||||
@dataclass
|
||||
class SagaExecution:
|
||||
"""Tracks execution state of a saga"""
|
||||
saga_id: str
|
||||
status: SagaStatus = SagaStatus.PENDING
|
||||
steps: List[SagaStep] = field(default_factory=list)
|
||||
current_step: int = 0
|
||||
started_at: Optional[datetime] = None
|
||||
completed_at: Optional[datetime] = None
|
||||
error: Optional[Exception] = None
|
||||
|
||||
|
||||
class SagaCoordinator:
|
||||
"""
|
||||
Coordinates saga execution with automatic compensation on failure.
|
||||
|
||||
Example:
|
||||
```python
|
||||
saga = SagaCoordinator()
|
||||
|
||||
saga.add_step(
|
||||
"create_order",
|
||||
action=create_order,
|
||||
compensation=delete_order,
|
||||
action_args=(order_data,)
|
||||
)
|
||||
|
||||
saga.add_step(
|
||||
"reserve_inventory",
|
||||
action=reserve_inventory,
|
||||
compensation=release_inventory,
|
||||
action_args=(order_id, items)
|
||||
)
|
||||
|
||||
result = await saga.execute()
|
||||
```
|
||||
"""
|
||||
|
||||
def __init__(self, saga_id: Optional[str] = None):
|
||||
self.execution = SagaExecution(
|
||||
saga_id=saga_id or str(uuid.uuid4())
|
||||
)
|
||||
self._completed_steps: List[SagaStep] = []
|
||||
|
||||
def add_step(
|
||||
self,
|
||||
name: str,
|
||||
action: Callable,
|
||||
compensation: Optional[Callable] = None,
|
||||
action_args: tuple = (),
|
||||
action_kwargs: dict = None
|
||||
):
|
||||
"""
|
||||
Add a step to the saga.
|
||||
|
||||
Args:
|
||||
name: Human-readable step name
|
||||
action: Async function to execute
|
||||
compensation: Async function to undo the action (optional)
|
||||
action_args: Arguments for the action function
|
||||
action_kwargs: Keyword arguments for the action function
|
||||
"""
|
||||
step = SagaStep(
|
||||
name=name,
|
||||
action=action,
|
||||
compensation=compensation,
|
||||
action_args=action_args,
|
||||
action_kwargs=action_kwargs or {}
|
||||
)
|
||||
self.execution.steps.append(step)
|
||||
logger.debug(f"Added step '{name}' to saga {self.execution.saga_id}")
|
||||
|
||||
async def execute(self) -> Tuple[bool, Optional[Any], Optional[Exception]]:
|
||||
"""
|
||||
Execute all saga steps in sequence.
|
||||
|
||||
Returns:
|
||||
Tuple of (success: bool, final_result: Any, error: Optional[Exception])
|
||||
"""
|
||||
self.execution.status = SagaStatus.IN_PROGRESS
|
||||
self.execution.started_at = datetime.now()
|
||||
|
||||
logger.info(
|
||||
f"Starting saga {self.execution.saga_id} with {len(self.execution.steps)} steps"
|
||||
)
|
||||
|
||||
try:
|
||||
# Execute each step
|
||||
for idx, step in enumerate(self.execution.steps):
|
||||
self.execution.current_step = idx
|
||||
|
||||
success = await self._execute_step(step)
|
||||
|
||||
if not success:
|
||||
# Step failed, trigger compensation
|
||||
logger.error(
|
||||
f"Saga {self.execution.saga_id} failed at step '{step.name}': {step.error}"
|
||||
)
|
||||
await self._compensate()
|
||||
|
||||
self.execution.status = SagaStatus.COMPENSATED
|
||||
self.execution.completed_at = datetime.now()
|
||||
self.execution.error = step.error
|
||||
|
||||
return False, None, step.error
|
||||
|
||||
# Step succeeded
|
||||
self._completed_steps.append(step)
|
||||
|
||||
# All steps completed successfully
|
||||
self.execution.status = SagaStatus.COMPLETED
|
||||
self.execution.completed_at = datetime.now()
|
||||
|
||||
# Return the result of the last step
|
||||
final_result = self.execution.steps[-1].result if self.execution.steps else None
|
||||
|
||||
logger.info(f"Saga {self.execution.saga_id} completed successfully")
|
||||
return True, final_result, None
|
||||
|
||||
except Exception as e:
|
||||
logger.exception(f"Unexpected error in saga {self.execution.saga_id}: {e}")
|
||||
await self._compensate()
|
||||
|
||||
self.execution.status = SagaStatus.FAILED
|
||||
self.execution.completed_at = datetime.now()
|
||||
self.execution.error = e
|
||||
|
||||
return False, None, e
|
||||
|
||||
async def _execute_step(self, step: SagaStep) -> bool:
|
||||
"""
|
||||
Execute a single saga step.
|
||||
|
||||
Returns:
|
||||
True if step succeeded, False otherwise
|
||||
"""
|
||||
step.status = SagaStepStatus.IN_PROGRESS
|
||||
step.started_at = datetime.now()
|
||||
|
||||
logger.info(f"Executing saga step '{step.name}'")
|
||||
|
||||
try:
|
||||
# Execute the action
|
||||
if asyncio.iscoroutinefunction(step.action):
|
||||
result = await step.action(*step.action_args, **step.action_kwargs)
|
||||
else:
|
||||
result = step.action(*step.action_args, **step.action_kwargs)
|
||||
|
||||
step.result = result
|
||||
step.status = SagaStepStatus.COMPLETED
|
||||
step.completed_at = datetime.now()
|
||||
|
||||
logger.info(f"Saga step '{step.name}' completed successfully")
|
||||
return True
|
||||
|
||||
except Exception as e:
|
||||
step.error = e
|
||||
step.status = SagaStepStatus.FAILED
|
||||
step.completed_at = datetime.now()
|
||||
|
||||
logger.error(f"Saga step '{step.name}' failed: {e}")
|
||||
return False
|
||||
|
||||
async def _compensate(self):
|
||||
"""
|
||||
Execute compensation logic for all completed steps in reverse order.
|
||||
"""
|
||||
if not self._completed_steps:
|
||||
logger.info(f"No steps to compensate for saga {self.execution.saga_id}")
|
||||
return
|
||||
|
||||
self.execution.status = SagaStatus.COMPENSATING
|
||||
|
||||
logger.info(
|
||||
f"Starting compensation for saga {self.execution.saga_id} "
|
||||
f"({len(self._completed_steps)} steps to compensate)"
|
||||
)
|
||||
|
||||
# Compensate in reverse order
|
||||
for step in reversed(self._completed_steps):
|
||||
if step.compensation is None:
|
||||
logger.warning(
|
||||
f"Step '{step.name}' has no compensation function, skipping"
|
||||
)
|
||||
continue
|
||||
|
||||
step.status = SagaStepStatus.COMPENSATING
|
||||
|
||||
try:
|
||||
logger.info(f"Compensating step '{step.name}'")
|
||||
|
||||
# Execute compensation with the result from the original action
|
||||
compensation_args = (step.result,) if step.result is not None else ()
|
||||
|
||||
if asyncio.iscoroutinefunction(step.compensation):
|
||||
await step.compensation(*compensation_args)
|
||||
else:
|
||||
step.compensation(*compensation_args)
|
||||
|
||||
step.status = SagaStepStatus.COMPENSATED
|
||||
logger.info(f"Step '{step.name}' compensated successfully")
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to compensate step '{step.name}': {e}")
|
||||
# Continue compensating other steps even if one fails
|
||||
|
||||
logger.info(f"Compensation completed for saga {self.execution.saga_id}")
|
||||
|
||||
def get_execution_summary(self) -> Dict[str, Any]:
|
||||
"""Get summary of saga execution"""
|
||||
return {
|
||||
"saga_id": self.execution.saga_id,
|
||||
"status": self.execution.status.value,
|
||||
"total_steps": len(self.execution.steps),
|
||||
"current_step": self.execution.current_step,
|
||||
"completed_steps": len(self._completed_steps),
|
||||
"started_at": self.execution.started_at.isoformat() if self.execution.started_at else None,
|
||||
"completed_at": self.execution.completed_at.isoformat() if self.execution.completed_at else None,
|
||||
"error": str(self.execution.error) if self.execution.error else None,
|
||||
"steps": [
|
||||
{
|
||||
"name": step.name,
|
||||
"status": step.status.value,
|
||||
"has_compensation": step.compensation is not None,
|
||||
"error": str(step.error) if step.error else None
|
||||
}
|
||||
for step in self.execution.steps
|
||||
]
|
||||
}
|
||||
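Reviewer note: a minimal sketch of how the compensation flow above plays out. The construction API (SagaOrchestrator(...), add_step(...)) is assumed for illustration and may differ from the real class; only the reverse-order compensation and the (success, result, error) return shape are taken from the code.

    # Hypothetical wiring: reserve stock, then charge payment. If the charge
    # fails, _compensate() runs release_stock with reserve_stock's result.
    saga = SagaOrchestrator("order-123")            # assumed constructor
    saga.add_step(SagaStep(                         # assumed helper
        name="reserve_stock",
        action=reserve_stock,                       # e.g. async def reserve_stock(order_id)
        action_args=(order_id,), action_kwargs={},
        compensation=release_stock,                 # called with the step's result
    ))
    saga.add_step(SagaStep(
        name="charge_payment",
        action=charge_payment,
        action_args=(order_id,), action_kwargs={},
        compensation=refund_payment,
    ))
    success, result, error = await saga.execute()   # assumed entry point
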
536
shared/utils/time_series_utils.py
Normal file
@@ -0,0 +1,536 @@
"""
Time Series Utilities

Provides utilities for time-series analysis, projection, and calculations
used in forecasting and inventory planning.
"""

import math
import statistics
from datetime import date, timedelta
from typing import Dict, List, Optional, Tuple


def generate_date_range(
    start_date: date,
    end_date: date,
    include_end: bool = True
) -> List[date]:
    """
    Generate a list of dates between start and end.

    Args:
        start_date: Start date (inclusive)
        end_date: End date
        include_end: Whether to include the end date

    Returns:
        List of dates
    """
    dates = []
    current = start_date

    while current < end_date or (include_end and current == end_date):
        dates.append(current)
        current += timedelta(days=1)

    return dates

def generate_future_dates(
    start_date: date,
    num_days: int
) -> List[date]:
    """
    Generate a list of future dates starting from start_date.

    Args:
        start_date: Starting date
        num_days: Number of days to generate

    Returns:
        List of dates
    """
    return [start_date + timedelta(days=i) for i in range(num_days)]

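Both helpers return date objects day by day; a quick illustrative check (not part of the commit):

    >>> generate_date_range(date(2024, 1, 1), date(2024, 1, 3))
    [datetime.date(2024, 1, 1), datetime.date(2024, 1, 2), datetime.date(2024, 1, 3)]
    >>> generate_future_dates(date(2024, 1, 1), 2)
    [datetime.date(2024, 1, 1), datetime.date(2024, 1, 2)]
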
def calculate_moving_average(
    values: List[float],
    window_size: int
) -> List[float]:
    """
    Calculate the moving average over a sliding window.

    Args:
        values: List of values
        window_size: Size of the moving window

    Returns:
        List of moving averages (empty if there are fewer values than window_size)
    """
    if len(values) < window_size:
        return []

    moving_averages = []
    for i in range(len(values) - window_size + 1):
        window = values[i:i + window_size]
        moving_averages.append(sum(window) / window_size)

    return moving_averages

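A quick sanity check of the window arithmetic (illustrative):

    >>> calculate_moving_average([1.0, 2.0, 3.0, 4.0], window_size=2)
    [1.5, 2.5, 3.5]
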
def calculate_standard_deviation(values: List[float]) -> float:
    """
    Calculate the sample standard deviation of values.

    Args:
        values: List of values

    Returns:
        Standard deviation (0.0 if fewer than two values)
    """
    if len(values) < 2:
        return 0.0

    return statistics.stdev(values)


def calculate_variance(values: List[float]) -> float:
    """
    Calculate the sample variance of values.

    Args:
        values: List of values

    Returns:
        Variance (0.0 if fewer than two values)
    """
    if len(values) < 2:
        return 0.0

    return statistics.variance(values)


def calculate_mean(values: List[float]) -> float:
    """
    Calculate the mean of values.

    Args:
        values: List of values

    Returns:
        Mean (0.0 for an empty list)
    """
    if not values:
        return 0.0

    return statistics.mean(values)


def calculate_median(values: List[float]) -> float:
    """
    Calculate the median of values.

    Args:
        values: List of values

    Returns:
        Median (0.0 for an empty list)
    """
    if not values:
        return 0.0

    return statistics.median(values)

def calculate_percentile(values: List[float], percentile: float) -> float:
    """
    Calculate a percentile of values using linear interpolation.

    Args:
        values: List of values
        percentile: Percentile to calculate (0-100)

    Returns:
        Percentile value
    """
    if not values:
        return 0.0

    sorted_values = sorted(values)
    k = (len(sorted_values) - 1) * percentile / 100
    f = math.floor(k)
    c = math.ceil(k)

    if f == c:
        return sorted_values[int(k)]

    # Interpolate linearly between the two neighbouring ranks
    d0 = sorted_values[int(f)] * (c - k)
    d1 = sorted_values[int(c)] * (k - f)
    return d0 + d1

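The rank interpolation is worth a worked check (illustrative): for [1, 2, 3, 4] at the 50th percentile, k = 1.5, so the result is halfway between 2 and 3.

    >>> calculate_percentile([1.0, 2.0, 3.0, 4.0], 50)
    2.5
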
def calculate_coefficient_of_variation(values: List[float]) -> float:
    """
    Calculate the coefficient of variation (CV = stddev / mean).

    Args:
        values: List of values

    Returns:
        Coefficient of variation (0.0 if the list is empty or the mean is zero)
    """
    if not values:
        return 0.0

    mean = calculate_mean(values)
    if mean == 0:
        return 0.0

    stddev = calculate_standard_deviation(values)
    return stddev / mean

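For instance (illustrative): with [2.0, 4.0] the mean is 3.0 and the sample stddev is the square root of 2, giving CV of roughly 0.47.

    >>> round(calculate_coefficient_of_variation([2.0, 4.0]), 3)
    0.471
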
def aggregate_by_date(
    data: List[Tuple[date, float]],
    aggregation: str = "sum"
) -> Dict[date, float]:
    """
    Aggregate time-series data by date.

    Args:
        data: List of (date, value) tuples
        aggregation: Aggregation method ('sum', 'mean', 'max', 'min'); any
            other value falls back to 'sum'

    Returns:
        Dictionary mapping date to aggregated value
    """
    by_date: Dict[date, List[float]] = {}

    for dt, value in data:
        if dt not in by_date:
            by_date[dt] = []
        by_date[dt].append(value)

    result = {}
    for dt, values in by_date.items():
        if aggregation == "sum":
            result[dt] = sum(values)
        elif aggregation == "mean":
            result[dt] = calculate_mean(values)
        elif aggregation == "max":
            result[dt] = max(values)
        elif aggregation == "min":
            result[dt] = min(values)
        else:
            result[dt] = sum(values)

    return result

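Duplicate dates collapse into one aggregated entry (illustrative):

    >>> aggregate_by_date([(date(2024, 1, 1), 2.0), (date(2024, 1, 1), 3.0),
    ...                    (date(2024, 1, 2), 4.0)])
    {datetime.date(2024, 1, 1): 5.0, datetime.date(2024, 1, 2): 4.0}
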
def fill_missing_dates(
    data: Dict[date, float],
    start_date: date,
    end_date: date,
    fill_value: float = 0.0
) -> Dict[date, float]:
    """
    Fill missing dates in time-series data.

    Args:
        data: Dictionary mapping date to value
        start_date: Start date
        end_date: End date (inclusive)
        fill_value: Value to use for missing dates

    Returns:
        Dictionary with every date in the range present
    """
    date_range = generate_date_range(start_date, end_date)
    filled_data = {}

    for dt in date_range:
        filled_data[dt] = data.get(dt, fill_value)

    return filled_data

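Gaps pick up fill_value (illustrative):

    >>> fill_missing_dates({date(2024, 1, 1): 5.0}, date(2024, 1, 1), date(2024, 1, 3))
    {datetime.date(2024, 1, 1): 5.0, datetime.date(2024, 1, 2): 0.0, datetime.date(2024, 1, 3): 0.0}
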
def calculate_trend(
    values: List[float]
) -> Tuple[float, float]:
    """
    Calculate the linear trend (slope and intercept) using least squares.

    The x-axis is the index of each value (0, 1, 2, ...).

    Args:
        values: List of values

    Returns:
        Tuple of (slope, intercept)
    """
    if len(values) < 2:
        return 0.0, values[0] if values else 0.0

    n = len(values)
    x = list(range(n))
    y = values

    # Calculate means
    x_mean = sum(x) / n
    y_mean = sum(y) / n

    # Slope = covariance(x, y) / variance(x)
    numerator = sum((x[i] - x_mean) * (y[i] - y_mean) for i in range(n))
    denominator = sum((x[i] - x_mean) ** 2 for i in range(n))

    if denominator == 0:
        return 0.0, y_mean

    slope = numerator / denominator
    intercept = y_mean - slope * x_mean

    return slope, intercept

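With a perfectly linear series the fit is exact (illustrative): [1, 2, 3] yields the line y = x + 1.

    >>> calculate_trend([1.0, 2.0, 3.0])
    (1.0, 1.0)
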
def project_value(
    historical_values: List[float],
    periods_ahead: int,
    method: str = "mean"
) -> List[float]:
    """
    Project future values based on historical data.

    Args:
        historical_values: Historical values
        periods_ahead: Number of periods to project
        method: Projection method ('mean', 'trend', 'last'); any other
            value falls back to 'mean'

    Returns:
        List of projected values
    """
    if not historical_values:
        return [0.0] * periods_ahead

    if method == "mean":
        # Repeat the historical mean
        projected_value = calculate_mean(historical_values)
        return [projected_value] * periods_ahead

    elif method == "last":
        # Repeat the last observed value
        return [historical_values[-1]] * periods_ahead

    elif method == "trend":
        # Extend the least-squares trend line past the last observation
        slope, intercept = calculate_trend(historical_values)
        n = len(historical_values)
        return [slope * (n + i) + intercept for i in range(periods_ahead)]

    else:
        # Default to the mean
        projected_value = calculate_mean(historical_values)
        return [projected_value] * periods_ahead

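Continuing the trend example: with history [1.0, 2.0, 3.0] the fitted line is y = x + 1, and projection starts at index n = 3 (illustrative):

    >>> project_value([1.0, 2.0, 3.0], 2, method="trend")
    [4.0, 5.0]
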
def calculate_cumulative_sum(values: List[float]) -> List[float]:
    """
    Calculate cumulative sum of values.

    Args:
        values: List of values

    Returns:
        List of cumulative sums
    """
    cumulative = []
    total = 0.0

    for value in values:
        total += value
        cumulative.append(total)

    return cumulative

def calculate_rolling_sum(
    values: List[float],
    window_size: int
) -> List[float]:
    """
    Calculate the rolling sum over a sliding window.

    Args:
        values: List of values
        window_size: Size of the rolling window

    Returns:
        List of rolling sums (empty if there are fewer values than window_size)
    """
    if len(values) < window_size:
        return []

    rolling_sums = []
    for i in range(len(values) - window_size + 1):
        window = values[i:i + window_size]
        rolling_sums.append(sum(window))

    return rolling_sums

def normalize_values(
    values: List[float],
    method: str = "minmax"
) -> List[float]:
    """
    Normalize values to a standard range.

    Args:
        values: List of values
        method: Normalization method ('minmax' or 'zscore'); any other
            value returns the input unchanged

    Returns:
        List of normalized values
    """
    if not values:
        return []

    if method == "minmax":
        # Scale to [0, 1]
        min_val = min(values)
        max_val = max(values)

        if max_val == min_val:
            return [0.5] * len(values)

        return [(v - min_val) / (max_val - min_val) for v in values]

    elif method == "zscore":
        # Z-score normalization
        mean = calculate_mean(values)
        stddev = calculate_standard_deviation(values)

        if stddev == 0:
            return [0.0] * len(values)

        return [(v - mean) / stddev for v in values]

    else:
        return values

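Min-max scaling pins the extremes to 0 and 1 (illustrative):

    >>> normalize_values([2.0, 4.0, 6.0])
    [0.0, 0.5, 1.0]
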
def detect_outliers(
    values: List[float],
    method: str = "iqr",
    threshold: float = 1.5
) -> List[bool]:
    """
    Detect outliers in values.

    Args:
        values: List of values
        method: Detection method ('iqr' or 'zscore')
        threshold: Detection threshold (1.5 is the conventional IQR
            multiplier; for 'zscore' a value around 3.0 is more common)

    Returns:
        List of booleans indicating outliers
    """
    if not values:
        return []

    if method == "iqr":
        # Interquartile range method
        q1 = calculate_percentile(values, 25)
        q3 = calculate_percentile(values, 75)
        iqr = q3 - q1

        lower_bound = q1 - threshold * iqr
        upper_bound = q3 + threshold * iqr

        return [v < lower_bound or v > upper_bound for v in values]

    elif method == "zscore":
        # Z-score method
        mean = calculate_mean(values)
        stddev = calculate_standard_deviation(values)

        if stddev == 0:
            return [False] * len(values)

        z_scores = [(v - mean) / stddev for v in values]
        return [abs(z) > threshold for z in z_scores]

    else:
        return [False] * len(values)

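With the default 1.5 x IQR fence, [1, 2, 3, 4, 100] has Q1 = 2, Q3 = 4, so the fences are [-1, 7] and only the spike is flagged (illustrative):

    >>> detect_outliers([1.0, 2.0, 3.0, 4.0, 100.0])
    [False, False, False, False, True]
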
def interpolate_missing_values(
    values: List[Optional[float]],
    method: str = "linear"
) -> List[float]:
    """
    Interpolate missing values in a time series.

    Args:
        values: List of values with possible None entries
        method: Interpolation method ('linear', 'forward', 'backward');
            any other value is treated as 'linear'

    Returns:
        List with interpolated values
    """
    if not values:
        return []

    result = []

    if method == "forward":
        # Forward fill: carry the last valid value forward
        last_valid = None
        for v in values:
            if v is not None:
                last_valid = v
            result.append(last_valid if last_valid is not None else 0.0)

    elif method == "backward":
        # Backward fill: carry the next valid value backward
        next_valid = None
        for v in reversed(values):
            if v is not None:
                next_valid = v
            result.insert(0, next_valid if next_valid is not None else 0.0)

    else:  # linear
        # Linear interpolation between the nearest valid neighbours
        result = list(values)

        for i in range(len(result)):
            if result[i] is None:
                # Find previous and next valid values
                prev_idx = None
                next_idx = None

                for j in range(i - 1, -1, -1):
                    if values[j] is not None:
                        prev_idx = j
                        break

                for j in range(i + 1, len(values)):
                    if values[j] is not None:
                        next_idx = j
                        break

                if prev_idx is not None and next_idx is not None:
                    # Linear interpolation
                    x0, y0 = prev_idx, values[prev_idx]
                    x1, y1 = next_idx, values[next_idx]
                    result[i] = y0 + (y1 - y0) * (i - x0) / (x1 - x0)
                elif prev_idx is not None:
                    # Only earlier data: forward fill
                    result[i] = values[prev_idx]
                elif next_idx is not None:
                    # Only later data: backward fill
                    result[i] = values[next_idx]
                else:
                    # No valid values at all
                    result[i] = 0.0

    return result
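
A gap between two valid points lands on the connecting line (illustrative):

    >>> interpolate_missing_values([1.0, None, 3.0])
    [1.0, 2.0, 3.0]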