# shared/clients/sales_client.py
"""
Sales Service Client
Handles all API calls to the sales service
"""
import httpx
import structlog
from datetime import date
from typing import Dict, Any, Optional, List, Union
from .base_service_client import BaseServiceClient
from shared.config.base import BaseServiceSettings
logger = structlog.get_logger()
class SalesServiceClient(BaseServiceClient):
"""Client for communicating with the sales service"""
def __init__(self, config: BaseServiceSettings, calling_service_name: str = "unknown"):
super().__init__(calling_service_name, config)
self.service_url = config.SALES_SERVICE_URL
def get_service_base_path(self) -> str:
return "/api/v1"
# ================================================================
# SALES DATA (with advanced pagination support)
# ================================================================
async def get_sales_data(
self,
tenant_id: str,
start_date: Optional[str] = None,
end_date: Optional[str] = None,
product_id: Optional[str] = None,
aggregation: str = "daily"
) -> Optional[List[Dict[str, Any]]]:
"""Get sales data for a date range"""
params = {"aggregation": aggregation}
if start_date:
params["start_date"] = start_date
if end_date:
params["end_date"] = end_date
if product_id:
params["product_id"] = product_id
result = await self.get("sales/sales", tenant_id=tenant_id, params=params)
# Handle both list and dict responses
if result is None:
return None
elif isinstance(result, list):
return result
elif isinstance(result, dict):
return result.get("sales", [])
else:
return None
async def get_all_sales_data(
self,
tenant_id: str,
start_date: Optional[str] = None,
end_date: Optional[str] = None,
product_id: Optional[str] = None,
aggregation: str = "daily",
page_size: int = 1000,
max_pages: int = 100
) -> List[Dict[str, Any]]:
"""
Get ALL sales data using pagination (equivalent to original fetch_sales_data)
Retrieves all records without pagination limits
"""
params = {"aggregation": aggregation}
if start_date:
params["start_date"] = start_date
if end_date:
params["end_date"] = end_date
if product_id:
params["product_id"] = product_id
# Use the inherited paginated request method
try:
all_records = await self.get_paginated(
"sales/sales",
tenant_id=tenant_id,
params=params,
page_size=page_size,
max_pages=max_pages,
timeout=2000.0
)
logger.info(f"Successfully fetched {len(all_records)} total sales records via sales service",
tenant_id=tenant_id)
return all_records
except Exception as e:
logger.error(f"Failed to fetch paginated sales data: {e}")
return []
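    # Illustrative call (a sketch, not part of the API surface): assumes an
    # already-constructed client and a real tenant UUID; the dates below are
    # placeholders.
    #
    #     records = await client.get_all_sales_data(
    #         tenant_id="11111111-1111-1111-1111-111111111111",
    #         start_date="2025-01-01",
    #         end_date="2025-03-31",
    #         page_size=500,
    #     )
    #     # -> flat list of sales dicts across all pages, [] on failure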
async def upload_sales_data(
self,
tenant_id: str,
sales_data: List[Dict[str, Any]]
) -> Optional[Dict[str, Any]]:
"""Upload sales data"""
data = {"sales": sales_data}
return await self.post("sales/sales", data=data, tenant_id=tenant_id)
# ================================================================
# PRODUCTS
# ================================================================
async def get_products(self, tenant_id: str) -> Optional[List[Dict[str, Any]]]:
"""Get all products for a tenant"""
result = await self.get("sales/products", tenant_id=tenant_id)
return result.get("products", []) if result else None
async def get_product(self, tenant_id: str, product_id: str) -> Optional[Dict[str, Any]]:
"""Get a specific product"""
return await self.get(f"sales/products/{product_id}", tenant_id=tenant_id)
async def create_product(
self,
tenant_id: str,
name: str,
category: str,
price: float,
**kwargs
) -> Optional[Dict[str, Any]]:
"""Create a new product"""
data = {
"name": name,
"category": category,
"price": price,
**kwargs
}
return await self.post("sales/products", data=data, tenant_id=tenant_id)
async def update_product(
self,
tenant_id: str,
product_id: str,
**updates
) -> Optional[Dict[str, Any]]:
"""Update a product"""
return await self.put(f"sales/products/{product_id}", data=updates, tenant_id=tenant_id)
async def create_sales_record(
self,
tenant_id: str,
sales_data: Dict[str, Any]
) -> Optional[Dict[str, Any]]:
"""Create a new sales record
Args:
tenant_id: Tenant ID
sales_data: Sales record data including:
- inventory_product_id: Optional UUID for inventory tracking
- product_name: Product name
- product_category: Product category
- quantity_sold: Quantity sold
- unit_price: Unit price
- total_amount: Total amount
- sale_date: Sale date (YYYY-MM-DD)
- sales_channel: Sales channel (retail, wholesale, online, pos, etc.)
- source: Data source (manual, pos_sync, import, etc.)
- payment_method: Payment method
- notes: Optional notes
Returns:
Created sales record or None if failed
"""
try:
result = await self.post("sales/sales", data=sales_data, tenant_id=tenant_id)
if result:
logger.info("Created sales record via client",
tenant_id=tenant_id,
product=sales_data.get("product_name"))
return result
except Exception as e:
logger.error("Failed to create sales record",
error=str(e),
tenant_id=tenant_id)
return None
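    # Illustrative payload for create_sales_record, built from the fields the
    # docstring lists; every value below is a placeholder, not real data.
    #
    #     sales_data = {
    #         "inventory_product_id": "22222222-2222-2222-2222-222222222222",
    #         "product_name": "Baguette",
    #         "product_category": "bread",
    #         "quantity_sold": 40,
    #         "unit_price": 1.20,
    #         "total_amount": 48.00,
    #         "sale_date": "2025-02-14",
    #         "sales_channel": "retail",
    #         "source": "manual",
    #         "payment_method": "cash",
    #         "notes": "Morning batch",
    #     }
    #     record = await client.create_sales_record(tenant_id, sales_data)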
async def get_sales_summary(
self,
tenant_id: str,
start_date: date,
end_date: date
) -> Dict[str, Any]:
"""
Get sales summary/analytics for a tenant.
This method calls the sales analytics summary endpoint which provides
aggregated sales metrics over a date range.
Args:
tenant_id: The tenant UUID
start_date: Start date for summary range
end_date: End date for summary range
Returns:
Sales summary data including metrics like total sales, revenue, etc.
"""
params = {
"start_date": start_date.isoformat(),
"end_date": end_date.isoformat()
}
        result = await self.get(
            "sales/analytics/summary",
            tenant_id=tenant_id,
            params=params
        )
        # Normalize so the annotated Dict return type holds even on failure
        return result if isinstance(result, dict) else {}
async def get_sales_summary_batch(
self,
tenant_ids: List[str],
start_date: date,
end_date: date
) -> Dict[str, Any]:
"""
Get sales summaries for multiple tenants in a single request.
Phase 2 optimization: Eliminates N+1 query patterns for enterprise dashboards.
Args:
tenant_ids: List of tenant IDs to fetch
start_date: Start date for summary range
end_date: End date for summary range
Returns:
Dict mapping tenant_id -> sales summary
"""
try:
if not tenant_ids:
return {}
if len(tenant_ids) > 100:
logger.warning("Batch request exceeds max tenant limit", requested=len(tenant_ids))
tenant_ids = tenant_ids[:100]
data = {
"tenant_ids": tenant_ids,
"start_date": start_date.isoformat(),
"end_date": end_date.isoformat()
}
result = await self.post(
"sales/batch/sales-summary",
data=data,
tenant_id=tenant_ids[0] # Use first tenant for auth context
)
summaries = result if isinstance(result, dict) else {}
logger.info(
"Batch retrieved sales summaries",
requested=len(tenant_ids),
found=len(summaries),
start_date=start_date.isoformat(),
end_date=end_date.isoformat()
)
return summaries
except Exception as e:
logger.error(
"Error batch fetching sales summaries",
error=str(e),
tenant_count=len(tenant_ids)
)
return {}
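    # Illustrative batch call (tenant IDs are placeholders). The response is
    # expected to map tenant_id -> summary dict; "total_revenue" below is an
    # assumed key name, the real schema comes from the sales service.
    #
    #     summaries = await client.get_sales_summary_batch(
    #         tenant_ids=["tenant-a", "tenant-b"],
    #         start_date=date(2025, 1, 1),
    #         end_date=date(2025, 1, 31),
    #     )
    #     revenue_a = summaries.get("tenant-a", {}).get("total_revenue")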
async def get_product_demand_patterns(
self,
tenant_id: str,
product_id: str,
start_date: Optional[date] = None,
end_date: Optional[date] = None,
min_history_days: int = 90
) -> Dict[str, Any]:
"""
Get demand pattern analysis for a specific product.
Args:
tenant_id: Tenant identifier
product_id: Product identifier (inventory_product_id)
start_date: Start date for analysis
end_date: End date for analysis
min_history_days: Minimum days of history required
Returns:
Demand pattern analysis including trends, seasonality, and statistics
"""
try:
params = {"min_history_days": min_history_days}
if start_date:
params["start_date"] = start_date.isoformat()
if end_date:
params["end_date"] = end_date.isoformat()
result = await self.get(
f"sales/analytics/products/{product_id}/demand-patterns",
tenant_id=tenant_id,
params=params
)
logger.info(
"Retrieved product demand patterns",
tenant_id=tenant_id,
product_id=product_id
)
return result if result else {}
except Exception as e:
logger.error(
"Failed to get product demand patterns",
error=str(e),
tenant_id=tenant_id,
product_id=product_id
)
return {}
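    # Illustrative call (the product ID is a placeholder); the method returns
    # {} when the analysis is unavailable or the request fails.
    #
    #     patterns = await client.get_product_demand_patterns(
    #         tenant_id=tenant_id,
    #         product_id="33333333-3333-3333-3333-333333333333",
    #         min_history_days=60,
    #     )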
# ================================================================
# DATA IMPORT
# ================================================================
async def import_sales_data(
self,
tenant_id: str,
file_content: str,
file_format: str,
filename: Optional[str] = None
) -> Optional[Dict[str, Any]]:
"""Import sales data from CSV/Excel/JSON"""
data = {
"content": file_content,
"format": file_format,
"filename": filename
}
return await self.post("sales/operations/import", data=data, tenant_id=tenant_id)