# services/sales/app/services/sales_service.py
"""
Sales Service - Business Logic Layer
"""

import uuid
from typing import List, Optional, Dict, Any
from uuid import UUID
from datetime import datetime
import structlog

from app.models.sales import SalesData
from app.repositories.sales_repository import SalesRepository
from app.schemas.sales import SalesDataCreate, SalesDataUpdate, SalesDataQuery, SalesAnalytics
from app.core.database import get_db_transaction
from app.services.inventory_client import InventoryServiceClient
from shared.database.exceptions import DatabaseError

logger = structlog.get_logger()


class SalesService:
    """Service layer for sales operations"""

    def __init__(self):
        self.inventory_client = InventoryServiceClient()

    async def create_sales_record(
        self,
        sales_data: SalesDataCreate,
        tenant_id: UUID,
        user_id: Optional[UUID] = None
    ) -> SalesData:
        """Create a new sales record with business validation"""
        try:
            # Sync product data with inventory service if inventory_product_id is provided
            if sales_data.inventory_product_id:
                product_cache = await self.inventory_client.sync_product_cache(
                    sales_data.inventory_product_id, tenant_id
                )
                if product_cache:
                    # Update cached product fields from inventory
                    sales_data_dict = sales_data.model_dump()
                    sales_data_dict.update(product_cache)
                    sales_data = SalesDataCreate(**sales_data_dict)
                else:
                    logger.warning("Could not sync product from inventory",
                                   product_id=sales_data.inventory_product_id, tenant_id=tenant_id)

            # Business validation
            await self._validate_sales_data(sales_data, tenant_id)

            # Set user who created the record
            if user_id:
                sales_data_dict = sales_data.model_dump()
                sales_data_dict['created_by'] = user_id
                sales_data = SalesDataCreate(**sales_data_dict)

            async with get_db_transaction() as db:
                repository = SalesRepository(db)
                record = await repository.create_sales_record(sales_data, tenant_id)

                # Additional business logic (e.g., notifications, analytics updates)
                await self._post_create_actions(record)

                return record

        except Exception as e:
            logger.error("Failed to create sales record in service", error=str(e), tenant_id=tenant_id)
            raise

    async def update_sales_record(
        self,
        record_id: UUID,
        update_data: SalesDataUpdate,
        tenant_id: UUID
    ) -> SalesData:
        """Update a sales record"""
        try:
            async with get_db_transaction() as db:
                repository = SalesRepository(db)

                # Verify record belongs to tenant
                existing_record = await repository.get_by_id(record_id)
                if not existing_record or existing_record.tenant_id != tenant_id:
                    raise ValueError(f"Sales record {record_id} not found for tenant {tenant_id}")

                # Update the record
                updated_record = await repository.update(record_id, update_data.model_dump(exclude_unset=True))

                logger.info("Updated sales record", record_id=record_id, tenant_id=tenant_id)
                return updated_record

        except Exception as e:
            logger.error("Failed to update sales record", error=str(e), record_id=record_id, tenant_id=tenant_id)
            raise

    async def get_sales_records(
        self,
        tenant_id: UUID,
        query_params: Optional[SalesDataQuery] = None
    ) -> List[SalesData]:
        """Get sales records for a tenant"""
        try:
            async with get_db_transaction() as db:
                repository = SalesRepository(db)
                records = await repository.get_by_tenant(tenant_id, query_params)

                logger.info("Retrieved sales records", count=len(records), tenant_id=tenant_id)
                return records

        except Exception as e:
            logger.error("Failed to get sales records", error=str(e), tenant_id=tenant_id)
            raise

    async def get_sales_record(self, record_id: UUID, tenant_id: UUID) -> Optional[SalesData]:
        """Get a specific sales record"""
        try:
            async with get_db_transaction() as db:
                repository = SalesRepository(db)
                record = await repository.get_by_id(record_id)

                # Verify record belongs to tenant
                if record and record.tenant_id != tenant_id:
                    return None

                return record

        except Exception as e:
            logger.error("Failed to get sales record", error=str(e), record_id=record_id, tenant_id=tenant_id)
            raise

    async def delete_sales_record(self, record_id: UUID, tenant_id: UUID) -> bool:
        """Delete a sales record"""
        try:
            async with get_db_transaction() as db:
                repository = SalesRepository(db)

                # Verify record belongs to tenant
                existing_record = await repository.get_by_id(record_id)
                if not existing_record or existing_record.tenant_id != tenant_id:
                    raise ValueError(f"Sales record {record_id} not found for tenant {tenant_id}")

                success = await repository.delete(record_id)

                if success:
                    logger.info("Deleted sales record", record_id=record_id, tenant_id=tenant_id)

                return success

        except Exception as e:
            logger.error("Failed to delete sales record", error=str(e), record_id=record_id, tenant_id=tenant_id)
            raise

    async def get_product_sales(
        self,
        tenant_id: UUID,
        inventory_product_id: UUID,
        start_date: Optional[datetime] = None,
        end_date: Optional[datetime] = None
    ) -> List[SalesData]:
        """Get sales records for a specific product by inventory ID"""
        try:
            async with get_db_transaction() as db:
                repository = SalesRepository(db)
                records = await repository.get_by_inventory_product(tenant_id, inventory_product_id, start_date, end_date)

                logger.info(
                    "Retrieved product sales",
                    count=len(records),
                    inventory_product_id=inventory_product_id,
                    tenant_id=tenant_id
                )
                return records

        except Exception as e:
            logger.error("Failed to get product sales", error=str(e), tenant_id=tenant_id, inventory_product_id=inventory_product_id)
            raise

    async def get_sales_analytics(
        self,
        tenant_id: UUID,
        start_date: Optional[datetime] = None,
        end_date: Optional[datetime] = None
    ) -> Dict[str, Any]:
        """Get sales analytics for a tenant"""
        try:
            async with get_db_transaction() as db:
                repository = SalesRepository(db)
                analytics = await repository.get_analytics(tenant_id, start_date, end_date)

                logger.info("Retrieved sales analytics", tenant_id=tenant_id)
                return analytics

        except Exception as e:
            logger.error("Failed to get sales analytics", error=str(e), tenant_id=tenant_id)
            raise

    async def get_product_categories(self, tenant_id: UUID) -> List[str]:
        """Get distinct product categories from inventory service"""
        try:
            # Get all unique categories from inventory service products
            # This is more accurate than cached categories in sales data
            ingredient_products = await self.inventory_client.search_products("", tenant_id, "ingredient")
            finished_products = await self.inventory_client.search_products("", tenant_id, "finished_product")

            categories = set()
            for product in ingredient_products:
                if product.get("ingredient_category"):
                    categories.add(product["ingredient_category"])

            for product in finished_products:
                if product.get("product_category"):
                    categories.add(product["product_category"])

            return sorted(categories)

        except Exception as e:
            logger.error("Failed to get product categories", error=str(e), tenant_id=tenant_id)
            raise

    async def validate_sales_record(
        self,
        record_id: UUID,
        tenant_id: UUID,
        validation_notes: Optional[str] = None
    ) -> SalesData:
        """Mark a sales record as validated, with optional notes.

        (The business-rule checks applied when creating new data live in
        `_validate_sales_data`.)
        """
        try:
            async with get_db_transaction() as db:
                repository = SalesRepository(db)

                # Verify record belongs to tenant
                existing_record = await repository.get_by_id(record_id)
                if not existing_record or existing_record.tenant_id != tenant_id:
                    raise ValueError(f"Sales record {record_id} not found for tenant {tenant_id}")

                validated_record = await repository.validate_record(record_id, validation_notes)

                logger.info("Validated sales record", record_id=record_id, tenant_id=tenant_id)
                return validated_record

        except Exception as e:
            logger.error("Failed to validate sales record", error=str(e), record_id=record_id, tenant_id=tenant_id)
            raise

    async def _validate_sales_data(self, sales_data: SalesDataCreate, tenant_id: UUID):
        """Validate sales data according to business rules"""
        # Example business validations

        # Check if revenue matches quantity * unit_price (if unit_price provided)
        if sales_data.unit_price and sales_data.quantity_sold:
            expected_revenue = sales_data.unit_price * sales_data.quantity_sold
            # Apply discount if any
            if sales_data.discount_applied:
                expected_revenue *= (1 - sales_data.discount_applied / 100)
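
            # Worked example (illustrative values only): unit_price=10.00,
            # quantity_sold=3, discount_applied=10 (i.e. 10%) gives
            # expected_revenue = 10.00 * 3 * (1 - 10 / 100) = 27.00, which is
            # then compared against the reported revenue within a small tolerance.
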
            # Allow for small rounding differences
            if abs(float(sales_data.revenue) - float(expected_revenue)) > 0.01:
                logger.warning(
                    "Revenue mismatch detected",
                    expected=float(expected_revenue),
                    actual=float(sales_data.revenue),
                    tenant_id=tenant_id
                )

        # Check date validity (not in future)
        if sales_data.date > datetime.utcnow():
            raise ValueError("Sales date cannot be in the future")

        # Additional business rules can be added here
        logger.info("Sales data validation passed", tenant_id=tenant_id)

    async def _post_create_actions(self, record: SalesData):
        """Actions to perform after creating a sales record"""
        try:
            # Decrease inventory for the sale
            if record.inventory_product_id and record.quantity_sold and record.quantity_sold > 0:
                await self._decrease_inventory_for_sale(record)

            # Here you could also:
            # - Send notifications
            # - Update analytics caches
            # - Trigger ML model updates

            logger.info("Post-create actions completed", record_id=record.id)

        except Exception as e:
            # Don't fail the main operation for auxiliary actions
            logger.warning("Failed to execute post-create actions", error=str(e), record_id=record.id)

    async def _decrease_inventory_for_sale(self, sales_record: SalesData) -> Optional[Dict[str, Any]]:
        """Decrease inventory stock for a sales record"""
        try:
            if not sales_record.inventory_product_id:
                logger.debug("No inventory_product_id for sales record, skipping stock decrease",
                             record_id=sales_record.id)
                return None

            if not sales_record.quantity_sold or sales_record.quantity_sold <= 0:
                logger.debug("Invalid quantity for sales record, skipping stock decrease",
                             record_id=sales_record.id, quantity=sales_record.quantity_sold)
                return None

            consumption_data = {
                "ingredient_id": str(sales_record.inventory_product_id),
                "quantity": float(sales_record.quantity_sold),
                "reference_number": str(sales_record.id),
                "notes": f"Sales: {sales_record.product_name} - {sales_record.sales_channel}",
                "fifo": True  # Use FIFO method for stock consumption
            }
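
            # The call below goes through the inventory client's internal shared
            # HTTP client; "fifo": True asks the inventory service to consume the
            # oldest stock batches first. (Descriptive comment only; the endpoint's
            # behaviour is defined by the inventory service, not here.)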
            result = await self.inventory_client._shared_client.consume_stock(
                consumption_data,
                str(sales_record.tenant_id)
            )

            if result:
                logger.info("Inventory decreased for sale",
                            sales_record_id=sales_record.id,
                            inventory_product_id=sales_record.inventory_product_id,
                            quantity=sales_record.quantity_sold,
                            method="FIFO")

                # Check if stock level is now low (after successful decrease)
                await self._check_low_stock_threshold(
                    sales_record.tenant_id,
                    sales_record.inventory_product_id,
                    sales_record.product_name,
                    result
                )
            else:
                logger.warning("Failed to decrease inventory for sale (no result)",
                               sales_record_id=sales_record.id)

            return result

        except ValueError as e:
            # Insufficient stock - log warning but don't fail the sale
            logger.warning("Insufficient stock for sale",
                           sales_record_id=sales_record.id,
                           error=str(e),
                           product_id=sales_record.inventory_product_id,
                           quantity_requested=sales_record.quantity_sold)

            # Trigger low stock alert
            await self._trigger_low_stock_alert(
                sales_record.tenant_id,
                sales_record.inventory_product_id,
                sales_record.product_name,
                error_message=str(e)
            )
            return None

        except Exception as e:
            # Other errors - log but don't fail the sale
            logger.error("Failed to decrease inventory for sale",
                         sales_record_id=sales_record.id,
                         error=str(e),
                         product_id=sales_record.inventory_product_id)
            return None

    async def _check_low_stock_threshold(
        self,
        tenant_id: UUID,
        product_id: UUID,
        product_name: str,
        consume_result: Dict[str, Any]
    ):
        """Check if stock level is below threshold after decrease"""
        try:
            # Get product details to check current stock and reorder point
            product = await self.inventory_client.get_product_by_id(product_id, tenant_id)

            if not product:
                return

            # Check if product has reorder point configured
            reorder_point = product.get("reorder_point", 0)
            current_stock = product.get("current_stock", 0)

            # Trigger alert if stock is below reorder point
            if reorder_point > 0 and current_stock <= reorder_point:
                logger.warning("Stock below reorder point",
                               product_id=product_id,
                               product_name=product_name,
                               current_stock=current_stock,
                               reorder_point=reorder_point,
                               tenant_id=tenant_id)

                await self._trigger_low_stock_alert(
                    tenant_id,
                    product_id,
                    product_name,
                    current_stock=current_stock,
                    reorder_point=reorder_point
                )

        except Exception as e:
            # Don't fail the operation if alert fails
            logger.error("Failed to check low stock threshold",
                         error=str(e),
                         product_id=product_id)

    async def _trigger_low_stock_alert(
        self,
        tenant_id: UUID,
        product_id: UUID,
        product_name: str,
        error_message: Optional[str] = None,
        current_stock: Optional[float] = None,
        reorder_point: Optional[float] = None
    ):
        """Trigger low stock alert notification"""
        try:
            # For now, just log the alert
            # In production, this could:
            # - Send email notification
            # - Create in-app notification
            # - Trigger webhook
            # - Create alert record in database
            # - Send to external alerting system (PagerDuty, Slack, etc.)

            alert_data = {
                "type": "low_stock",
                # "warning" when the current stock level is known (threshold check),
                # "critical" when the alert came from a failed stock consumption
                "severity": "warning" if current_stock is not None else "critical",
                "tenant_id": str(tenant_id),
                "product_id": str(product_id),
                "product_name": product_name,
                "current_stock": current_stock,
                "reorder_point": reorder_point,
                "error_message": error_message,
                "timestamp": datetime.utcnow().isoformat()
            }

            logger.warning("LOW_STOCK_ALERT", **alert_data)

            # Implement notification delivery via RabbitMQ event
            try:
                from shared.messaging import get_rabbitmq_client

                rabbitmq_client = get_rabbitmq_client()
                if rabbitmq_client:
                    # Publish low stock event for notification service to consume
                    event_payload = {
                        "event_id": str(uuid.uuid4()),
                        "event_type": "inventory.low_stock",
                        "timestamp": datetime.utcnow().isoformat(),
                        "tenant_id": str(tenant_id),
                        "data": alert_data
                    }

                    await rabbitmq_client.publish_event(
                        exchange_name="inventory.events",
                        routing_key="inventory.low_stock",
                        event_data=event_payload
                    )

                    logger.info("Published low stock alert event",
                                tenant_id=str(tenant_id),
                                product_id=product_id,
                                event_id=event_payload["event_id"])
                else:
                    logger.warning("RabbitMQ client not available, notification not sent")

            except Exception as notify_error:
                logger.error("Failed to publish low stock notification event",
                             error=str(notify_error),
                             tenant_id=str(tenant_id))
                # Don't fail the main operation if notification fails

        except Exception as e:
            logger.error("Failed to trigger low stock alert",
                         error=str(e),
                         product_id=product_id)

    # New inventory integration methods
    async def search_inventory_products(self, search_term: str, tenant_id: UUID,
                                        product_type: Optional[str] = None) -> List[Dict[str, Any]]:
        """Search products in inventory service"""
        try:
            products = await self.inventory_client.search_products(search_term, tenant_id, product_type)
            logger.info("Searched inventory products", search_term=search_term,
                        count=len(products), tenant_id=tenant_id)
            return products
        except Exception as e:
            logger.error("Failed to search inventory products",
                         error=str(e), search_term=search_term, tenant_id=tenant_id)
            return []

    async def get_inventory_product(self, product_id: UUID, tenant_id: UUID) -> Optional[Dict[str, Any]]:
        """Get product details from inventory service"""
        try:
            product = await self.inventory_client.get_product_by_id(product_id, tenant_id)
            if product:
                logger.info("Retrieved inventory product", product_id=product_id, tenant_id=tenant_id)
            return product
        except Exception as e:
            logger.error("Failed to get inventory product",
                         error=str(e), product_id=product_id, tenant_id=tenant_id)
            return None

    async def get_inventory_products_by_category(self, category: str, tenant_id: UUID,
                                                 product_type: Optional[str] = None) -> List[Dict[str, Any]]:
        """Get products by category from inventory service"""
        try:
            products = await self.inventory_client.get_products_by_category(category, tenant_id, product_type)
            logger.info("Retrieved inventory products by category", category=category,
                        count=len(products), tenant_id=tenant_id)
            return products
        except Exception as e:
            logger.error("Failed to get inventory products by category",
                         error=str(e), category=category, tenant_id=tenant_id)
            return []

    async def analyze_product_demand_patterns(
        self,
        tenant_id: UUID,
        inventory_product_id: UUID,
        start_date: Optional[datetime] = None,
        end_date: Optional[datetime] = None,
        min_history_days: int = 90
    ) -> Dict[str, Any]:
        """
        Analyze demand patterns for a specific product from historical sales data.

        This method provides insights on:
        - Demand trends (increasing/decreasing)
        - Volatility (coefficient of variation)
        - Weekly seasonal patterns
        - Peak/low demand days

        Args:
            tenant_id: Tenant identifier
            inventory_product_id: Product identifier
            start_date: Start date for analysis (optional)
            end_date: End date for analysis (optional)
            min_history_days: Minimum days of history required

        Returns:
            Analysis results with patterns, trends, and statistics
        """
        try:
            import pandas as pd

            logger.info(
                "Analyzing product demand patterns",
                tenant_id=tenant_id,
                inventory_product_id=inventory_product_id
            )

            # Fetch sales data for the product
            sales_records = await self.get_product_sales(
                tenant_id=tenant_id,
                inventory_product_id=inventory_product_id,
                start_date=start_date,
                end_date=end_date
            )
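
            # Note: the record count below is used as a proxy for days of history,
            # which assumes roughly one aggregated sales row per day for the product.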
            if not sales_records or len(sales_records) < min_history_days:
                return {
                    'analyzed_at': datetime.utcnow().isoformat(),
                    'history_days': len(sales_records) if sales_records else 0,
                    'patterns': {},
                    'trend_analysis': {},
                    'seasonal_factors': {},
                    'statistics': {},
                    'error': f'Insufficient historical data (need {min_history_days} days, got {len(sales_records) if sales_records else 0})'
                }

            # Convert to DataFrame for analysis
            sales_data = pd.DataFrame([{
                'date': record.date,
                'quantity': record.quantity_sold,
                'revenue': float(record.revenue) if record.revenue else 0
            } for record in sales_records])

            sales_data['date'] = pd.to_datetime(sales_data['date'])
            sales_data = sales_data.sort_values('date')

            # Calculate basic statistics
            mean_demand = sales_data['quantity'].mean()
            std_demand = sales_data['quantity'].std()
            cv = (std_demand / mean_demand) if mean_demand > 0 else 0
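            # The coefficient of variation (std / mean) is unit-free; values well
            # above 1 suggest demand that is highly volatile relative to its mean
            # (a rough rule of thumb, not a threshold enforced by this code).
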
            # Trend analysis
            sales_data['days_since_start'] = (sales_data['date'] - sales_data['date'].min()).dt.days
            trend_correlation = sales_data['days_since_start'].corr(sales_data['quantity'])
            is_increasing = trend_correlation > 0.2
            is_decreasing = trend_correlation < -0.2
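            # The +/-0.2 Pearson correlation cut-offs are heuristic labels for the
            # trend direction; values in between are reported as "stable".
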
            # Seasonal pattern detection (day of week)
            sales_data['day_of_week'] = sales_data['date'].dt.dayofweek
            weekly_pattern = sales_data.groupby('day_of_week')['quantity'].mean().to_dict()
            peak_day = max(weekly_pattern, key=weekly_pattern.get)
            low_day = min(weekly_pattern, key=weekly_pattern.get)
            peak_ratio = weekly_pattern[peak_day] / weekly_pattern[low_day] if weekly_pattern[low_day] > 0 else 1.0
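            # Day-of-week keys follow pandas' convention (0 = Monday ... 6 = Sunday);
            # peak_ratio compares the busiest weekday's average demand to the slowest weekday's.
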
            logger.info(
                "Demand pattern analysis complete",
                tenant_id=tenant_id,
                inventory_product_id=inventory_product_id,
                data_points=len(sales_data),
                trend_direction='increasing' if is_increasing else 'decreasing' if is_decreasing else 'stable'
            )

            return {
                'analyzed_at': datetime.utcnow().isoformat(),
                'history_days': len(sales_data),
                'date_range': {
                    'start': sales_data['date'].min().isoformat(),
                    'end': sales_data['date'].max().isoformat()
                },
                'statistics': {
                    'mean_demand': round(mean_demand, 2),
                    'std_demand': round(std_demand, 2),
                    'coefficient_of_variation': round(cv, 3),
                    'total_quantity': round(sales_data['quantity'].sum(), 2),
                    'total_revenue': round(sales_data['revenue'].sum(), 2),
                    'min_demand': round(sales_data['quantity'].min(), 2),
                    'max_demand': round(sales_data['quantity'].max(), 2)
                },
                'trend_analysis': {
                    'correlation': round(trend_correlation, 3),
                    'is_increasing': is_increasing,
                    'is_decreasing': is_decreasing,
                    'direction': 'increasing' if is_increasing else 'decreasing' if is_decreasing else 'stable'
                },
                'patterns': {
                    'weekly_pattern': {str(k): round(v, 2) for k, v in weekly_pattern.items()},
                    'peak_day': int(peak_day),
                    'low_day': int(low_day)
                },
                'seasonal_factors': {
                    'peak_ratio': round(peak_ratio, 2),
                    'has_strong_pattern': peak_ratio > 1.5
                }
            }

        except Exception as e:
            logger.error(
                "Error analyzing product demand patterns",
                tenant_id=tenant_id,
                inventory_product_id=inventory_product_id,
                error=str(e),
                exc_info=True
            )
            return {
                'analyzed_at': datetime.utcnow().isoformat(),
                'history_days': 0,
                'patterns': {},
                'trend_analysis': {},
                'seasonal_factors': {},
                'statistics': {},
                'error': str(e)
            }
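

# ---------------------------------------------------------------------------
# Usage sketch (illustrative only; not executed by the service). It shows how
# a caller such as a FastAPI route handler might drive SalesService. The field
# names below (product_name, quantity_sold, unit_price, revenue, date,
# sales_channel) mirror attributes this module already reads from the sales
# schemas/models; the concrete values and the exact SalesDataCreate signature
# are assumptions, not a contract defined in this file.
#
#   service = SalesService()
#   payload = SalesDataCreate(
#       product_name="Espresso Beans 1kg",      # hypothetical product
#       quantity_sold=3,
#       unit_price=12.50,
#       revenue=37.50,
#       date=datetime(2024, 1, 15, 10, 30),
#       sales_channel="pos",
#   )
#   record = await service.create_sales_record(payload, tenant_id, user_id)
#   analysis = await service.analyze_product_demand_patterns(
#       tenant_id, record.inventory_product_id
#   )
# ---------------------------------------------------------------------------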