Initial commit - production deployment
services/sales/app/__init__.py (new file, 1 line)
@@ -0,0 +1 @@
# services/sales/app/__init__.py

services/sales/app/api/__init__.py (new file, 1 line)
@@ -0,0 +1 @@
# services/sales/app/api/__init__.py

services/sales/app/api/analytics.py (new file, 99 lines)
@@ -0,0 +1,99 @@
# services/sales/app/api/analytics.py
"""
Sales Analytics API - Reporting, statistics, and insights
"""

from fastapi import APIRouter, Depends, HTTPException, Query, Path
from typing import Optional, Dict, Any
from uuid import UUID
from datetime import datetime
import structlog

from app.services.sales_service import SalesService
from shared.routing import RouteBuilder
from shared.auth.access_control import analytics_tier_required
from shared.auth.decorators import get_current_user_dep

route_builder = RouteBuilder('sales')
router = APIRouter(tags=["sales-analytics"])
logger = structlog.get_logger()


def get_sales_service():
    """Dependency injection for SalesService"""
    return SalesService()


@router.get(
    route_builder.build_analytics_route("summary")
)
@analytics_tier_required
async def get_sales_analytics(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    start_date: Optional[datetime] = Query(None, description="Start date filter"),
    end_date: Optional[datetime] = Query(None, description="End date filter"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    sales_service: SalesService = Depends(get_sales_service)
):
    """Get sales analytics summary for a tenant (Professional+ tier required)"""
    try:
        analytics = await sales_service.get_sales_analytics(tenant_id, start_date, end_date)

        logger.info("Retrieved sales analytics", tenant_id=tenant_id)
        return analytics

    except Exception as e:
        logger.error("Failed to get sales analytics", error=str(e), tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to get sales analytics: {str(e)}")


@router.get(
    route_builder.build_analytics_route("products/{product_id}/demand-patterns")
)
@analytics_tier_required
async def get_product_demand_patterns(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    product_id: UUID = Path(..., description="Product ID (inventory_product_id)"),
    start_date: Optional[datetime] = Query(None, description="Start date for analysis"),
    end_date: Optional[datetime] = Query(None, description="End date for analysis"),
    min_history_days: int = Query(90, description="Minimum days of history required", ge=30, le=365),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    sales_service: SalesService = Depends(get_sales_service)
):
    """
    Analyze demand patterns for a specific product (Professional+ tier required).

    Returns:
    - Demand trends (increasing/decreasing/stable)
    - Volatility metrics (coefficient of variation)
    - Weekly seasonal patterns
    - Peak/low demand days
    - Statistical summaries
    """
    try:
        patterns = await sales_service.analyze_product_demand_patterns(
            tenant_id=tenant_id,
            inventory_product_id=product_id,
            start_date=start_date,
            end_date=end_date,
            min_history_days=min_history_days
        )

        logger.info(
            "Retrieved product demand patterns",
            tenant_id=tenant_id,
            product_id=product_id
        )
        return patterns

    except Exception as e:
        logger.error(
            "Failed to get product demand patterns",
            error=str(e),
            tenant_id=tenant_id,
            product_id=product_id
        )
        raise HTTPException(
            status_code=500,
            detail=f"Failed to analyze demand patterns: {str(e)}"
        )
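
Note: for orientation, a client call against the summary endpoint might look like the sketch below. The URL shape is an assumption for illustration only; the real path is generated by RouteBuilder.build_analytics_route, which this commit does not show.

# Hypothetical client call; the exact route is an assumption.
import asyncio
import httpx

async def fetch_summary(base_url: str, tenant_id: str, token: str) -> dict:
    url = f"{base_url}/api/v1/tenants/{tenant_id}/sales/analytics/summary"
    async with httpx.AsyncClient() as client:
        resp = await client.get(
            url,
            params={"start_date": "2025-01-01T00:00:00Z"},
            headers={"Authorization": f"Bearer {token}"},
        )
        resp.raise_for_status()
        return resp.json()

# asyncio.run(fetch_summary("https://api.example.com", "<tenant-uuid>", "<jwt>"))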

services/sales/app/api/audit.py (new file, 237 lines)
@@ -0,0 +1,237 @@
# services/sales/app/api/audit.py
"""
Audit Logs API - Retrieve audit trail for sales service
"""

from fastapi import APIRouter, Depends, HTTPException, Query, Path, status
from typing import Optional, Dict, Any
from uuid import UUID
from datetime import datetime
import structlog
from sqlalchemy import select, func, and_, or_
from sqlalchemy.ext.asyncio import AsyncSession

from app.models import AuditLog
from shared.auth.decorators import get_current_user_dep
from shared.auth.access_control import require_user_role
from shared.routing import RouteBuilder
from shared.models.audit_log_schemas import (
    AuditLogResponse,
    AuditLogListResponse,
    AuditLogStatsResponse
)
from app.core.database import database_manager

route_builder = RouteBuilder('sales')
router = APIRouter(tags=["audit-logs"])
logger = structlog.get_logger()


async def get_db():
    """Database session dependency"""
    async with database_manager.get_session() as session:
        yield session


@router.get(
    route_builder.build_base_route("audit-logs"),
    response_model=AuditLogListResponse
)
@require_user_role(['admin', 'owner'])
async def get_audit_logs(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    start_date: Optional[datetime] = Query(None, description="Filter logs from this date"),
    end_date: Optional[datetime] = Query(None, description="Filter logs until this date"),
    user_id: Optional[UUID] = Query(None, description="Filter by user ID"),
    action: Optional[str] = Query(None, description="Filter by action type"),
    resource_type: Optional[str] = Query(None, description="Filter by resource type"),
    severity: Optional[str] = Query(None, description="Filter by severity level"),
    search: Optional[str] = Query(None, description="Search in description field"),
    limit: int = Query(100, ge=1, le=1000, description="Number of records to return"),
    offset: int = Query(0, ge=0, description="Number of records to skip"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """
    Get audit logs for sales service.
    Requires admin or owner role.
    """
    try:
        logger.info(
            "Retrieving audit logs",
            tenant_id=tenant_id,
            user_id=current_user.get("user_id"),
            filters={
                "start_date": start_date,
                "end_date": end_date,
                "action": action,
                "resource_type": resource_type,
                "severity": severity
            }
        )

        # Build query filters
        filters = [AuditLog.tenant_id == tenant_id]

        if start_date:
            filters.append(AuditLog.created_at >= start_date)
        if end_date:
            filters.append(AuditLog.created_at <= end_date)
        if user_id:
            filters.append(AuditLog.user_id == user_id)
        if action:
            filters.append(AuditLog.action == action)
        if resource_type:
            filters.append(AuditLog.resource_type == resource_type)
        if severity:
            filters.append(AuditLog.severity == severity)
        if search:
            filters.append(AuditLog.description.ilike(f"%{search}%"))

        # Count total matching records
        count_query = select(func.count()).select_from(AuditLog).where(and_(*filters))
        total_result = await db.execute(count_query)
        total = total_result.scalar() or 0

        # Fetch paginated results
        query = (
            select(AuditLog)
            .where(and_(*filters))
            .order_by(AuditLog.created_at.desc())
            .limit(limit)
            .offset(offset)
        )

        result = await db.execute(query)
        audit_logs = result.scalars().all()

        # Convert to response models
        items = [AuditLogResponse.from_orm(log) for log in audit_logs]

        logger.info(
            "Successfully retrieved audit logs",
            tenant_id=tenant_id,
            total=total,
            returned=len(items)
        )

        return AuditLogListResponse(
            items=items,
            total=total,
            limit=limit,
            offset=offset,
            has_more=(offset + len(items)) < total
        )

    except Exception as e:
        logger.error(
            "Failed to retrieve audit logs",
            error=str(e),
            tenant_id=tenant_id
        )
        raise HTTPException(
            status_code=500,
            detail=f"Failed to retrieve audit logs: {str(e)}"
        )


@router.get(
    route_builder.build_base_route("audit-logs/stats"),
    response_model=AuditLogStatsResponse
)
@require_user_role(['admin', 'owner'])
async def get_audit_log_stats(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    start_date: Optional[datetime] = Query(None, description="Filter logs from this date"),
    end_date: Optional[datetime] = Query(None, description="Filter logs until this date"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """
    Get audit log statistics for sales service.
    Requires admin or owner role.
    """
    try:
        logger.info(
            "Retrieving audit log statistics",
            tenant_id=tenant_id,
            user_id=current_user.get("user_id")
        )

        # Build base filters
        filters = [AuditLog.tenant_id == tenant_id]
        if start_date:
            filters.append(AuditLog.created_at >= start_date)
        if end_date:
            filters.append(AuditLog.created_at <= end_date)

        # Total events
        count_query = select(func.count()).select_from(AuditLog).where(and_(*filters))
        total_result = await db.execute(count_query)
        total_events = total_result.scalar() or 0

        # Events by action
        action_query = (
            select(AuditLog.action, func.count().label('count'))
            .where(and_(*filters))
            .group_by(AuditLog.action)
        )
        action_result = await db.execute(action_query)
        events_by_action = {row.action: row.count for row in action_result}

        # Events by severity
        severity_query = (
            select(AuditLog.severity, func.count().label('count'))
            .where(and_(*filters))
            .group_by(AuditLog.severity)
        )
        severity_result = await db.execute(severity_query)
        events_by_severity = {row.severity: row.count for row in severity_result}

        # Events by resource type
        resource_query = (
            select(AuditLog.resource_type, func.count().label('count'))
            .where(and_(*filters))
            .group_by(AuditLog.resource_type)
        )
        resource_result = await db.execute(resource_query)
        events_by_resource_type = {row.resource_type: row.count for row in resource_result}

        # Date range
        date_range_query = (
            select(
                func.min(AuditLog.created_at).label('min_date'),
                func.max(AuditLog.created_at).label('max_date')
            )
            .where(and_(*filters))
        )
        date_result = await db.execute(date_range_query)
        date_row = date_result.one()

        logger.info(
            "Successfully retrieved audit log statistics",
            tenant_id=tenant_id,
            total_events=total_events
        )

        return AuditLogStatsResponse(
            total_events=total_events,
            events_by_action=events_by_action,
            events_by_severity=events_by_severity,
            events_by_resource_type=events_by_resource_type,
            date_range={
                "min": date_row.min_date,
                "max": date_row.max_date
            }
        )

    except Exception as e:
        logger.error(
            "Failed to retrieve audit log statistics",
            error=str(e),
            tenant_id=tenant_id
        )
        raise HTTPException(
            status_code=500,
            detail=f"Failed to retrieve audit log statistics: {str(e)}"
        )
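
Note: assuming the response model's fields mirror the constructor call above, a stats payload would take roughly this shape (all values made up for illustration):

# Illustrative AuditLogStatsResponse payload; field names follow the
# constructor call above, values are invented.
example_stats = {
    "total_events": 482,
    "events_by_action": {"create": 301, "update": 169, "delete": 12},
    "events_by_severity": {"info": 450, "warning": 32},
    "events_by_resource_type": {"sales_record": 482},
    "date_range": {"min": "2025-01-01T00:00:00Z", "max": "2025-01-31T23:59:59Z"},
}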

services/sales/app/api/batch.py (new file, 160 lines)
@@ -0,0 +1,160 @@
# services/sales/app/api/batch.py
"""
Sales Batch API - Batch operations for enterprise dashboards

Phase 2 optimization: Eliminate N+1 query patterns by fetching data for
multiple tenants in a single request.
"""

from fastapi import APIRouter, Depends, HTTPException, Body, Path
from typing import List, Dict, Any
from datetime import date
from uuid import UUID
from pydantic import BaseModel, Field
import structlog
import asyncio

from app.services.sales_service import SalesService
from shared.auth.decorators import get_current_user_dep
from shared.routing import RouteBuilder
from shared.auth.access_control import require_user_role

route_builder = RouteBuilder('sales')
router = APIRouter(tags=["sales-batch"])
logger = structlog.get_logger()


def get_sales_service():
    """Dependency injection for SalesService"""
    return SalesService()


class SalesSummaryBatchRequest(BaseModel):
    """Request model for batch sales summary"""
    tenant_ids: List[str] = Field(..., description="List of tenant IDs", max_length=100)
    start_date: date = Field(..., description="Start date for sales period")
    end_date: date = Field(..., description="End date for sales period")


class SalesSummary(BaseModel):
    """Sales summary for a single tenant"""
    tenant_id: str
    total_revenue: float
    total_orders: int
    average_order_value: float
    period_start: str
    period_end: str


@router.post("/api/v1/batch/sales-summary", response_model=Dict[str, SalesSummary])
async def get_sales_summary_batch(
    request: SalesSummaryBatchRequest = Body(...),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    sales_service: SalesService = Depends(get_sales_service)
):
    """
    Get sales summary for multiple tenants in a single request.

    Optimized for enterprise dashboards to eliminate N+1 query patterns.
    Fetches sales data for all tenants in parallel.

    Args:
        request: Batch request with tenant IDs and date range

    Returns:
        Dictionary mapping tenant_id -> sales summary

    Example:
        POST /api/v1/sales/batch/sales-summary
        {
            "tenant_ids": ["tenant-1", "tenant-2", "tenant-3"],
            "start_date": "2025-01-01",
            "end_date": "2025-01-31"
        }

    Response:
        {
            "tenant-1": {"tenant_id": "tenant-1", "total_revenue": 50000, ...},
            "tenant-2": {"tenant_id": "tenant-2", "total_revenue": 45000, ...},
            "tenant-3": {"tenant_id": "tenant-3", "total_revenue": 52000, ...}
        }
    """
    try:
        if len(request.tenant_ids) > 100:
            raise HTTPException(
                status_code=400,
                detail="Maximum 100 tenant IDs allowed per batch request"
            )

        if not request.tenant_ids:
            return {}

        logger.info(
            "Batch fetching sales summaries",
            tenant_count=len(request.tenant_ids),
            start_date=str(request.start_date),
            end_date=str(request.end_date)
        )

        async def fetch_tenant_sales(tenant_id: str) -> tuple[str, SalesSummary]:
            """Fetch sales summary for a single tenant"""
            try:
                tenant_uuid = UUID(tenant_id)
                summary = await sales_service.get_sales_analytics(
                    tenant_uuid,
                    request.start_date,
                    request.end_date
                )

                return tenant_id, SalesSummary(
                    tenant_id=tenant_id,
                    total_revenue=float(summary.get('total_revenue', 0)),
                    total_orders=int(summary.get('total_orders', 0)),
                    average_order_value=float(summary.get('average_order_value', 0)),
                    period_start=str(request.start_date),
                    period_end=str(request.end_date)
                )
            except Exception as e:
                logger.warning(
                    "Failed to fetch sales for tenant in batch",
                    tenant_id=tenant_id,
                    error=str(e)
                )
                return tenant_id, SalesSummary(
                    tenant_id=tenant_id,
                    total_revenue=0.0,
                    total_orders=0,
                    average_order_value=0.0,
                    period_start=str(request.start_date),
                    period_end=str(request.end_date)
                )

        # Fetch all tenant sales in parallel
        tasks = [fetch_tenant_sales(tid) for tid in request.tenant_ids]
        results = await asyncio.gather(*tasks, return_exceptions=True)

        # Build result dictionary
        result_dict = {}
        for result in results:
            if isinstance(result, Exception):
                logger.error("Exception in batch sales fetch", error=str(result))
                continue
            tenant_id, summary = result
            result_dict[tenant_id] = summary

        logger.info(
            "Batch sales summaries retrieved",
            requested_count=len(request.tenant_ids),
            successful_count=len(result_dict)
        )

        return result_dict

    except HTTPException:
        raise
    except Exception as e:
        logger.error("Error in batch sales summary", error=str(e), exc_info=True)
        raise HTTPException(
            status_code=500,
            detail=f"Failed to fetch batch sales summaries: {str(e)}"
        )
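
Note: the fan-out above is the heart of the N+1 elimination: one request spawns one coroutine per tenant and awaits them together, dropping individual failures rather than failing the whole batch. A minimal, self-contained sketch of the same pattern (illustrative names, not part of this commit):

import asyncio

async def fetch_one(key: str) -> tuple[str, int]:
    await asyncio.sleep(0.01)  # stand-in for a per-tenant service call
    return key, len(key)

async def fetch_all(keys: list[str]) -> dict[str, int]:
    # gather(..., return_exceptions=True) keeps one failure from cancelling the rest
    results = await asyncio.gather(*(fetch_one(k) for k in keys), return_exceptions=True)
    out: dict[str, int] = {}
    for r in results:
        if isinstance(r, Exception):
            continue  # skip failures, as the endpoint above does
        key, value = r
        out[key] = value
    return out

print(asyncio.run(fetch_all(["tenant-1", "tenant-2"])))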

services/sales/app/api/internal_demo.py (new file, 314 lines)
@@ -0,0 +1,314 @@
"""
Internal Demo Cloning API for Sales Service
Service-to-service endpoint for cloning sales data
"""

from fastapi import APIRouter, Depends, HTTPException, Header
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, delete, func
import structlog
import uuid
from datetime import datetime, timezone, timedelta
from typing import Any, Optional
import os
from decimal import Decimal
import sys
import json
from pathlib import Path

sys.path.insert(0, str(Path(__file__).parent.parent.parent.parent))
from shared.utils.demo_dates import adjust_date_for_demo, resolve_time_marker

from app.core.database import get_db
from app.models.sales import SalesData

from app.core.config import settings

logger = structlog.get_logger()
router = APIRouter(prefix="/internal/demo", tags=["internal"])

# Base demo tenant IDs
DEMO_TENANT_PROFESSIONAL = "a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6"


def parse_date_field(
    field_value: Any,
    session_time: datetime,
    field_name: str = "date"
) -> Optional[datetime]:
    """
    Parse a date field from JSON, supporting BASE_TS markers and ISO timestamps.

    Args:
        field_value: The date field value (can be BASE_TS marker, ISO string, or None)
        session_time: Session creation time (timezone-aware UTC)
        field_name: Name of the field (for logging)

    Returns:
        Timezone-aware UTC datetime or None
    """
    if field_value is None:
        return None

    # Handle BASE_TS markers
    if isinstance(field_value, str) and field_value.startswith("BASE_TS"):
        try:
            return resolve_time_marker(field_value, session_time)
        except (ValueError, AttributeError) as e:
            logger.warning(
                "Failed to resolve BASE_TS marker",
                field_name=field_name,
                marker=field_value,
                error=str(e)
            )
            return None

    # Handle ISO timestamps (legacy format - convert to absolute datetime)
    if isinstance(field_value, str) and ('T' in field_value or 'Z' in field_value):
        try:
            parsed_date = datetime.fromisoformat(field_value.replace('Z', '+00:00'))
            # Adjust relative to session time
            return adjust_date_for_demo(parsed_date, session_time)
        except (ValueError, AttributeError) as e:
            logger.warning(
                "Failed to parse ISO timestamp",
                field_name=field_name,
                value=field_value,
                error=str(e)
            )
            return None

    logger.warning(
        "Unknown date format",
        field_name=field_name,
        value=field_value,
        value_type=type(field_value).__name__
    )
    return None


@router.post("/clone")
async def clone_demo_data(
    base_tenant_id: str,
    virtual_tenant_id: str,
    demo_account_type: str,
    session_id: Optional[str] = None,
    session_created_at: Optional[str] = None,
    db: AsyncSession = Depends(get_db)
):
    """
    Clone sales service data for a virtual demo tenant

    Clones:
    - Sales history records from template tenant
    - Adjusts dates to recent timeframe
    - Updates product references to new virtual tenant

    Args:
        base_tenant_id: Template tenant UUID to clone from
        virtual_tenant_id: Target virtual tenant UUID
        demo_account_type: Type of demo account
        session_id: Originating session ID for tracing
        session_created_at: ISO timestamp when demo session was created (for date adjustment)

    Returns:
        Cloning status and record counts
    """
    start_time = datetime.now(timezone.utc)

    # Parse session_created_at or fall back to now
    if session_created_at:
        try:
            session_time = datetime.fromisoformat(session_created_at.replace('Z', '+00:00'))
        except (ValueError, AttributeError) as e:
            logger.warning(
                "Invalid session_created_at format, using current time",
                session_created_at=session_created_at,
                error=str(e)
            )
            session_time = datetime.now(timezone.utc)
    else:
        logger.warning("session_created_at not provided, using current time")
        session_time = datetime.now(timezone.utc)

    logger.info(
        "Starting sales data cloning",
        base_tenant_id=base_tenant_id,
        virtual_tenant_id=virtual_tenant_id,
        demo_account_type=demo_account_type,
        session_id=session_id,
        session_time=session_time.isoformat()
    )

    try:
        # Validate UUIDs
        base_uuid = uuid.UUID(base_tenant_id)
        virtual_uuid = uuid.UUID(virtual_tenant_id)

        # Track cloning statistics
        stats = {
            "sales_records": 0,
        }

        # Load seed data from JSON files
        from shared.utils.seed_data_paths import get_seed_data_path

        if demo_account_type == "professional":
            json_file = get_seed_data_path("professional", "09-sales.json")
        elif demo_account_type == "enterprise":
            json_file = get_seed_data_path("enterprise", "09-sales.json")
        elif demo_account_type == "enterprise_child":
            json_file = get_seed_data_path("enterprise", "09-sales.json", child_id=base_tenant_id)
        else:
            raise ValueError(f"Invalid demo account type: {demo_account_type}")

        # Load JSON data
        with open(json_file, 'r', encoding='utf-8') as f:
            seed_data = json.load(f)

        logger.info(
            "Loaded sales seed data",
            sales_records=len(seed_data.get('sales_data', []))
        )

        # Load Sales Data from seed data
        for sale_data in seed_data.get('sales_data', []):
            # Parse date field (supports BASE_TS markers and ISO timestamps).
            # Different demo types may use different field names for the date;
            # prioritize in order: date, sale_date, sales_date.
            date_value = (sale_data.get('date') or
                          sale_data.get('sale_date') or
                          sale_data.get('sales_date'))

            adjusted_date = parse_date_field(
                date_value,
                session_time,
                "date"
            )

            # Ensure date is not None for NOT NULL constraint by using session_time as fallback
            if adjusted_date is None:
                adjusted_date = session_time

            # Create new sales record with adjusted date.
            # Map different possible JSON field names to the correct model field names.
            new_sale = SalesData(
                id=uuid.uuid4(),
                tenant_id=virtual_uuid,
                date=adjusted_date,
                inventory_product_id=sale_data.get('inventory_product_id') or sale_data.get('product_id'),
                quantity_sold=sale_data.get('quantity_sold') or sale_data.get('quantity', 0.0),
                unit_price=sale_data.get('unit_price', 0.0),
                revenue=sale_data.get('revenue') or sale_data.get('total_revenue') or sale_data.get('total_amount', 0.0),
                cost_of_goods=sale_data.get('cost_of_goods', 0.0),
                discount_applied=sale_data.get('discount_applied', 0.0),
                location_id=sale_data.get('location_id'),
                sales_channel=sale_data.get('sales_channel', 'IN_STORE'),
                source="demo_clone",  # Mark as seeded
                is_validated=sale_data.get('is_validated', True),
                validation_notes=sale_data.get('validation_notes'),
                notes=sale_data.get('notes'),
                weather_condition=sale_data.get('weather_condition'),
                is_holiday=sale_data.get('is_holiday', False),
                is_weekend=sale_data.get('is_weekend', False),
                created_at=session_time,
                updated_at=session_time
            )
            db.add(new_sale)
            stats["sales_records"] += 1

        # Commit all changes
        await db.commit()

        total_records = sum(stats.values())
        duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)

        logger.info(
            "Sales data cloning completed",
            virtual_tenant_id=virtual_tenant_id,
            total_records=total_records,
            stats=stats,
            duration_ms=duration_ms
        )

        return {
            "service": "sales",
            "status": "completed",
            "records_cloned": total_records,
            "duration_ms": duration_ms,
            "details": stats
        }

    except ValueError as e:
        logger.error("Invalid UUID format", error=str(e))
        raise HTTPException(status_code=400, detail=f"Invalid UUID: {str(e)}")

    except Exception as e:
        logger.error(
            "Failed to clone sales data",
            error=str(e),
            virtual_tenant_id=virtual_tenant_id,
            exc_info=True
        )

        # Rollback on error
        await db.rollback()

        return {
            "service": "sales",
            "status": "failed",
            "records_cloned": 0,
            "duration_ms": int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000),
            "error": str(e)
        }


@router.get("/clone/health")
async def clone_health_check():
    """
    Health check for internal cloning endpoint
    Used by orchestrator to verify service availability
    """
    return {
        "service": "sales",
        "clone_endpoint": "available",
        "version": "2.0.0"
    }


@router.delete("/tenant/{virtual_tenant_id}")
async def delete_demo_data(
    virtual_tenant_id: str,
    db: AsyncSession = Depends(get_db)
):
    """Delete all sales data for a virtual demo tenant"""
    logger.info("Deleting sales data for virtual tenant", virtual_tenant_id=virtual_tenant_id)
    start_time = datetime.now(timezone.utc)

    try:
        virtual_uuid = uuid.UUID(virtual_tenant_id)

        # Count records
        sales_count = await db.scalar(select(func.count(SalesData.id)).where(SalesData.tenant_id == virtual_uuid))

        # Delete sales data
        await db.execute(delete(SalesData).where(SalesData.tenant_id == virtual_uuid))
        await db.commit()

        duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
        logger.info("Sales data deleted successfully", virtual_tenant_id=virtual_tenant_id, duration_ms=duration_ms)

        return {
            "service": "sales",
            "status": "deleted",
            "virtual_tenant_id": virtual_tenant_id,
            "records_deleted": {
                "sales": sales_count,
                "total": sales_count
            },
            "duration_ms": duration_ms
        }
    except Exception as e:
        logger.error("Failed to delete sales data", error=str(e), exc_info=True)
        await db.rollback()
        raise HTTPException(status_code=500, detail=str(e))
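
Note: resolve_time_marker and adjust_date_for_demo live in shared.utils.demo_dates and their exact marker grammar is not shown in this commit. Purely as an illustration of the idea, a resolver for a hypothetical "BASE_TS-<n>d" offset syntax could look like:

# Hypothetical resolver; the real BASE_TS grammar may differ.
import re
from datetime import datetime, timedelta, timezone

def resolve_offset_marker(marker: str, base: datetime) -> datetime:
    """Resolve a hypothetical 'BASE_TS-<n>d' marker relative to a base timestamp."""
    if marker == "BASE_TS":
        return base
    m = re.fullmatch(r"BASE_TS-(\d+)d", marker)
    if not m:
        raise ValueError(f"Unknown marker: {marker}")
    return base - timedelta(days=int(m.group(1)))

base = datetime(2025, 1, 31, tzinfo=timezone.utc)
assert resolve_offset_marker("BASE_TS-7d", base) == base - timedelta(days=7)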

services/sales/app/api/sales_operations.py (new file, 520 lines)
@@ -0,0 +1,520 @@
# services/sales/app/api/sales_operations.py
"""
Sales Operations API - Business operations and complex workflows
"""

from fastapi import APIRouter, Depends, HTTPException, Query, Path, UploadFile, File, Form
from sqlalchemy.ext.asyncio import AsyncSession
from typing import List, Optional, Dict, Any
from uuid import UUID
from datetime import datetime
import structlog
import json

from app.schemas.sales import SalesDataResponse
from app.services.sales_service import SalesService
from app.services.data_import_service import DataImportService
from app.core.database import get_db
from shared.auth.decorators import get_current_user_dep
from shared.auth.access_control import require_user_role
from shared.routing import RouteBuilder

route_builder = RouteBuilder('sales')
router = APIRouter(tags=["sales-operations"])
logger = structlog.get_logger()


def get_sales_service():
    """Dependency injection for SalesService"""
    return SalesService()


def get_import_service():
    """Dependency injection for DataImportService"""
    return DataImportService()


@router.post(
    route_builder.build_operations_route("validate-record"),
    response_model=SalesDataResponse
)
async def validate_sales_record(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    record_id: UUID = Path(..., description="Sales record ID"),
    validation_notes: Optional[str] = Query(None, description="Validation notes"),
    sales_service: SalesService = Depends(get_sales_service)
):
    """Mark a sales record as validated"""
    try:
        validated_record = await sales_service.validate_sales_record(record_id, tenant_id, validation_notes)

        logger.info("Validated sales record", record_id=record_id, tenant_id=tenant_id)
        return validated_record

    except ValueError as ve:
        logger.warning("Error validating sales record", error=str(ve), record_id=record_id)
        raise HTTPException(status_code=400, detail=str(ve))
    except Exception as e:
        logger.error("Failed to validate sales record", error=str(e), record_id=record_id, tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to validate sales record: {str(e)}")


@router.get(
    route_builder.build_nested_resource_route("inventory-products", "inventory_product_id", "sales"),
    response_model=List[SalesDataResponse]
)
async def get_product_sales(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    inventory_product_id: UUID = Path(..., description="Inventory product ID"),
    start_date: Optional[datetime] = Query(None, description="Start date filter"),
    end_date: Optional[datetime] = Query(None, description="End date filter"),
    sales_service: SalesService = Depends(get_sales_service)
):
    """Get sales records for a specific product (cross-service query)"""
    try:
        records = await sales_service.get_product_sales(tenant_id, inventory_product_id, start_date, end_date)

        logger.info("Retrieved product sales", count=len(records), inventory_product_id=inventory_product_id, tenant_id=tenant_id)
        return records

    except Exception as e:
        logger.error("Failed to get product sales", error=str(e), tenant_id=tenant_id, inventory_product_id=inventory_product_id)
        raise HTTPException(status_code=500, detail=f"Failed to get product sales: {str(e)}")


@router.post(
    route_builder.build_operations_route("import/validate-json")
)
async def validate_json_data(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    data: Optional[Dict[str, Any]] = None,
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    import_service: DataImportService = Depends(get_import_service)
):
    """Validate JSON sales data"""
    try:
        if not data:
            raise HTTPException(status_code=400, detail="No data provided")

        logger.info("Validating JSON data", tenant_id=tenant_id, record_count=len(data.get("records", [])))

        if "records" in data:
            validation_data = {
                "tenant_id": str(tenant_id),
                "data": json.dumps(data.get("records", [])),
                "data_format": "json"
            }
        else:
            validation_data = data.copy()
            validation_data["tenant_id"] = str(tenant_id)
            if "data_format" not in validation_data:
                validation_data["data_format"] = "json"

        validation_result = await import_service.validate_import_data(validation_data)

        logger.info("JSON validation completed", tenant_id=tenant_id, valid=validation_result.is_valid)

        return {
            "is_valid": validation_result.is_valid,
            "total_records": validation_result.total_records,
            "valid_records": validation_result.valid_records,
            "invalid_records": validation_result.invalid_records,
            "errors": validation_result.errors,
            "warnings": validation_result.warnings,
            "summary": validation_result.summary
        }

    except HTTPException:
        # Re-raise HTTP exceptions as-is (don't convert the 400 above to a 500)
        raise
    except Exception as e:
        logger.error("Failed to validate JSON data", error=str(e), tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to validate data: {str(e)}")


@router.post(
    route_builder.build_operations_route("import/validate")
)
async def validate_sales_data_universal(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    file: Optional[UploadFile] = File(None),
    data: Optional[Dict[str, Any]] = None,
    file_format: Optional[str] = Form(None),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    import_service: DataImportService = Depends(get_import_service)
):
    """Universal validation endpoint for sales data - supports files and JSON"""
    try:
        logger.info("Validation endpoint called",
                    tenant_id=tenant_id,
                    file_present=file is not None,
                    file_filename=file.filename if file else None,
                    data_present=data is not None,
                    file_format=file_format)

        if file and file.filename:
            logger.info("Processing file upload branch", tenant_id=tenant_id, filename=file.filename)

            filename = file.filename.lower()
            if filename.endswith('.csv'):
                detected_format = 'csv'
            elif filename.endswith('.xlsx') or filename.endswith('.xls'):
                detected_format = 'excel'
            elif filename.endswith('.json'):
                detected_format = 'json'
            else:
                detected_format = file_format or 'csv'

            content = await file.read()

            if detected_format in ['xlsx', 'xls', 'excel']:
                import base64
                file_content = base64.b64encode(content).decode('utf-8')
            else:
                file_content = content.decode('utf-8')

            validation_data = {
                "tenant_id": str(tenant_id),
                "data": file_content,
                "data_format": detected_format,
                "filename": file.filename
            }

        elif data:
            logger.info("Processing JSON data branch", tenant_id=tenant_id, data_keys=list(data.keys()) if data else [])

            validation_data = data.copy()
            validation_data["tenant_id"] = str(tenant_id)
            if "data_format" not in validation_data:
                validation_data["data_format"] = "json"

        else:
            logger.error("No file or data provided", tenant_id=tenant_id, file_present=file is not None, data_present=data is not None)
            raise HTTPException(status_code=400, detail="No file or data provided for validation")

        logger.info("About to call validate_import_data", validation_data_keys=list(validation_data.keys()), data_size=len(validation_data.get("data", "")))
        validation_result = await import_service.validate_import_data(validation_data)

        logger.info("Validation completed",
                    tenant_id=tenant_id,
                    valid=validation_result.is_valid,
                    errors_count=len(validation_result.errors),
                    total_records=validation_result.total_records)

        return {
            "is_valid": validation_result.is_valid,
            "total_records": validation_result.total_records,
            "valid_records": validation_result.valid_records,
            "invalid_records": validation_result.invalid_records,
            "errors": validation_result.errors,
            "warnings": validation_result.warnings,
            "summary": validation_result.summary,
            "unique_products": validation_result.unique_products,
            "product_list": validation_result.product_list,
            "message": "Validation completed successfully" if validation_result.is_valid else "Validation found errors",
            "details": {
                "total_records": validation_result.total_records,
                "format": validation_data.get("data_format", "unknown")
            }
        }

    except HTTPException:
        # Re-raise HTTP exceptions as-is (don't convert to 500)
        raise
    except Exception as e:
        error_msg = str(e) if e else "Unknown error occurred during validation"
        logger.error("Failed to validate sales data", error=error_msg, tenant_id=tenant_id, exc_info=True)
        raise HTTPException(status_code=500, detail=f"Failed to validate data: {error_msg}")
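
Note: a file upload against the universal validator might look like the sketch below. The URL is an assumption (the real path comes from route_builder.build_operations_route); the multipart field name "file" matches the parameter above.

# Hypothetical multipart upload; the exact route is an assumption.
import httpx

def validate_csv(base_url: str, tenant_id: str, token: str, csv_path: str) -> dict:
    url = f"{base_url}/api/v1/tenants/{tenant_id}/sales/operations/import/validate"
    with open(csv_path, "rb") as f:
        resp = httpx.post(
            url,
            files={"file": (csv_path, f, "text/csv")},
            headers={"Authorization": f"Bearer {token}"},
        )
    resp.raise_for_status()
    return resp.json()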
@router.post(
    route_builder.build_operations_route("import/validate-csv")
)
async def validate_csv_data_legacy(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    file: UploadFile = File(...),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    import_service: DataImportService = Depends(get_import_service)
):
    """Legacy CSV validation endpoint - redirects to universal validator"""
    return await validate_sales_data_universal(
        tenant_id=tenant_id,
        file=file,
        current_user=current_user,
        import_service=import_service
    )


@router.post(
    route_builder.build_operations_route("import")
)
async def import_sales_data(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    data: Optional[Dict[str, Any]] = None,
    file: Optional[UploadFile] = File(None),
    file_format: Optional[str] = Form(None),
    update_existing: bool = Form(False, description="Whether to update existing records"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    import_service: DataImportService = Depends(get_import_service)
):
    """Enhanced import sales data - supports multiple file formats and JSON"""
    try:
        if file:
            if not file.filename:
                raise HTTPException(status_code=400, detail="No file provided")

            logger.info("Starting enhanced file import", tenant_id=tenant_id, filename=file.filename)

            filename = file.filename.lower()
            if filename.endswith('.csv'):
                detected_format = 'csv'
            elif filename.endswith('.xlsx') or filename.endswith('.xls'):
                detected_format = 'excel'
            elif filename.endswith('.json'):
                detected_format = 'json'
            else:
                detected_format = file_format or 'csv'

            content = await file.read()

            if detected_format in ['xlsx', 'xls', 'excel']:
                import base64
                file_content = base64.b64encode(content).decode('utf-8')
            else:
                file_content = content.decode('utf-8')

            import_result = await import_service.process_import(
                str(tenant_id),
                file_content,
                detected_format,
                filename=file.filename
            )

        elif data:
            logger.info("Starting enhanced JSON data import", tenant_id=tenant_id, record_count=len(data.get("records", [])))

            if "records" in data:
                records_json = json.dumps(data.get("records", []))
                import_result = await import_service.process_import(
                    str(tenant_id),
                    records_json,
                    "json"
                )
            else:
                import_result = await import_service.process_import(
                    str(tenant_id),
                    data.get("data", ""),
                    data.get("data_format", "json")
                )
        else:
            raise HTTPException(status_code=400, detail="No data or file provided")

        logger.info("Enhanced import completed",
                    tenant_id=tenant_id,
                    created=import_result.records_created,
                    updated=import_result.records_updated,
                    failed=import_result.records_failed,
                    processing_time=import_result.processing_time_seconds)

        response = {
            "success": import_result.success,
            "records_processed": import_result.records_processed,
            "records_created": import_result.records_created,
            "records_updated": import_result.records_updated,
            "records_failed": import_result.records_failed,
            "errors": import_result.errors,
            "warnings": import_result.warnings,
            "processing_time_seconds": import_result.processing_time_seconds,
            "records_imported": import_result.records_created,
            "message": f"Successfully imported {import_result.records_created} records" if import_result.success else "Import completed with errors"
        }

        if file:
            response["file_info"] = {
                "name": file.filename,
                "format": detected_format,
                "size_bytes": len(content) if 'content' in locals() else 0
            }

        return response

    except Exception as e:
        logger.error("Failed to import sales data", error=str(e), tenant_id=tenant_id, exc_info=True)
        raise HTTPException(status_code=500, detail=f"Failed to import data: {str(e)}")


@router.post(
    route_builder.build_operations_route("import/csv")
)
async def import_csv_data(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    file: UploadFile = File(...),
    update_existing: bool = Form(False, description="Whether to update existing records"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    import_service: DataImportService = Depends(get_import_service)
):
    """Import CSV sales data file"""
    try:
        if not file.filename or not file.filename.endswith('.csv'):
            raise HTTPException(status_code=400, detail="File must be a CSV file")

        logger.info("Starting CSV data import", tenant_id=tenant_id, filename=file.filename)

        content = await file.read()
        file_content = content.decode('utf-8')

        import_result = await import_service.process_import(
            str(tenant_id),
            file_content,
            "csv",
            filename=file.filename
        )

        logger.info("CSV import completed",
                    tenant_id=tenant_id,
                    filename=file.filename,
                    created=import_result.records_created,
                    updated=import_result.records_updated,
                    failed=import_result.records_failed)

        return {
            "success": import_result.success,
            "records_processed": import_result.records_processed,
            "records_created": import_result.records_created,
            "records_updated": import_result.records_updated,
            "records_failed": import_result.records_failed,
            "errors": import_result.errors,
            "warnings": import_result.warnings,
            "processing_time_seconds": import_result.processing_time_seconds
        }

    except Exception as e:
        logger.error("Failed to import CSV data", error=str(e), tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to import CSV data: {str(e)}")


@router.get(
    route_builder.build_operations_route("import/template")
)
async def get_import_template(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    format: str = "csv"
):
    """Get sales data import template"""
    try:
        if format not in ["csv", "json"]:
            raise HTTPException(status_code=400, detail="Format must be 'csv' or 'json'")

        if format == "csv":
            template = "date,product_name,product_category,product_sku,quantity_sold,unit_price,revenue,cost_of_goods,discount_applied,location_id,sales_channel,source,notes,weather_condition,is_holiday,is_weekend"
        else:
            template = {
                "records": [
                    {
                        "date": "2024-01-01T10:00:00Z",
                        "product_name": "Sample Product",
                        "product_category": "Sample Category",
                        "product_sku": "SAMPLE001",
                        "quantity_sold": 1,
                        "unit_price": 10.50,
                        "revenue": 10.50,
                        "cost_of_goods": 5.25,
                        "discount_applied": 0.0,
                        "location_id": "LOC001",
                        "sales_channel": "in_store",
                        "source": "manual",
                        "notes": "Sample sales record",
                        "weather_condition": "sunny",
                        "is_holiday": False,
                        "is_weekend": False
                    }
                ]
            }

        return {"template": template, "format": format}

    except Exception as e:
        logger.error("Failed to get import template", error=str(e), tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to get import template: {str(e)}")
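
Note: the CSV template above is just the header row; a complete upload adds one data row per sale. Against that header, a row might look like this (values purely illustrative, column order must match the template):

date,product_name,product_category,product_sku,quantity_sold,unit_price,revenue,cost_of_goods,discount_applied,location_id,sales_channel,source,notes,weather_condition,is_holiday,is_weekend
2024-01-01T10:00:00Z,Sample Product,Sample Category,SAMPLE001,3,10.50,31.50,15.75,0.0,LOC001,in_store,manual,,sunny,false,false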

# ============================================================================
# Tenant Data Deletion Operations (Internal Service Only)
# ============================================================================

from shared.auth.access_control import service_only_access
from shared.services.tenant_deletion import TenantDataDeletionResult
from app.services.tenant_deletion_service import SalesTenantDeletionService


@router.delete(
    route_builder.build_base_route("tenant/{tenant_id}", include_tenant_prefix=False),
    response_model=dict
)
@service_only_access
async def delete_tenant_data(
    tenant_id: str = Path(..., description="Tenant ID to delete data for"),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """
    Delete all sales data for a tenant (Internal service only)
    """
    try:
        logger.info("sales.tenant_deletion.api_called", tenant_id=tenant_id)

        deletion_service = SalesTenantDeletionService(db)
        result = await deletion_service.safe_delete_tenant_data(tenant_id)

        if not result.success:
            raise HTTPException(
                status_code=500,
                detail=f"Tenant data deletion failed: {', '.join(result.errors)}"
            )

        return {
            "message": "Tenant data deletion completed successfully",
            "summary": result.to_dict()
        }
    except HTTPException:
        raise
    except Exception as e:
        logger.error("sales.tenant_deletion.api_error", tenant_id=tenant_id, error=str(e), exc_info=True)
        raise HTTPException(status_code=500, detail=f"Failed to delete tenant data: {str(e)}")


@router.get(
    route_builder.build_base_route("tenant/{tenant_id}/deletion-preview", include_tenant_prefix=False),
    response_model=dict
)
@service_only_access
async def preview_tenant_data_deletion(
    tenant_id: str = Path(..., description="Tenant ID to preview deletion for"),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """
    Preview what data would be deleted for a tenant (dry-run)
    """
    try:
        logger.info("sales.tenant_deletion.preview_called", tenant_id=tenant_id)

        deletion_service = SalesTenantDeletionService(db)
        preview_data = await deletion_service.get_tenant_data_preview(tenant_id)
        result = TenantDataDeletionResult(tenant_id=tenant_id, service_name=deletion_service.service_name)
        result.deleted_counts = preview_data
        result.success = True

        return {
            "tenant_id": tenant_id,
            "service": "sales-service",
            "data_counts": result.deleted_counts,
            "total_items": sum(result.deleted_counts.values())
        }
    except HTTPException:
        raise
    except Exception as e:
        logger.error("sales.tenant_deletion.preview_error", tenant_id=tenant_id, error=str(e), exc_info=True)
        raise HTTPException(status_code=500, detail=f"Failed to preview tenant data deletion: {str(e)}")

services/sales/app/api/sales_records.py (new file, 244 lines)
@@ -0,0 +1,244 @@
# services/sales/app/api/sales_records.py
"""
Sales Records API - Atomic CRUD operations on SalesData model
"""

from fastapi import APIRouter, Depends, HTTPException, Query, Path, status
from typing import List, Optional, Dict, Any
from uuid import UUID
from datetime import datetime
import structlog

from app.schemas.sales import (
    SalesDataCreate,
    SalesDataUpdate,
    SalesDataResponse,
    SalesDataQuery
)
from app.services.sales_service import SalesService
from app.models import AuditLog
from shared.auth.decorators import get_current_user_dep
from shared.auth.access_control import require_user_role
from shared.routing import RouteBuilder
from shared.security import create_audit_logger, AuditSeverity, AuditAction

route_builder = RouteBuilder('sales')
router = APIRouter(tags=["sales-records"])
logger = structlog.get_logger()

# Initialize audit logger
audit_logger = create_audit_logger("sales-service", AuditLog)


def get_sales_service():
    """Dependency injection for SalesService"""
    return SalesService()


@router.post(
    route_builder.build_base_route("sales"),
    response_model=SalesDataResponse,
    status_code=status.HTTP_201_CREATED
)
@require_user_role(['admin', 'owner', 'member'])
async def create_sales_record(
    sales_data: SalesDataCreate,
    tenant_id: UUID = Path(..., description="Tenant ID"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    sales_service: SalesService = Depends(get_sales_service)
):
    """Create a new sales record"""
    try:
        logger.info(
            "Creating sales record",
            product=sales_data.product_name,
            quantity=sales_data.quantity_sold,
            tenant_id=tenant_id,
            user_id=current_user.get("user_id")
        )

        record = await sales_service.create_sales_record(
            sales_data,
            tenant_id,
            user_id=UUID(current_user["user_id"]) if current_user.get("user_id") else None
        )

        logger.info("Successfully created sales record", record_id=record.id, tenant_id=tenant_id)
        return record

    except ValueError as ve:
        logger.warning("Validation error creating sales record", error=str(ve), tenant_id=tenant_id)
        raise HTTPException(status_code=400, detail=str(ve))
    except Exception as e:
        logger.error("Failed to create sales record", error=str(e), tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to create sales record: {str(e)}")


@router.get(
    route_builder.build_base_route("sales"),
    response_model=List[SalesDataResponse]
)
async def get_sales_records(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    start_date: Optional[datetime] = Query(None, description="Start date filter"),
    end_date: Optional[datetime] = Query(None, description="End date filter"),
    product_name: Optional[str] = Query(None, description="Product name filter"),
    product_category: Optional[str] = Query(None, description="Product category filter"),
    location_id: Optional[str] = Query(None, description="Location filter"),
    sales_channel: Optional[str] = Query(None, description="Sales channel filter"),
    source: Optional[str] = Query(None, description="Data source filter"),
    is_validated: Optional[bool] = Query(None, description="Validation status filter"),
    limit: int = Query(50, ge=1, le=1000, description="Number of records to return"),
    offset: int = Query(0, ge=0, description="Number of records to skip"),
    order_by: str = Query("date", description="Field to order by"),
    order_direction: str = Query("desc", description="Order direction (asc/desc)"),
    sales_service: SalesService = Depends(get_sales_service)
):
    """Get sales records for a tenant with filtering and pagination"""
    try:
        query_params = SalesDataQuery(
            start_date=start_date,
            end_date=end_date,
            product_name=product_name,
            product_category=product_category,
            location_id=location_id,
            sales_channel=sales_channel,
            source=source,
            is_validated=is_validated,
            limit=limit,
            offset=offset,
            order_by=order_by,
            order_direction=order_direction
        )

        records = await sales_service.get_sales_records(tenant_id, query_params)

        logger.info("Retrieved sales records", count=len(records), tenant_id=tenant_id)
        return records

    except Exception as e:
        logger.error("Failed to get sales records", error=str(e), tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to get sales records: {str(e)}")


@router.get(
    route_builder.build_resource_detail_route("sales", "record_id"),
    response_model=SalesDataResponse
)
async def get_sales_record(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    record_id: UUID = Path(..., description="Sales record ID"),
    sales_service: SalesService = Depends(get_sales_service)
):
    """Get a specific sales record"""
    try:
        record = await sales_service.get_sales_record(record_id, tenant_id)

        if not record:
            raise HTTPException(status_code=404, detail="Sales record not found")

        return record

    except HTTPException:
        raise
    except Exception as e:
        logger.error("Failed to get sales record", error=str(e), record_id=record_id, tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to get sales record: {str(e)}")


@router.put(
    route_builder.build_resource_detail_route("sales", "record_id"),
    response_model=SalesDataResponse
)
async def update_sales_record(
    update_data: SalesDataUpdate,
    tenant_id: UUID = Path(..., description="Tenant ID"),
    record_id: UUID = Path(..., description="Sales record ID"),
    sales_service: SalesService = Depends(get_sales_service)
):
    """Update a sales record"""
    try:
        updated_record = await sales_service.update_sales_record(record_id, update_data, tenant_id)

        logger.info("Updated sales record", record_id=record_id, tenant_id=tenant_id)
        return updated_record

    except ValueError as ve:
        logger.warning("Validation error updating sales record", error=str(ve), record_id=record_id)
        raise HTTPException(status_code=400, detail=str(ve))
    except Exception as e:
        logger.error("Failed to update sales record", error=str(e), record_id=record_id, tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to update sales record: {str(e)}")


@router.delete(
    route_builder.build_resource_detail_route("sales", "record_id")
)
@require_user_role(['admin', 'owner'])
async def delete_sales_record(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    record_id: UUID = Path(..., description="Sales record ID"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    sales_service: SalesService = Depends(get_sales_service)
):
    """Delete a sales record (Admin+ only)"""
    try:
        # Get record details before deletion for audit log
        record = await sales_service.get_sales_record(record_id, tenant_id)

        success = await sales_service.delete_sales_record(record_id, tenant_id)

        if not success:
            raise HTTPException(status_code=404, detail="Sales record not found")

        # Log audit event for sales record deletion
        try:
            from app.core.database import database_manager
            # get_db() is an async generator dependency, so calling next() on it
            # would fail; open a one-off session via the context manager instead.
            async with database_manager.get_session() as db:
                await audit_logger.log_deletion(
                    db_session=db,
                    tenant_id=str(tenant_id),
                    user_id=current_user["user_id"],
                    resource_type="sales_record",
                    resource_id=str(record_id),
                    resource_data={
                        "product_name": record.product_name if record else None,
                        "quantity_sold": record.quantity_sold if record else None,
                        "sale_date": record.date.isoformat() if record and record.date else None
                    } if record else None,
                    description=f"Deleted sales record for {record.product_name if record else 'unknown product'}",
                    endpoint=f"/sales/{record_id}",
                    method="DELETE"
                )
        except Exception as audit_error:
            logger.warning("Failed to log audit event", error=str(audit_error))

        logger.info("Deleted sales record", record_id=record_id, tenant_id=tenant_id)
        return {"message": "Sales record deleted successfully"}

    except ValueError as ve:
        logger.warning("Error deleting sales record", error=str(ve), record_id=record_id)
        raise HTTPException(status_code=400, detail=str(ve))
    except HTTPException:
        raise
    except Exception as e:
        logger.error("Failed to delete sales record", error=str(e), record_id=record_id, tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to delete sales record: {str(e)}")


@router.get(
    route_builder.build_base_route("categories"),
    response_model=List[str]
)
async def get_product_categories(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    sales_service: SalesService = Depends(get_sales_service)
):
    """Get distinct product categories from sales data"""
    try:
        categories = await sales_service.get_product_categories(tenant_id)
        return categories

    except Exception as e:
        logger.error("Failed to get product categories", error=str(e), tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to get product categories: {str(e)}")
535
services/sales/app/consumers/sales_event_consumer.py
Normal file
@@ -0,0 +1,535 @@
"""
|
||||
Sales Event Consumer
|
||||
Processes sales transaction events from RabbitMQ and updates analytics
|
||||
Handles completed sales and refunds from POS systems
|
||||
"""
|
||||
import json
|
||||
import structlog
|
||||
from typing import Dict, Any
|
||||
from datetime import datetime, date
|
||||
from decimal import Decimal
|
||||
from collections import defaultdict
|
||||
|
||||
from shared.messaging import RabbitMQClient
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy import select, func
|
||||
from sqlalchemy.dialects.postgresql import insert
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
|
||||
class SalesEventConsumer:
|
||||
"""
|
||||
Consumes sales transaction events and updates sales analytics
|
||||
Processes events from POS consumer
|
||||
"""
|
||||
|
||||
def __init__(self, db_session: AsyncSession):
|
||||
self.db_session = db_session
|
||||
|
||||
async def consume_sales_events(
|
||||
self,
|
||||
rabbitmq_client: RabbitMQClient
|
||||
):
|
||||
"""
|
||||
Start consuming sales events from RabbitMQ
|
||||
"""
|
||||
async def process_message(message):
|
||||
"""Process a single sales event message"""
|
||||
try:
|
||||
async with message.process():
|
||||
# Parse event data
|
||||
event_data = json.loads(message.body.decode())
|
||||
logger.info(
|
||||
"Received sales event",
|
||||
event_id=event_data.get('event_id'),
|
||||
event_type=event_data.get('event_type'),
|
||||
tenant_id=event_data.get('tenant_id')
|
||||
)
|
||||
|
||||
# Process the event
|
||||
await self.process_sales_event(event_data)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
"Error processing sales event",
|
||||
error=str(e),
|
||||
exc_info=True
|
||||
)
|
||||
|
||||
# Start consuming events
|
||||
await rabbitmq_client.consume_events(
|
||||
exchange_name="sales.events",
|
||||
queue_name="sales.processing.queue",
|
||||
routing_key="sales.transaction.*",
|
||||
callback=process_message
|
||||
)
|
||||
|
||||
logger.info("Started consuming sales events")

    async def process_sales_event(self, event_data: Dict[str, Any]) -> bool:
        """
        Process a sales event based on type

        Args:
            event_data: Full event payload from RabbitMQ

        Returns:
            bool: True if processed successfully
        """
        try:
            event_type = event_data.get('event_type')
            data = event_data.get('data', {})
            tenant_id = event_data.get('tenant_id')

            if not tenant_id:
                logger.warning("Sales event missing tenant_id", event_data=event_data)
                return False

            # Route to the appropriate handler
            if event_type == 'sales.transaction.completed':
                success = await self._handle_transaction_completed(tenant_id, data)
            elif event_type == 'sales.transaction.refunded':
                success = await self._handle_transaction_refunded(tenant_id, data)
            else:
                logger.warning("Unknown sales event type", event_type=event_type)
                success = True  # Mark as processed to avoid retry

            if success:
                logger.info(
                    "Sales event processed successfully",
                    event_type=event_type,
                    tenant_id=tenant_id
                )
            else:
                logger.error(
                    "Sales event processing failed",
                    event_type=event_type,
                    tenant_id=tenant_id
                )

            return success

        except Exception as e:
            logger.error(
                "Error in process_sales_event",
                error=str(e),
                event_id=event_data.get('event_id'),
                exc_info=True
            )
            return False

    async def _handle_transaction_completed(
        self,
        tenant_id: str,
        data: Dict[str, Any]
    ) -> bool:
        """
        Handle completed sale transaction

        Updates:
        - Daily sales analytics aggregates
        - Revenue tracking
        - Transaction counters
        - Product sales tracking

        Args:
            tenant_id: Tenant ID
            data: Transaction data from event

        Returns:
            bool: True if handled successfully
        """
        try:
            transaction_id = data.get('transaction_id')
            total_amount = Decimal(str(data.get('total_amount', 0)))
            transaction_date_str = data.get('transaction_date')
            items = data.get('items', [])
            pos_system = data.get('pos_system', 'unknown')

            if not transaction_id:
                logger.warning("Transaction missing ID", data=data)
                return False

            # Parse the transaction date; fall back to today (UTC) if it is
            # absent or not an ISO string
            if transaction_date_str and isinstance(transaction_date_str, str):
                transaction_date = datetime.fromisoformat(
                    transaction_date_str.replace('Z', '+00:00')
                ).date()
            else:
                transaction_date = datetime.utcnow().date()

            # Check for duplicate processing (idempotency)
            # In production, would check a processed_transactions table
            # For now, we rely on unique constraints in the analytics table
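            #
            # A minimal idempotency sketch (hypothetical ProcessedTransaction
            # model, not part of this commit):
            #
            #   from app.models.processed_transactions import ProcessedTransaction
            #   already = await self.db_session.scalar(
            #       select(ProcessedTransaction.id).where(
            #           ProcessedTransaction.tenant_id == tenant_id,
            #           ProcessedTransaction.transaction_id == transaction_id
            #       )
            #   )
            #   if already:
            #       return True  # ack without re-aggregating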

            # Update daily sales analytics
            await self._update_daily_analytics(
                tenant_id=tenant_id,
                transaction_date=transaction_date,
                revenue=total_amount,
                transaction_count=1,
                refund_amount=Decimal('0')
            )

            # Update product sales tracking
            await self._update_product_sales(
                tenant_id=tenant_id,
                transaction_date=transaction_date,
                items=items
            )

            # Store transaction record (optional detailed tracking)
            await self._store_transaction_record(
                tenant_id=tenant_id,
                transaction_id=transaction_id,
                transaction_date=transaction_date,
                total_amount=total_amount,
                items=items,
                pos_system=pos_system,
                transaction_type='sale'
            )

            logger.info(
                "Transaction processed and analytics updated",
                tenant_id=tenant_id,
                transaction_id=transaction_id,
                total_amount=float(total_amount),
                date=str(transaction_date)
            )

            return True

        except Exception as e:
            logger.error(
                "Error handling transaction completed",
                error=str(e),
                tenant_id=tenant_id,
                transaction_id=data.get('transaction_id'),
                exc_info=True
            )
            return False

    async def _handle_transaction_refunded(
        self,
        tenant_id: str,
        data: Dict[str, Any]
    ) -> bool:
        """
        Handle refunded sale transaction

        Updates:
        - Daily sales analytics (negative revenue)
        - Refund counters
        - Product refund tracking

        Args:
            tenant_id: Tenant ID
            data: Refund data from event

        Returns:
            bool: True if handled successfully
        """
        try:
            refund_id = data.get('refund_id')
            original_transaction_id = data.get('original_transaction_id')
            refund_amount = Decimal(str(data.get('refund_amount', 0)))
            refund_date_str = data.get('refund_date')
            items = data.get('items', [])
            pos_system = data.get('pos_system', 'unknown')

            if not refund_id:
                logger.warning("Refund missing ID", data=data)
                return False

            # Parse the refund date; fall back to today (UTC) if it is
            # absent or not an ISO string
            if refund_date_str and isinstance(refund_date_str, str):
                refund_date = datetime.fromisoformat(
                    refund_date_str.replace('Z', '+00:00')
                ).date()
            else:
                refund_date = datetime.utcnow().date()

            # Update daily sales analytics (subtract revenue, add refund)
            await self._update_daily_analytics(
                tenant_id=tenant_id,
                transaction_date=refund_date,
                revenue=-refund_amount,  # Negative revenue
                transaction_count=0,  # Don't increment transaction count for refunds
                refund_amount=refund_amount
            )

            # Update product refund tracking
            await self._update_product_refunds(
                tenant_id=tenant_id,
                refund_date=refund_date,
                items=items
            )

            # Store refund record
            await self._store_transaction_record(
                tenant_id=tenant_id,
                transaction_id=refund_id,
                transaction_date=refund_date,
                total_amount=-refund_amount,
                items=items,
                pos_system=pos_system,
                transaction_type='refund',
                original_transaction_id=original_transaction_id
            )

            logger.info(
                "Refund processed and analytics updated",
                tenant_id=tenant_id,
                refund_id=refund_id,
                refund_amount=float(refund_amount),
                date=str(refund_date)
            )

            return True

        except Exception as e:
            logger.error(
                "Error handling transaction refunded",
                error=str(e),
                tenant_id=tenant_id,
                refund_id=data.get('refund_id'),
                exc_info=True
            )
            return False

    async def _update_daily_analytics(
        self,
        tenant_id: str,
        transaction_date: date,
        revenue: Decimal,
        transaction_count: int,
        refund_amount: Decimal
    ):
        """
        Update or create daily sales analytics record

        Uses UPSERT (INSERT ... ON CONFLICT UPDATE) for atomic updates

        Args:
            tenant_id: Tenant ID
            transaction_date: Date of transaction
            revenue: Revenue amount (negative for refunds)
            transaction_count: Number of transactions
            refund_amount: Refund amount
        """
        try:
            # Note: This assumes a sales_analytics table exists
            # In production, ensure the table is created via migration
            from app.models.sales_analytics import SalesAnalytics

            # Use PostgreSQL UPSERT for atomic updates
            stmt = insert(SalesAnalytics).values(
                tenant_id=tenant_id,
                date=transaction_date,
                total_revenue=revenue,
                total_transactions=transaction_count,
                total_refunds=refund_amount,
                average_transaction_value=revenue if transaction_count > 0 else Decimal('0'),
                updated_at=datetime.utcnow()
            ).on_conflict_do_update(
                index_elements=['tenant_id', 'date'],
                set_={
                    'total_revenue': SalesAnalytics.total_revenue + revenue,
                    'total_transactions': SalesAnalytics.total_transactions + transaction_count,
                    'total_refunds': SalesAnalytics.total_refunds + refund_amount,
                    'average_transaction_value': (
                        (SalesAnalytics.total_revenue + revenue) /
                        func.greatest(SalesAnalytics.total_transactions + transaction_count, 1)
                    ),
                    'updated_at': datetime.utcnow()
                }
            )

            await self.db_session.execute(stmt)
            await self.db_session.commit()

            logger.info(
                "Daily analytics updated",
                tenant_id=tenant_id,
                date=str(transaction_date),
                revenue_delta=float(revenue),
                transaction_count_delta=transaction_count
            )

        except Exception as e:
            await self.db_session.rollback()
            logger.error(
                "Failed to update daily analytics",
                tenant_id=tenant_id,
                date=str(transaction_date),
                error=str(e),
                exc_info=True
            )
            raise

    async def _update_product_sales(
        self,
        tenant_id: str,
        transaction_date: date,
        items: list
    ):
        """
        Update product sales tracking

        Args:
            tenant_id: Tenant ID
            transaction_date: Date of transaction
            items: List of items sold
        """
        try:
            # Aggregate items by product
            product_sales = defaultdict(lambda: {'quantity': 0, 'revenue': Decimal('0')})

            for item in items:
                product_id = item.get('product_id')
                if not product_id:
                    continue

                quantity = item.get('quantity', 0)
                unit_price = Decimal(str(item.get('unit_price', 0)))
                revenue = quantity * unit_price

                product_sales[product_id]['quantity'] += quantity
                product_sales[product_id]['revenue'] += revenue

            # Updating each product's sales would need a product_sales table
            # For now, log the aggregation
            logger.info(
                "Product sales aggregated",
                tenant_id=tenant_id,
                date=str(transaction_date),
                products_count=len(product_sales)
            )

            # In production, insert/update the product_sales table here,
            # using the same UPSERT pattern as the daily analytics
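            #
            # Sketch of that per-product UPSERT (hypothetical ProductSales model
            # keyed on tenant_id/product_id/date; not part of this commit):
            #
            #   from app.models.product_sales import ProductSales
            #   for product_id, totals in product_sales.items():
            #       stmt = insert(ProductSales).values(
            #           tenant_id=tenant_id,
            #           product_id=product_id,
            #           date=transaction_date,
            #           quantity=totals['quantity'],
            #           revenue=totals['revenue']
            #       ).on_conflict_do_update(
            #           index_elements=['tenant_id', 'product_id', 'date'],
            #           set_={
            #               'quantity': ProductSales.quantity + totals['quantity'],
            #               'revenue': ProductSales.revenue + totals['revenue']
            #           }
            #       )
            #       await self.db_session.execute(stmt)
            #   await self.db_session.commit()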

        except Exception as e:
            logger.error(
                "Failed to update product sales",
                tenant_id=tenant_id,
                error=str(e)
            )

    async def _update_product_refunds(
        self,
        tenant_id: str,
        refund_date: date,
        items: list
    ):
        """
        Update product refund tracking

        Args:
            tenant_id: Tenant ID
            refund_date: Date of refund
            items: List of items refunded
        """
        try:
            # Similar to product sales, but for refunds
            product_refunds = defaultdict(lambda: {'quantity': 0, 'amount': Decimal('0')})

            for item in items:
                product_id = item.get('product_id')
                if not product_id:
                    continue

                quantity = item.get('quantity', 0)
                unit_price = Decimal(str(item.get('unit_price', 0)))
                amount = quantity * unit_price

                product_refunds[product_id]['quantity'] += quantity
                product_refunds[product_id]['amount'] += amount

            logger.info(
                "Product refunds aggregated",
                tenant_id=tenant_id,
                date=str(refund_date),
                products_count=len(product_refunds)
            )

            # In production, update a product_refunds table with the same
            # UPSERT pattern sketched above for product sales

        except Exception as e:
            logger.error(
                "Failed to update product refunds",
                tenant_id=tenant_id,
                error=str(e)
            )

    async def _store_transaction_record(
        self,
        tenant_id: str,
        transaction_id: str,
        transaction_date: date,
        total_amount: Decimal,
        items: list,
        pos_system: str,
        transaction_type: str,
        original_transaction_id: str = None
    ):
        """
        Store detailed transaction record

        Args:
            tenant_id: Tenant ID
            transaction_id: Transaction/refund ID
            transaction_date: Date of transaction
            total_amount: Total amount
            items: Transaction items
            pos_system: POS system name
            transaction_type: 'sale' or 'refund'
            original_transaction_id: For refunds, the original transaction ID
        """
        try:
            # Would store in a transactions table for detailed tracking
            # For now, just log
            logger.info(
                "Transaction record created",
                tenant_id=tenant_id,
                transaction_id=transaction_id,
                type=transaction_type,
                amount=float(total_amount),
                items_count=len(items),
                pos_system=pos_system
            )

            # In production, insert into a transactions table:
            # from app.models.transactions import Transaction
            # transaction = Transaction(
            #     id=transaction_id,
            #     tenant_id=tenant_id,
            #     transaction_date=transaction_date,
            #     total_amount=total_amount,
            #     items=items,
            #     pos_system=pos_system,
            #     transaction_type=transaction_type,
            #     original_transaction_id=original_transaction_id
            # )
            # self.db_session.add(transaction)
            # await self.db_session.commit()

        except Exception as e:
            logger.error(
                "Failed to store transaction record",
                transaction_id=transaction_id,
                error=str(e)
            )


# Factory function for creating a consumer instance
def create_sales_event_consumer(db_session: AsyncSession) -> SalesEventConsumer:
    """Create sales event consumer instance"""
    return SalesEventConsumer(db_session)
1
services/sales/app/core/__init__.py
Normal file
@@ -0,0 +1 @@
# services/sales/app/core/__init__.py
72
services/sales/app/core/config.py
Normal file
@@ -0,0 +1,72 @@
# services/sales/app/core/config.py
"""
Sales Service Configuration
"""

import os
from typing import List
from pydantic import Field
from shared.config.base import BaseServiceSettings


class Settings(BaseServiceSettings):
    """Sales service settings extending base configuration"""

    # Override service-specific settings
    SERVICE_NAME: str = "sales-service"
    VERSION: str = "1.0.0"
    APP_NAME: str = "Bakery Sales Service"
    DESCRIPTION: str = "Sales data management and analytics service"

    # API Configuration
    API_V1_STR: str = "/api/v1"

    # Database configuration (built from individual components)
    @property
    def DATABASE_URL(self) -> str:
        """Build database URL from individual components"""
        # Try a complete URL first (for backward compatibility)
        complete_url = os.getenv("SALES_DATABASE_URL")
        if complete_url:
            return complete_url

        # Build from components; these defaults are development placeholders
        # and must be overridden via environment variables in production
        user = os.getenv("SALES_DB_USER", "sales_user")
        password = os.getenv("SALES_DB_PASSWORD", "sales_pass123")
        host = os.getenv("SALES_DB_HOST", "localhost")
        port = os.getenv("SALES_DB_PORT", "5432")
        name = os.getenv("SALES_DB_NAME", "sales_db")

        return f"postgresql+asyncpg://{user}:{password}@{host}:{port}/{name}"

    # Sales-specific Redis database
    REDIS_DB: int = Field(default=2, env="SALES_REDIS_DB")

    # File upload configuration
    MAX_UPLOAD_SIZE: int = 10 * 1024 * 1024  # 10MB
    UPLOAD_PATH: str = Field(default="/tmp/uploads", env="SALES_UPLOAD_PATH")
    ALLOWED_FILE_EXTENSIONS: List[str] = [".csv", ".xlsx", ".xls"]

    # Pagination
    DEFAULT_PAGE_SIZE: int = 50
    MAX_PAGE_SIZE: int = 1000

    # Data validation
    MIN_QUANTITY: float = 0.01
    MAX_QUANTITY: float = 10000.0
    MIN_REVENUE: float = 0.01
    MAX_REVENUE: float = 100000.0

    # Sales-specific cache TTLs
    SALES_CACHE_TTL: int = 300  # 5 minutes
    PRODUCT_CACHE_TTL: int = 600  # 10 minutes

    # Inter-service communication
    INVENTORY_SERVICE_URL: str = Field(
        default="http://inventory-service:8000",
        env="INVENTORY_SERVICE_URL"
    )


# Global settings instance
settings = Settings()
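
# Usage sketch (illustrative; the URL shown assumes the development defaults above):
#
#   >>> from app.core.config import settings
#   >>> settings.DATABASE_URL
#   'postgresql+asyncpg://sales_user:sales_pass123@localhost:5432/sales_db'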
86
services/sales/app/core/database.py
Normal file
@@ -0,0 +1,86 @@
# services/sales/app/core/database.py
"""
Sales Service Database Configuration using the shared database manager
"""

import structlog
from contextlib import asynccontextmanager

from app.core.config import settings
from shared.database.base import DatabaseManager, Base

logger = structlog.get_logger()

# Create the database manager instance
database_manager = DatabaseManager(
    database_url=settings.DATABASE_URL,
    service_name="sales-service",
    pool_size=settings.DB_POOL_SIZE,
    max_overflow=settings.DB_MAX_OVERFLOW,
    pool_recycle=settings.DB_POOL_RECYCLE,
    echo=settings.DB_ECHO
)


async def get_db():
    """
    Database dependency for FastAPI - using the shared database manager
    """
    async for session in database_manager.get_db():
        yield session


async def init_db():
    """Initialize database tables using the shared database manager"""
    try:
        logger.info("Initializing Sales Service database...")

        # Import all models to ensure they're registered
        from app.models import sales  # noqa: F401

        # Create all tables using the database manager
        await database_manager.create_tables(Base.metadata)

        logger.info("Sales Service database initialized successfully")

    except Exception as e:
        logger.error("Failed to initialize database", error=str(e))
        raise


async def close_db():
    """Close database connections using the shared database manager"""
    try:
        await database_manager.close_connections()
        logger.info("Database connections closed")
    except Exception as e:
        logger.error("Error closing database connections", error=str(e))


@asynccontextmanager
async def get_db_transaction():
    """
    Context manager for database transactions using the shared database manager
    """
    async with database_manager.get_session() as session:
        try:
            async with session.begin():
                yield session
        except Exception as e:
            logger.error("Transaction error", error=str(e))
            raise


@asynccontextmanager
async def get_background_session():
    """
    Context manager for background tasks using the shared database manager
    """
    async with database_manager.get_background_session() as session:
        yield session


async def health_check():
    """Database health check using the shared database manager"""
    return await database_manager.health_check()
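
# Usage sketch (illustrative; assumes the SalesData model is imported):
# run a unit of work atomically via the transaction context manager above.
#
#   async def mark_validated(record_id):
#       async with get_db_transaction() as session:
#           record = await session.get(SalesData, record_id)
#           record.is_validated = True  # committed when the block exits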
154
services/sales/app/main.py
Normal file
@@ -0,0 +1,154 @@
# services/sales/app/main.py
"""
Sales Service Main Application
"""

from fastapi import FastAPI
from sqlalchemy import text
from app.core.config import settings
from app.core.database import database_manager
from shared.service_base import StandardFastAPIService

# Import API routers
from app.api import sales_records, sales_operations, analytics, audit, batch, internal_demo


class SalesService(StandardFastAPIService):
    """Sales Service with standardized setup"""

    expected_migration_version = "00001"

    def __init__(self):
        # Define expected database tables for health checks
        sales_expected_tables = ['sales_data', 'sales_import_jobs']

        super().__init__(
            service_name="sales-service",
            app_name="Bakery Sales Service",
            description="Sales data management service for bakery operations",
            version="1.0.0",
            log_level=settings.LOG_LEVEL,
            cors_origins=settings.CORS_ORIGINS,
            api_prefix="",  # Empty because RouteBuilder already includes /api/v1
            database_manager=database_manager,
            expected_tables=sales_expected_tables
        )

    async def verify_migrations(self):
        """Verify the database schema matches the latest migration."""
        try:
            async with self.database_manager.get_session() as session:
                result = await session.execute(text("SELECT version_num FROM alembic_version"))
                version = result.scalar()
                if version != self.expected_migration_version:
                    self.logger.error(f"Migration version mismatch: expected {self.expected_migration_version}, got {version}")
                    raise RuntimeError(f"Migration version mismatch: expected {self.expected_migration_version}, got {version}")
                self.logger.info(f"Migration verification successful: {version}")
        except Exception as e:
            self.logger.error(f"Migration verification failed: {e}")
            raise

    async def on_startup(self, app: FastAPI):
        """Custom startup logic: verify migrations, then register custom metrics"""
        await self.verify_migrations()
        await super().on_startup(app)

        # Register custom metrics
        self.register_custom_metrics({
            "sales_records_created_total": {
                "type": "counter",
                "description": "Total sales records created"
            },
            "sales_records_updated_total": {
                "type": "counter",
                "description": "Total sales records updated"
            },
            "sales_queries_total": {
                "type": "counter",
                "description": "Sales record queries"
            },
            "product_queries_total": {
                "type": "counter",
                "description": "Product catalog queries"
            },
            "import_jobs_total": {
                "type": "counter",
                "description": "Data import jobs"
            },
            "export_jobs_total": {
                "type": "counter",
                "description": "Data export jobs"
            },
            "sales_create_duration_seconds": {
                "type": "histogram",
                "description": "Sales record creation duration"
            },
            "sales_query_duration_seconds": {
                "type": "histogram",
                "description": "Sales query duration"
            },
            "import_processing_duration_seconds": {
                "type": "histogram",
                "description": "Import processing duration"
            },
            "export_generation_duration_seconds": {
                "type": "histogram",
                "description": "Export generation duration"
            }
        })

    async def on_shutdown(self, app: FastAPI):
        """Custom shutdown logic for the sales service"""
        # Database cleanup is handled by the base class
        pass

    def get_service_features(self):
        """Return sales-specific features"""
        return [
            "sales_data_management",
            "product_catalog",
            "data_import_export",
            "sales_analytics",
            "performance_tracking"
        ]

    def setup_custom_endpoints(self):
        """Setup custom endpoints for the sales service"""
        @self.app.get("/")
        async def root():
            """Root endpoint"""
            return {
                "service": "Sales Service",
                "version": "1.0.0",
                "status": "running",
                "endpoints": {
                    "health": "/health",
                    "docs": "/docs",
                    "sales": "/api/v1/sales",
                    "products": "/api/v1/products"
                }
            }


# Create service instance
service = SalesService()

# Create FastAPI app with standardized setup
app = service.create_app()

# Setup standard endpoints
service.setup_standard_endpoints()

# Setup custom endpoints
service.setup_custom_endpoints()

# Include routers
# IMPORTANT: Register the audit router FIRST to avoid route matching conflicts
service.add_router(audit.router)
service.add_router(batch.router)
service.add_router(sales_records.router)
service.add_router(sales_operations.router)
service.add_router(analytics.router)
service.add_router(internal_demo.router, tags=["internal-demo"])
12
services/sales/app/models/__init__.py
Normal file
@@ -0,0 +1,12 @@
# services/sales/app/models/__init__.py

# Import AuditLog model for this service
from shared.security import create_audit_log_model
from shared.database.base import Base

# Create the audit log model for this service
AuditLog = create_audit_log_model(Base)

from .sales import SalesData, SalesImportJob

__all__ = ["SalesData", "SalesImportJob", "AuditLog"]
171
services/sales/app/models/sales.py
Normal file
@@ -0,0 +1,171 @@
# services/sales/app/models/sales.py
"""
Sales data models for the Sales Service
Enhanced with additional fields and indexes
"""

from sqlalchemy import Column, String, DateTime, Float, Integer, Text, Index, Boolean, Numeric
from sqlalchemy.dialects.postgresql import UUID
import uuid
from datetime import datetime, timezone
from typing import Dict, Any, Optional

from shared.database.base import Base


class SalesData(Base):
    """Enhanced sales data model"""
    __tablename__ = "sales_data"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)
    date = Column(DateTime(timezone=True), nullable=False, index=True)

    # Product reference to the inventory service (REQUIRED)
    inventory_product_id = Column(UUID(as_uuid=True), nullable=False, index=True)  # References inventory.ingredients.id

    # Sales data
    quantity_sold = Column(Integer, nullable=False)
    unit_price = Column(Numeric(10, 2), nullable=True)
    revenue = Column(Numeric(10, 2), nullable=False)
    cost_of_goods = Column(Numeric(10, 2), nullable=True)  # For profit calculation
    discount_applied = Column(Numeric(5, 2), nullable=True, default=0.0)  # Percentage

    # Location and channel
    location_id = Column(String(100), nullable=True, index=True)
    sales_channel = Column(String(50), nullable=True, default="in_store")  # in_store, online, delivery

    # Data source and quality
    source = Column(String(50), nullable=False, default="manual")  # manual, pos, online, import
    is_validated = Column(Boolean, default=False)
    validation_notes = Column(Text, nullable=True)

    # Additional metadata
    notes = Column(Text, nullable=True)
    weather_condition = Column(String(50), nullable=True)  # For correlation analysis
    is_holiday = Column(Boolean, default=False)
    is_weekend = Column(Boolean, default=False)

    # Audit fields
    created_at = Column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc))
    updated_at = Column(DateTime(timezone=True),
                        default=lambda: datetime.now(timezone.utc),
                        onupdate=lambda: datetime.now(timezone.utc))
    created_by = Column(UUID(as_uuid=True), nullable=True)  # User ID

    # Performance-optimized indexes
    __table_args__ = (
        # Core query patterns
        Index('idx_sales_tenant_date', 'tenant_id', 'date'),
        Index('idx_sales_tenant_location', 'tenant_id', 'location_id'),

        # Analytics queries
        Index('idx_sales_date_range', 'date', 'tenant_id'),
        Index('idx_sales_channel_date', 'sales_channel', 'date', 'tenant_id'),

        # Data quality queries
        Index('idx_sales_source_validated', 'source', 'is_validated', 'tenant_id'),
        # Primary product reference indexes
        Index('idx_sales_inventory_product', 'inventory_product_id', 'tenant_id'),
        Index('idx_sales_product_date', 'inventory_product_id', 'date', 'tenant_id'),
    )

    def to_dict(self) -> Dict[str, Any]:
        """Convert the model to a dictionary for API responses"""
        return {
            'id': str(self.id),
            'tenant_id': str(self.tenant_id),
            'date': self.date.isoformat() if self.date else None,
            'inventory_product_id': str(self.inventory_product_id),
            'quantity_sold': self.quantity_sold,
            'unit_price': float(self.unit_price) if self.unit_price else None,
            'revenue': float(self.revenue) if self.revenue else None,
            'cost_of_goods': float(self.cost_of_goods) if self.cost_of_goods else None,
            'discount_applied': float(self.discount_applied) if self.discount_applied else None,
            'location_id': self.location_id,
            'sales_channel': self.sales_channel,
            'source': self.source,
            'is_validated': self.is_validated,
            'validation_notes': self.validation_notes,
            'notes': self.notes,
            'weather_condition': self.weather_condition,
            'is_holiday': self.is_holiday,
            'is_weekend': self.is_weekend,
            'created_at': self.created_at.isoformat() if self.created_at else None,
            'updated_at': self.updated_at.isoformat() if self.updated_at else None,
            'created_by': str(self.created_by) if self.created_by else None,
        }

    @property
    def profit_margin(self) -> Optional[float]:
        """Calculate the profit margin if cost data is available"""
        if self.revenue and self.cost_of_goods:
            return float((self.revenue - self.cost_of_goods) / self.revenue * 100)
        return None
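
    # Worked example (illustrative values): revenue=Decimal('10.00') and
    # cost_of_goods=Decimal('6.00') give profit_margin == 40.0 (percent).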

# Product model removed - using inventory service as single source of truth
# Product data is now referenced via inventory_product_id in the SalesData model


class SalesImportJob(Base):
    """Track sales data import jobs"""
    __tablename__ = "sales_import_jobs"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)

    # Job details
    filename = Column(String(255), nullable=False)
    file_size = Column(Integer, nullable=True)
    import_type = Column(String(50), nullable=False, default="csv")  # csv, xlsx, api

    # Processing status
    status = Column(String(20), nullable=False, default="pending")  # pending, processing, completed, failed
    progress_percentage = Column(Float, default=0.0)

    # Results
    total_rows = Column(Integer, default=0)
    processed_rows = Column(Integer, default=0)
    successful_imports = Column(Integer, default=0)
    failed_imports = Column(Integer, default=0)
    duplicate_rows = Column(Integer, default=0)

    # Error tracking
    error_message = Column(Text, nullable=True)
    validation_errors = Column(Text, nullable=True)  # JSON string of validation errors

    # Timestamps
    started_at = Column(DateTime(timezone=True), nullable=True)
    completed_at = Column(DateTime(timezone=True), nullable=True)
    created_at = Column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc))
    created_by = Column(UUID(as_uuid=True), nullable=True)

    __table_args__ = (
        Index('idx_import_jobs_tenant_status', 'tenant_id', 'status', 'created_at'),
        Index('idx_import_jobs_status_date', 'status', 'created_at'),
    )

    def to_dict(self) -> Dict[str, Any]:
        """Convert the model to a dictionary for API responses"""
        return {
            'id': str(self.id),
            'tenant_id': str(self.tenant_id),
            'filename': self.filename,
            'file_size': self.file_size,
            'import_type': self.import_type,
            'status': self.status,
            'progress_percentage': self.progress_percentage,
            'total_rows': self.total_rows,
            'processed_rows': self.processed_rows,
            'successful_imports': self.successful_imports,
            'failed_imports': self.failed_imports,
            'duplicate_rows': self.duplicate_rows,
            'error_message': self.error_message,
            'validation_errors': self.validation_errors,
            'started_at': self.started_at.isoformat() if self.started_at else None,
            'completed_at': self.completed_at.isoformat() if self.completed_at else None,
            'created_at': self.created_at.isoformat() if self.created_at else None,
            'created_by': str(self.created_by) if self.created_by else None,
        }
5
services/sales/app/repositories/__init__.py
Normal file
@@ -0,0 +1,5 @@
# services/sales/app/repositories/__init__.py

from .sales_repository import SalesRepository

__all__ = ["SalesRepository"]
335
services/sales/app/repositories/sales_repository.py
Normal file
@@ -0,0 +1,335 @@
# services/sales/app/repositories/sales_repository.py
"""
Sales Repository using the Repository Pattern
"""

from typing import List, Optional, Dict, Any
from uuid import UUID
from datetime import datetime
from sqlalchemy import select, func, and_, desc, asc
from sqlalchemy.ext.asyncio import AsyncSession
import structlog

from app.models.sales import SalesData
from app.schemas.sales import SalesDataCreate, SalesDataUpdate, SalesDataQuery
from shared.database.repository import BaseRepository

logger = structlog.get_logger()


class SalesRepository(BaseRepository[SalesData, SalesDataCreate, SalesDataUpdate]):
    """Repository for sales data operations"""

    def __init__(self, session: AsyncSession):
        super().__init__(SalesData, session)

    async def create_sales_record(self, sales_data: SalesDataCreate, tenant_id: UUID) -> SalesData:
        """Create a new sales record"""
        try:
            # Prepare data
            create_data = sales_data.model_dump()
            create_data['tenant_id'] = tenant_id

            # Calculate the weekend flag if not provided
            if sales_data.date and create_data.get('is_weekend') is None:
                create_data['is_weekend'] = sales_data.date.weekday() >= 5

            # Create record
            record = await self.create(create_data)
            logger.info(
                "Created sales record",
                record_id=record.id,
                inventory_product_id=record.inventory_product_id,
                quantity=record.quantity_sold,
                tenant_id=tenant_id
            )
            return record

        except Exception as e:
            logger.error("Failed to create sales record", error=str(e), tenant_id=tenant_id)
            raise

    async def get_by_tenant(
        self,
        tenant_id: UUID,
        query_params: Optional[SalesDataQuery] = None
    ) -> List[SalesData]:
        """Get sales records by tenant with optional filtering"""
        try:
            # Build base query
            stmt = select(SalesData).where(SalesData.tenant_id == tenant_id)

            # Apply filters if query_params provided
            if query_params:
                if query_params.start_date:
                    stmt = stmt.where(SalesData.date >= query_params.start_date)
                if query_params.end_date:
                    stmt = stmt.where(SalesData.date <= query_params.end_date)
                # Note: product_name and product_category filters were removed;
                # filtering on those now requires a join with the inventory service
                if hasattr(query_params, 'inventory_product_id') and query_params.inventory_product_id:
                    stmt = stmt.where(SalesData.inventory_product_id == query_params.inventory_product_id)
                if query_params.location_id:
                    stmt = stmt.where(SalesData.location_id == query_params.location_id)
                if query_params.sales_channel:
                    stmt = stmt.where(SalesData.sales_channel == query_params.sales_channel)
                if query_params.source:
                    stmt = stmt.where(SalesData.source == query_params.source)
                if query_params.is_validated is not None:
                    stmt = stmt.where(SalesData.is_validated == query_params.is_validated)

                # Apply ordering
                if query_params.order_by and hasattr(SalesData, query_params.order_by):
                    order_col = getattr(SalesData, query_params.order_by)
                    if query_params.order_direction == 'asc':
                        stmt = stmt.order_by(asc(order_col))
                    else:
                        stmt = stmt.order_by(desc(order_col))
                else:
                    stmt = stmt.order_by(desc(SalesData.date))

                # Apply pagination
                stmt = stmt.offset(query_params.offset).limit(query_params.limit)
            else:
                # Default ordering with a safety limit for direct repository calls
                # Note: API calls always provide query_params, so this only applies to direct usage
                stmt = stmt.order_by(desc(SalesData.date)).limit(10000)

            result = await self.session.execute(stmt)
            records = result.scalars().all()

            logger.info(
                "Retrieved sales records",
                count=len(records),
                tenant_id=tenant_id
            )
            return list(records)

        except Exception as e:
            logger.error("Failed to get sales records", error=str(e), tenant_id=tenant_id)
            raise

    async def get_by_inventory_product(
        self,
        tenant_id: UUID,
        inventory_product_id: UUID,
        start_date: Optional[datetime] = None,
        end_date: Optional[datetime] = None
    ) -> List[SalesData]:
        """Get sales records for a specific inventory product"""
        try:
            stmt = select(SalesData).where(
                and_(
                    SalesData.tenant_id == tenant_id,
                    SalesData.inventory_product_id == inventory_product_id
                )
            )

            if start_date:
                stmt = stmt.where(SalesData.date >= start_date)
            if end_date:
                stmt = stmt.where(SalesData.date <= end_date)

            stmt = stmt.order_by(desc(SalesData.date))

            result = await self.session.execute(stmt)
            records = result.scalars().all()

            return list(records)

        except Exception as e:
            logger.error("Failed to get product sales", error=str(e), tenant_id=tenant_id, inventory_product_id=inventory_product_id)
            raise

    async def get_analytics(
        self,
        tenant_id: UUID,
        start_date: Optional[datetime] = None,
        end_date: Optional[datetime] = None
    ) -> Dict[str, Any]:
        """Get sales analytics for a tenant"""
        try:
            # Total revenue and quantity
            summary_query = select(
                func.sum(SalesData.revenue).label('total_revenue'),
                func.sum(SalesData.quantity_sold).label('total_quantity'),
                func.count().label('total_transactions'),
                func.avg(SalesData.revenue).label('avg_transaction_value')
            ).where(SalesData.tenant_id == tenant_id)

            if start_date:
                summary_query = summary_query.where(SalesData.date >= start_date)
            if end_date:
                summary_query = summary_query.where(SalesData.date <= end_date)

            result = await self.session.execute(summary_query)
            summary = result.first()

            # Top products (keyed by inventory_product_id; was product_name)
            top_products_query = select(
                SalesData.inventory_product_id,
                func.sum(SalesData.revenue).label('revenue'),
                func.sum(SalesData.quantity_sold).label('quantity')
            ).where(SalesData.tenant_id == tenant_id)

            if start_date:
                top_products_query = top_products_query.where(SalesData.date >= start_date)
            if end_date:
                top_products_query = top_products_query.where(SalesData.date <= end_date)

            top_products_query = top_products_query.group_by(
                SalesData.inventory_product_id
            ).order_by(
                desc(func.sum(SalesData.revenue))
            ).limit(10)

            top_products_result = await self.session.execute(top_products_query)
            top_products = [
                {
                    'inventory_product_id': str(row.inventory_product_id),
                    'revenue': float(row.revenue) if row.revenue else 0,
                    'quantity': row.quantity or 0
                }
                for row in top_products_result
            ]

            # Sales by channel
            channel_query = select(
                SalesData.sales_channel,
                func.sum(SalesData.revenue).label('revenue'),
                func.count().label('transactions')
            ).where(SalesData.tenant_id == tenant_id)

            if start_date:
                channel_query = channel_query.where(SalesData.date >= start_date)
            if end_date:
                channel_query = channel_query.where(SalesData.date <= end_date)

            channel_query = channel_query.group_by(SalesData.sales_channel)

            channel_result = await self.session.execute(channel_query)
            sales_by_channel = {
                row.sales_channel: {
                    'revenue': float(row.revenue) if row.revenue else 0,
                    'transactions': row.transactions or 0
                }
                for row in channel_result
            }

            return {
                'total_revenue': float(summary.total_revenue) if summary.total_revenue else 0,
                'total_quantity': summary.total_quantity or 0,
                'total_transactions': summary.total_transactions or 0,
                'average_transaction_value': float(summary.avg_transaction_value) if summary.avg_transaction_value else 0,
                'top_products': top_products,
                'sales_by_channel': sales_by_channel
            }

        except Exception as e:
            logger.error("Failed to get sales analytics", error=str(e), tenant_id=tenant_id)
            raise

    async def get_product_categories(self, tenant_id: UUID) -> List[str]:
        """Get distinct product categories for a tenant"""
        try:
            # Note: the product_category field was removed - categories are now
            # managed via the inventory service, so this method should be updated
            # to query categories there. For now, return an empty list to avoid
            # breaking existing code.
            logger.warning("get_product_categories called but product_category field was removed",
                           tenant_id=tenant_id)
            categories = []

            return sorted(categories)

        except Exception as e:
            logger.error("Failed to get product categories", error=str(e), tenant_id=tenant_id)
            raise

    async def validate_record(self, record_id: UUID, validation_notes: Optional[str] = None) -> SalesData:
        """Mark a sales record as validated"""
        try:
            record = await self.get_by_id(record_id)
            if not record:
                raise ValueError(f"Sales record {record_id} not found")

            update_data = {
                'is_validated': True,
                'validation_notes': validation_notes
            }

            updated_record = await self.update(record_id, update_data)

            logger.info("Validated sales record", record_id=record_id)
            return updated_record

        except Exception as e:
            logger.error("Failed to validate sales record", error=str(e), record_id=record_id)
            raise

    async def create_sales_records_bulk(
        self,
        sales_data_list: List[SalesDataCreate],
        tenant_id: UUID,
        return_records: bool = False
    ) -> int | List[SalesData]:
        """Bulk insert sales records for performance optimization

        Args:
            sales_data_list: List of sales data to create
            tenant_id: Tenant ID
            return_records: If True, returns the list of created records instead of a count

        Returns:
            Either the count of created records (int) or the created records (List[SalesData])
        """
        try:
            from uuid import uuid4

            records = []
            for sales_data in sales_data_list:
                is_weekend = sales_data.date.weekday() >= 5 if sales_data.date else False

                record = SalesData(
                    id=uuid4(),
                    tenant_id=tenant_id,
                    date=sales_data.date,
                    inventory_product_id=sales_data.inventory_product_id,
                    quantity_sold=sales_data.quantity_sold,
                    unit_price=sales_data.unit_price,
                    revenue=sales_data.revenue,
                    location_id=sales_data.location_id,
                    sales_channel=sales_data.sales_channel,
                    source=sales_data.source,
                    is_weekend=is_weekend,
                    is_validated=getattr(sales_data, 'is_validated', False)
                )
                records.append(record)

            self.session.add_all(records)
            await self.session.flush()

            logger.info(
                "Bulk created sales records",
                count=len(records),
                tenant_id=tenant_id
            )

            return records if return_records else len(records)

        except Exception as e:
            logger.error("Failed to bulk create sales records", error=str(e), tenant_id=tenant_id)
            raise
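
# Usage sketch (illustrative; assumes get_db_transaction from app.core.database
# and a prepared list of SalesDataCreate rows):
#
#   async with get_db_transaction() as session:
#       repo = SalesRepository(session)
#       created = await repo.create_sales_records_bulk(rows, tenant_id, return_records=True)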
19
services/sales/app/schemas/__init__.py
Normal file
@@ -0,0 +1,19 @@
# services/sales/app/schemas/__init__.py

from .sales import (
    SalesDataCreate,
    SalesDataUpdate,
    SalesDataResponse,
    SalesDataQuery,
    SalesAnalytics,
    ProductSalesAnalytics
)

__all__ = [
    "SalesDataCreate",
    "SalesDataUpdate",
    "SalesDataResponse",
    "SalesDataQuery",
    "SalesAnalytics",
    "ProductSalesAnalytics"
]
148
services/sales/app/schemas/sales.py
Normal file
@@ -0,0 +1,148 @@
# services/sales/app/schemas/sales.py
"""
Sales Service Pydantic Schemas
"""

from pydantic import BaseModel, Field, validator
from typing import Optional, List
from datetime import datetime
from uuid import UUID
from decimal import Decimal


class SalesDataBase(BaseModel):
    """Base sales data schema"""
    # Product reference - REQUIRED reference to the inventory service
    inventory_product_id: UUID = Field(..., description="Reference to inventory service product")

    quantity_sold: int = Field(..., gt=0, description="Quantity sold")
    unit_price: Optional[Decimal] = Field(None, ge=0, description="Unit price")
    revenue: Decimal = Field(..., gt=0, description="Total revenue")
    cost_of_goods: Optional[Decimal] = Field(None, ge=0, description="Cost of goods sold")
    discount_applied: Optional[Decimal] = Field(0, ge=0, le=100, description="Discount percentage")

    location_id: Optional[str] = Field(None, max_length=100, description="Location identifier")
    sales_channel: Optional[str] = Field("in_store", description="Sales channel")
    source: str = Field("manual", description="Data source")

    notes: Optional[str] = Field(None, description="Additional notes")
    weather_condition: Optional[str] = Field(None, max_length=50, description="Weather condition")
    is_holiday: bool = Field(False, description="Holiday flag")
    is_weekend: bool = Field(False, description="Weekend flag")

    @validator('sales_channel')
    def validate_sales_channel(cls, v):
        allowed_channels = ['in_store', 'online', 'delivery', 'wholesale']
        if v not in allowed_channels:
            raise ValueError(f'Sales channel must be one of: {allowed_channels}')
        return v

    @validator('source')
    def validate_source(cls, v):
        allowed_sources = ['manual', 'pos', 'online', 'import', 'api', 'csv', 'demo_clone']
        if v not in allowed_sources:
            raise ValueError(f'Source must be one of: {allowed_sources}')
        return v


class SalesDataCreate(SalesDataBase):
    """Schema for creating sales data"""
    tenant_id: Optional[UUID] = Field(None, description="Tenant ID (set automatically)")
    date: datetime = Field(..., description="Sale date and time")


class SalesDataUpdate(BaseModel):
    """Schema for updating sales data"""
    # Note: product_name and product_category fields removed - use inventory service for product management
    # product_name: Optional[str] = Field(None, min_length=1, max_length=255)  # DEPRECATED
    # product_category: Optional[str] = Field(None, max_length=100)  # DEPRECATED
    # product_sku: Optional[str] = Field(None, max_length=100)  # DEPRECATED - use inventory service

    quantity_sold: Optional[int] = Field(None, gt=0)
    unit_price: Optional[Decimal] = Field(None, ge=0)
    revenue: Optional[Decimal] = Field(None, gt=0)
    cost_of_goods: Optional[Decimal] = Field(None, ge=0)
    discount_applied: Optional[Decimal] = Field(None, ge=0, le=100)

    location_id: Optional[str] = Field(None, max_length=100)
    sales_channel: Optional[str] = None

    notes: Optional[str] = None
    weather_condition: Optional[str] = Field(None, max_length=50)
    is_holiday: Optional[bool] = None
    is_weekend: Optional[bool] = None

    validation_notes: Optional[str] = None
    is_validated: Optional[bool] = None


class SalesDataResponse(SalesDataBase):
    """Schema for sales data responses"""
    id: UUID
    tenant_id: UUID
    date: datetime

    is_validated: bool = False
    validation_notes: Optional[str] = None

    created_at: datetime
    updated_at: datetime
    created_by: Optional[UUID] = None

    profit_margin: Optional[float] = Field(None, description="Calculated profit margin")

    class Config:
        from_attributes = True


class SalesDataQuery(BaseModel):
    """Schema for sales data queries"""
    start_date: Optional[datetime] = None
    end_date: Optional[datetime] = None
    # Note: product_name and product_category filtering now requires inventory service integration
    # product_name: Optional[str] = None  # DEPRECATED - use inventory_product_id or join with inventory service
    # product_category: Optional[str] = None  # DEPRECATED - use inventory service categories
    inventory_product_id: Optional[UUID] = None  # Filter by specific inventory product ID
    location_id: Optional[str] = None
    sales_channel: Optional[str] = None
    source: Optional[str] = None
    is_validated: Optional[bool] = None

    limit: int = Field(50, ge=1, le=1000, description="Number of records to return")
    offset: int = Field(0, ge=0, description="Number of records to skip")

    order_by: str = Field("date", description="Field to order by")
    order_direction: str = Field("desc", description="Order direction")

    @validator('order_direction')
    def validate_order_direction(cls, v):
        if v.lower() not in ['asc', 'desc']:
            raise ValueError('Order direction must be "asc" or "desc"')
        return v.lower()
|
||||
|
||||
|
||||
# Product schemas removed - using inventory service as single source of truth
|
||||
# Product data is accessed via inventory service client
|
||||
|
||||
|
||||
# Analytics schemas
|
||||
class SalesAnalytics(BaseModel):
|
||||
"""Sales analytics response"""
|
||||
total_revenue: Decimal
|
||||
total_quantity: int
|
||||
total_transactions: int
|
||||
average_transaction_value: Decimal
|
||||
top_products: List[dict]
|
||||
sales_by_channel: dict
|
||||
sales_by_day: List[dict]
|
||||
|
||||
|
||||
class ProductSalesAnalytics(BaseModel):
|
||||
"""Product-specific sales analytics"""
|
||||
inventory_product_id: UUID # Reference to inventory service product
|
||||
# Note: product_name can be fetched from inventory service using inventory_product_id
|
||||
total_revenue: Decimal
|
||||
total_quantity: int
|
||||
total_transactions: int
|
||||
average_price: Decimal
|
||||
growth_rate: Optional[float] = None
|
||||
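
A quick usage sketch for the schemas above (illustrative values; it assumes the SalesDataBase fields defined earlier in this file, including an inventory_product_id field, are covered by what is shown):

    from datetime import datetime
    from decimal import Decimal
    from uuid import uuid4

    from app.schemas.sales import SalesDataCreate

    record = SalesDataCreate(
        inventory_product_id=uuid4(),      # hypothetical product reference
        date=datetime(2024, 3, 1, 9, 30),
        quantity_sold=10,
        unit_price=Decimal("4.00"),
        revenue=Decimal("40.00"),
        sales_channel="online",            # must be in the allowed channel list
        source="pos",                      # must be in the allowed source list
    )

    # An unknown channel fails validation:
    try:
        SalesDataCreate(**{**record.model_dump(), "sales_channel": "phone"})
    except ValueError as exc:
        print(exc)  # "Sales channel must be one of: ['in_store', 'online', 'delivery', 'wholesale']"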
6
services/sales/app/services/__init__.py
Normal file
6
services/sales/app/services/__init__.py
Normal file
@@ -0,0 +1,6 @@
# services/sales/app/services/__init__.py

from .sales_service import SalesService
from .data_import_service import DataImportService

__all__ = ["SalesService", "DataImportService"]
1101
services/sales/app/services/data_import_service.py
Normal file
1101
services/sales/app/services/data_import_service.py
Normal file
File diff suppressed because it is too large
158
services/sales/app/services/inventory_client.py
Normal file
158
services/sales/app/services/inventory_client.py
Normal file
@@ -0,0 +1,158 @@
# services/sales/app/services/inventory_client.py
"""
Inventory Service Client - Inter-service communication
Handles communication with the inventory service to fetch product data
"""

import structlog
from typing import Dict, Any, List, Optional
from uuid import UUID

from shared.clients.inventory_client import InventoryServiceClient as SharedInventoryClient
from app.core.config import settings

logger = structlog.get_logger()


class InventoryServiceClient:
    """Client for communicating with the inventory service via shared client"""

    def __init__(self):
        self._shared_client = SharedInventoryClient(settings)

    async def classify_products_batch(self, product_list: List[Dict[str, Any]], tenant_id: UUID) -> Optional[Dict[str, Any]]:
        """Classify multiple products for inventory creation"""
        try:
            # Convert product_list to the format expected by the shared client
            products = []
            for item in product_list:
                if isinstance(item, str):
                    # If it's just a product name
                    products.append({"product_name": item})
                elif isinstance(item, dict):
                    # If it's already a dict, ensure required fields
                    product_data = {
                        "product_name": item.get("product_name", item.get("name", str(item))),
                        "sales_volume": item.get("sales_volume", item.get("total_quantity"))
                    }
                    products.append(product_data)

            result = await self._shared_client.classify_products_batch(products, str(tenant_id))
            if result:
                logger.info("Classified products batch",
                            count=len(products), tenant_id=tenant_id)
                return result

        except Exception as e:
            logger.error("Error in batch product classification",
                         error=str(e), tenant_id=tenant_id)
            return None

    async def get_product_by_id(self, product_id: UUID, tenant_id: UUID) -> Optional[Dict[str, Any]]:
        """Get product details from inventory service by ID"""
        try:
            result = await self._shared_client.get_ingredient_by_id(product_id, str(tenant_id))
            if result:
                logger.info("Retrieved product from inventory service",
                            product_id=product_id, tenant_id=tenant_id)
                return result

        except Exception as e:
            logger.error("Error fetching product by ID",
                         error=str(e), product_id=product_id, tenant_id=tenant_id)
            return None

    async def get_product_by_sku(self, sku: str, tenant_id: UUID) -> Optional[Dict[str, Any]]:
        """Get product details from inventory service by SKU"""
        try:
            # Search for the product by SKU using the shared client
            products = await self._shared_client.search_ingredients(
                str(tenant_id), search=sku, limit=1
            )

            if products:
                product_data = products[0]
                logger.info("Retrieved product by SKU from inventory service",
                            sku=sku, tenant_id=tenant_id)
                return product_data
            else:
                logger.warning("Product not found by SKU in inventory service",
                               sku=sku, tenant_id=tenant_id)
                return None

        except Exception as e:
            logger.error("Error fetching product by SKU",
                         error=str(e), sku=sku, tenant_id=tenant_id)
            return None

    async def search_products(self, search_term: str, tenant_id: UUID,
                              product_type: Optional[str] = None) -> List[Dict[str, Any]]:
        """Search products in inventory service"""
        # Note: product_type is accepted for API symmetry but is not yet
        # forwarded to the shared client.
        try:
            products = await self._shared_client.search_ingredients(
                str(tenant_id), search=search_term, limit=50
            )

            logger.info("Searched products in inventory service",
                        search_term=search_term, count=len(products), tenant_id=tenant_id)
            return products

        except Exception as e:
            logger.error("Error searching products",
                         error=str(e), search_term=search_term, tenant_id=tenant_id)
            return []

    async def get_products_by_category(self, category: str, tenant_id: UUID,
                                       product_type: Optional[str] = None) -> List[Dict[str, Any]]:
        """Get products by category from inventory service"""
        try:
            products = await self._shared_client.search_ingredients(
                str(tenant_id), category=category, limit=100
            )

            logger.info("Retrieved products by category from inventory service",
                        category=category, count=len(products), tenant_id=tenant_id)
            return products

        except Exception as e:
            logger.error("Error fetching products by category",
                         error=str(e), category=category, tenant_id=tenant_id)
            return []

    async def create_ingredient(self, ingredient_data: Dict[str, Any], tenant_id: str) -> Optional[Dict[str, Any]]:
        """Create a new ingredient/product in inventory service"""
        try:
            result = await self._shared_client.create_ingredient(ingredient_data, tenant_id)
            if result:
                logger.info("Created ingredient in inventory service",
                            ingredient_name=ingredient_data.get('name'), tenant_id=tenant_id)
                return result

        except Exception as e:
            logger.error("Error creating ingredient",
                         error=str(e), ingredient_data=ingredient_data, tenant_id=tenant_id)
            return None

    async def resolve_or_create_products_batch(
        self,
        products: List[Dict[str, Any]],
        tenant_id: str
    ) -> Optional[Dict[str, Any]]:
        """Resolve or create multiple products in a single batch operation"""
        try:
            result = await self._shared_client.resolve_or_create_products_batch(products, tenant_id)
            if result:
                logger.info("Batch product resolution complete",
                            created=result.get('created_count', 0),
                            resolved=result.get('resolved_count', 0),
                            tenant_id=tenant_id)
                return result

        except Exception as e:
            logger.error("Error in batch product resolution",
                         error=str(e), products_count=len(products), tenant_id=tenant_id)
            return None


# Dependency injection
async def get_inventory_client() -> InventoryServiceClient:
    """Get inventory service client instance"""
    return InventoryServiceClient()
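
A short usage sketch (hypothetical IDs and SKU; assumes an event loop and the settings-driven shared client above): fetch a product by ID and fall back to a SKU search when it is missing.

    import asyncio
    from uuid import UUID

    from app.services.inventory_client import get_inventory_client

    async def demo():
        client = await get_inventory_client()
        tenant_id = UUID("00000000-0000-0000-0000-000000000001")     # hypothetical tenant
        product = await client.get_product_by_id(
            UUID("00000000-0000-0000-0000-000000000002"), tenant_id  # hypothetical product
        )
        if product is None:
            # Both lookups return None when nothing matches, so callers
            # only need a single None check.
            product = await client.get_product_by_sku("FLOUR-001", tenant_id)  # hypothetical SKU
        print(product)

    asyncio.run(demo())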
657
services/sales/app/services/sales_service.py
Normal file
657
services/sales/app/services/sales_service.py
Normal file
@@ -0,0 +1,657 @@
# services/sales/app/services/sales_service.py
"""
Sales Service - Business Logic Layer
"""

import uuid  # needed for uuid.uuid4() event IDs in the low-stock alert below
from typing import List, Optional, Dict, Any
from uuid import UUID
from datetime import datetime
import structlog

from app.models.sales import SalesData
from app.repositories.sales_repository import SalesRepository
from app.schemas.sales import SalesDataCreate, SalesDataUpdate, SalesDataQuery, SalesAnalytics
from app.core.database import get_db_transaction
from app.services.inventory_client import InventoryServiceClient
from shared.database.exceptions import DatabaseError

logger = structlog.get_logger()


class SalesService:
    """Service layer for sales operations"""

    def __init__(self):
        self.inventory_client = InventoryServiceClient()

    async def create_sales_record(
        self,
        sales_data: SalesDataCreate,
        tenant_id: UUID,
        user_id: Optional[UUID] = None
    ) -> SalesData:
        """Create a new sales record with business validation"""
        try:
            # Sync product data with inventory service if inventory_product_id is provided
            if sales_data.inventory_product_id:
                product_cache = await self.inventory_client.sync_product_cache(
                    sales_data.inventory_product_id, tenant_id
                )
                if product_cache:
                    # Update cached product fields from inventory
                    sales_data_dict = sales_data.model_dump()
                    sales_data_dict.update(product_cache)
                    sales_data = SalesDataCreate(**sales_data_dict)
                else:
                    logger.warning("Could not sync product from inventory",
                                   product_id=sales_data.inventory_product_id, tenant_id=tenant_id)

            # Business validation
            await self._validate_sales_data(sales_data, tenant_id)

            # Set user who created the record
            if user_id:
                sales_data_dict = sales_data.model_dump()
                sales_data_dict['created_by'] = user_id
                sales_data = SalesDataCreate(**sales_data_dict)

            async with get_db_transaction() as db:
                repository = SalesRepository(db)
                record = await repository.create_sales_record(sales_data, tenant_id)

                # Additional business logic (e.g., notifications, analytics updates)
                await self._post_create_actions(record)

                return record

        except Exception as e:
            logger.error("Failed to create sales record in service", error=str(e), tenant_id=tenant_id)
            raise

    async def update_sales_record(
        self,
        record_id: UUID,
        update_data: SalesDataUpdate,
        tenant_id: UUID
    ) -> SalesData:
        """Update a sales record"""
        try:
            async with get_db_transaction() as db:
                repository = SalesRepository(db)

                # Verify record belongs to tenant
                existing_record = await repository.get_by_id(record_id)
                if not existing_record or existing_record.tenant_id != tenant_id:
                    raise ValueError(f"Sales record {record_id} not found for tenant {tenant_id}")

                # Update the record
                updated_record = await repository.update(record_id, update_data.model_dump(exclude_unset=True))

                logger.info("Updated sales record", record_id=record_id, tenant_id=tenant_id)
                return updated_record

        except Exception as e:
            logger.error("Failed to update sales record", error=str(e), record_id=record_id, tenant_id=tenant_id)
            raise

    async def get_sales_records(
        self,
        tenant_id: UUID,
        query_params: Optional[SalesDataQuery] = None
    ) -> List[SalesData]:
        """Get sales records for a tenant"""
        try:
            async with get_db_transaction() as db:
                repository = SalesRepository(db)
                records = await repository.get_by_tenant(tenant_id, query_params)

                logger.info("Retrieved sales records", count=len(records), tenant_id=tenant_id)
                return records

        except Exception as e:
            logger.error("Failed to get sales records", error=str(e), tenant_id=tenant_id)
            raise

    async def get_sales_record(self, record_id: UUID, tenant_id: UUID) -> Optional[SalesData]:
        """Get a specific sales record"""
        try:
            async with get_db_transaction() as db:
                repository = SalesRepository(db)
                record = await repository.get_by_id(record_id)

                # Verify record belongs to tenant
                if record and record.tenant_id != tenant_id:
                    return None

                return record

        except Exception as e:
            logger.error("Failed to get sales record", error=str(e), record_id=record_id, tenant_id=tenant_id)
            raise

    async def delete_sales_record(self, record_id: UUID, tenant_id: UUID) -> bool:
        """Delete a sales record"""
        try:
            async with get_db_transaction() as db:
                repository = SalesRepository(db)

                # Verify record belongs to tenant
                existing_record = await repository.get_by_id(record_id)
                if not existing_record or existing_record.tenant_id != tenant_id:
                    raise ValueError(f"Sales record {record_id} not found for tenant {tenant_id}")

                success = await repository.delete(record_id)

                if success:
                    logger.info("Deleted sales record", record_id=record_id, tenant_id=tenant_id)

                return success

        except Exception as e:
            logger.error("Failed to delete sales record", error=str(e), record_id=record_id, tenant_id=tenant_id)
            raise

    async def get_product_sales(
        self,
        tenant_id: UUID,
        inventory_product_id: UUID,
        start_date: Optional[datetime] = None,
        end_date: Optional[datetime] = None
    ) -> List[SalesData]:
        """Get sales records for a specific product by inventory ID"""
        try:
            async with get_db_transaction() as db:
                repository = SalesRepository(db)
                records = await repository.get_by_inventory_product(tenant_id, inventory_product_id, start_date, end_date)

                logger.info(
                    "Retrieved product sales",
                    count=len(records),
                    inventory_product_id=inventory_product_id,
                    tenant_id=tenant_id
                )
                return records

        except Exception as e:
            logger.error("Failed to get product sales", error=str(e), tenant_id=tenant_id, inventory_product_id=inventory_product_id)
            raise

    async def get_sales_analytics(
        self,
        tenant_id: UUID,
        start_date: Optional[datetime] = None,
        end_date: Optional[datetime] = None
    ) -> Dict[str, Any]:
        """Get sales analytics for a tenant"""
        try:
            async with get_db_transaction() as db:
                repository = SalesRepository(db)
                analytics = await repository.get_analytics(tenant_id, start_date, end_date)

                logger.info("Retrieved sales analytics", tenant_id=tenant_id)
                return analytics

        except Exception as e:
            logger.error("Failed to get sales analytics", error=str(e), tenant_id=tenant_id)
            raise

    async def get_product_categories(self, tenant_id: UUID) -> List[str]:
        """Get distinct product categories from inventory service"""
        try:
            # Get all unique categories from inventory service products;
            # this is more accurate than cached categories in sales data
            ingredient_products = await self.inventory_client.search_products("", tenant_id, "ingredient")
            finished_products = await self.inventory_client.search_products("", tenant_id, "finished_product")

            categories = set()
            for product in ingredient_products:
                if product.get("ingredient_category"):
                    categories.add(product["ingredient_category"])

            for product in finished_products:
                if product.get("product_category"):
                    categories.add(product["product_category"])

            return sorted(categories)

        except Exception as e:
            logger.error("Failed to get product categories", error=str(e), tenant_id=tenant_id)
            raise

    async def validate_sales_record(
        self,
        record_id: UUID,
        tenant_id: UUID,
        validation_notes: Optional[str] = None
    ) -> SalesData:
        """Validate a sales record"""
        try:
            async with get_db_transaction() as db:
                repository = SalesRepository(db)

                # Verify record belongs to tenant
                existing_record = await repository.get_by_id(record_id)
                if not existing_record or existing_record.tenant_id != tenant_id:
                    raise ValueError(f"Sales record {record_id} not found for tenant {tenant_id}")

                validated_record = await repository.validate_record(record_id, validation_notes)

                logger.info("Validated sales record", record_id=record_id, tenant_id=tenant_id)
                return validated_record

        except Exception as e:
            logger.error("Failed to validate sales record", error=str(e), record_id=record_id, tenant_id=tenant_id)
            raise

    async def _validate_sales_data(self, sales_data: SalesDataCreate, tenant_id: UUID):
        """Validate sales data according to business rules"""
        # Example business validations

        # Check if revenue matches quantity * unit_price (if unit_price provided)
        if sales_data.unit_price and sales_data.quantity_sold:
            expected_revenue = sales_data.unit_price * sales_data.quantity_sold
            # Apply discount if any
            if sales_data.discount_applied:
                expected_revenue *= (1 - sales_data.discount_applied / 100)

            # Allow for small rounding differences
            if abs(float(sales_data.revenue) - float(expected_revenue)) > 0.01:
                logger.warning(
                    "Revenue mismatch detected",
                    expected=float(expected_revenue),
                    actual=float(sales_data.revenue),
                    tenant_id=tenant_id
                )

        # Check date validity (not in future)
        if sales_data.date > datetime.utcnow():
            raise ValueError("Sales date cannot be in the future")

        # Additional business rules can be added here
        logger.info("Sales data validation passed", tenant_id=tenant_id)
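
    # Worked example for the revenue check above (illustrative numbers, not from
    # the source): quantity_sold=10 at unit_price=4.00 with discount_applied=25
    # gives expected_revenue = 10 * 4.00 * (1 - 25/100) = 30.00, so a reported
    # revenue of 30.00 passes while 35.00 logs a "Revenue mismatch detected" warning.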

    async def _post_create_actions(self, record: SalesData):
        """Actions to perform after creating a sales record"""
        try:
            # Decrease inventory for the sale
            if record.inventory_product_id and record.quantity_sold and record.quantity_sold > 0:
                await self._decrease_inventory_for_sale(record)

            # Here you could also:
            # - Send notifications
            # - Update analytics caches
            # - Trigger ML model updates

            logger.info("Post-create actions completed", record_id=record.id)

        except Exception as e:
            # Don't fail the main operation for auxiliary actions
            logger.warning("Failed to execute post-create actions", error=str(e), record_id=record.id)

    async def _decrease_inventory_for_sale(self, sales_record: SalesData) -> Optional[Dict[str, Any]]:
        """Decrease inventory stock for a sales record"""
        try:
            if not sales_record.inventory_product_id:
                logger.debug("No inventory_product_id for sales record, skipping stock decrease",
                             record_id=sales_record.id)
                return None

            if not sales_record.quantity_sold or sales_record.quantity_sold <= 0:
                logger.debug("Invalid quantity for sales record, skipping stock decrease",
                             record_id=sales_record.id, quantity=sales_record.quantity_sold)
                return None

            consumption_data = {
                "ingredient_id": str(sales_record.inventory_product_id),
                "quantity": float(sales_record.quantity_sold),
                "reference_number": str(sales_record.id),
                "notes": f"Sales: {sales_record.product_name} - {sales_record.sales_channel}",
                "fifo": True  # Use FIFO method for stock consumption
            }

            result = await self.inventory_client._shared_client.consume_stock(
                consumption_data,
                str(sales_record.tenant_id)
            )

            if result:
                logger.info("Inventory decreased for sale",
                            sales_record_id=sales_record.id,
                            inventory_product_id=sales_record.inventory_product_id,
                            quantity=sales_record.quantity_sold,
                            method="FIFO")

                # Check if stock level is now low (after successful decrease)
                await self._check_low_stock_threshold(
                    sales_record.tenant_id,
                    sales_record.inventory_product_id,
                    sales_record.product_name,
                    result
                )
            else:
                logger.warning("Failed to decrease inventory for sale (no result)",
                               sales_record_id=sales_record.id)

            return result

        except ValueError as e:
            # Insufficient stock - log a warning but don't fail the sale
            logger.warning("Insufficient stock for sale",
                           sales_record_id=sales_record.id,
                           error=str(e),
                           product_id=sales_record.inventory_product_id,
                           quantity_requested=sales_record.quantity_sold)

            # Trigger low stock alert
            await self._trigger_low_stock_alert(
                sales_record.tenant_id,
                sales_record.inventory_product_id,
                sales_record.product_name,
                error_message=str(e)
            )
            return None

        except Exception as e:
            # Other errors - log but don't fail the sale
            logger.error("Failed to decrease inventory for sale",
                         sales_record_id=sales_record.id,
                         error=str(e),
                         product_id=sales_record.inventory_product_id)
            return None

    async def _check_low_stock_threshold(
        self,
        tenant_id: UUID,
        product_id: UUID,
        product_name: str,
        consume_result: Dict[str, Any]
    ):
        """Check if stock level is below threshold after decrease"""
        try:
            # consume_result is currently unused; the current stock level is
            # re-read from the inventory service instead.
            # Get product details to check current stock and reorder point
            product = await self.inventory_client.get_product_by_id(product_id, tenant_id)

            if not product:
                return

            # Check if product has a reorder point configured
            reorder_point = product.get("reorder_point", 0)
            current_stock = product.get("current_stock", 0)

            # Trigger alert if stock is at or below the reorder point
            if reorder_point > 0 and current_stock <= reorder_point:
                logger.warning("Stock below reorder point",
                               product_id=product_id,
                               product_name=product_name,
                               current_stock=current_stock,
                               reorder_point=reorder_point,
                               tenant_id=tenant_id)

                await self._trigger_low_stock_alert(
                    tenant_id,
                    product_id,
                    product_name,
                    current_stock=current_stock,
                    reorder_point=reorder_point
                )

        except Exception as e:
            # Don't fail the operation if the alert check fails
            logger.error("Failed to check low stock threshold",
                         error=str(e),
                         product_id=product_id)

    async def _trigger_low_stock_alert(
        self,
        tenant_id: UUID,
        product_id: UUID,
        product_name: str,
        error_message: Optional[str] = None,
        current_stock: Optional[float] = None,
        reorder_point: Optional[float] = None
    ):
        """Trigger low stock alert notification"""
        try:
            # For now, just log the alert
            # In production, this could:
            # - Send email notification
            # - Create in-app notification
            # - Trigger webhook
            # - Create alert record in database
            # - Send to external alerting system (PagerDuty, Slack, etc.)

            alert_data = {
                "type": "low_stock",
                "severity": "warning" if current_stock is not None else "critical",
                "tenant_id": str(tenant_id),
                "product_id": str(product_id),
                "product_name": product_name,
                "current_stock": current_stock,
                "reorder_point": reorder_point,
                "error_message": error_message,
                "timestamp": datetime.utcnow().isoformat()
            }

            logger.warning("LOW_STOCK_ALERT", **alert_data)

            # Deliver the notification via a RabbitMQ event
            try:
                from shared.messaging import get_rabbitmq_client

                rabbitmq_client = get_rabbitmq_client()
                if rabbitmq_client:
                    # Publish low stock event for the notification service to consume
                    event_payload = {
                        "event_id": str(uuid.uuid4()),
                        "event_type": "inventory.low_stock",
                        "timestamp": datetime.utcnow().isoformat(),
                        "tenant_id": str(tenant_id),
                        "data": alert_data
                    }

                    await rabbitmq_client.publish_event(
                        exchange_name="inventory.events",
                        routing_key="inventory.low_stock",
                        event_data=event_payload
                    )

                    logger.info("Published low stock alert event",
                                tenant_id=str(tenant_id),
                                product_id=product_id,
                                event_id=event_payload["event_id"])
                else:
                    logger.warning("RabbitMQ client not available, notification not sent")

            except Exception as notify_error:
                logger.error("Failed to publish low stock notification event",
                             error=str(notify_error),
                             tenant_id=str(tenant_id))
                # Don't fail the main operation if notification fails

        except Exception as e:
            logger.error("Failed to trigger low stock alert",
                         error=str(e),
                         product_id=product_id)

    # New inventory integration methods
    async def search_inventory_products(self, search_term: str, tenant_id: UUID,
                                        product_type: Optional[str] = None) -> List[Dict[str, Any]]:
        """Search products in inventory service"""
        try:
            products = await self.inventory_client.search_products(search_term, tenant_id, product_type)
            logger.info("Searched inventory products", search_term=search_term,
                        count=len(products), tenant_id=tenant_id)
            return products
        except Exception as e:
            logger.error("Failed to search inventory products",
                         error=str(e), search_term=search_term, tenant_id=tenant_id)
            return []

    async def get_inventory_product(self, product_id: UUID, tenant_id: UUID) -> Optional[Dict[str, Any]]:
        """Get product details from inventory service"""
        try:
            product = await self.inventory_client.get_product_by_id(product_id, tenant_id)
            if product:
                logger.info("Retrieved inventory product", product_id=product_id, tenant_id=tenant_id)
            return product
        except Exception as e:
            logger.error("Failed to get inventory product",
                         error=str(e), product_id=product_id, tenant_id=tenant_id)
            return None

    async def get_inventory_products_by_category(self, category: str, tenant_id: UUID,
                                                 product_type: Optional[str] = None) -> List[Dict[str, Any]]:
        """Get products by category from inventory service"""
        try:
            products = await self.inventory_client.get_products_by_category(category, tenant_id, product_type)
            logger.info("Retrieved inventory products by category", category=category,
                        count=len(products), tenant_id=tenant_id)
            return products
        except Exception as e:
            logger.error("Failed to get inventory products by category",
                         error=str(e), category=category, tenant_id=tenant_id)
            return []

    async def analyze_product_demand_patterns(
        self,
        tenant_id: UUID,
        inventory_product_id: UUID,
        start_date: Optional[datetime] = None,
        end_date: Optional[datetime] = None,
        min_history_days: int = 90
    ) -> Dict[str, Any]:
        """
        Analyze demand patterns for a specific product from historical sales data.

        This method provides insights on:
        - Demand trends (increasing/decreasing)
        - Volatility (coefficient of variation)
        - Weekly seasonal patterns
        - Peak/low demand days

        Args:
            tenant_id: Tenant identifier
            inventory_product_id: Product identifier
            start_date: Start date for analysis (optional)
            end_date: End date for analysis (optional)
            min_history_days: Minimum days of history required

        Returns:
            Analysis results with patterns, trends, and statistics
        """
        try:
            import pandas as pd

            logger.info(
                "Analyzing product demand patterns",
                tenant_id=tenant_id,
                inventory_product_id=inventory_product_id
            )

            # Fetch sales data for the product
            sales_records = await self.get_product_sales(
                tenant_id=tenant_id,
                inventory_product_id=inventory_product_id,
                start_date=start_date,
                end_date=end_date
            )

            # Note: the record count is used as a proxy for days of history,
            # which assumes roughly one sales record per day
            if not sales_records or len(sales_records) < min_history_days:
                return {
                    'analyzed_at': datetime.utcnow().isoformat(),
                    'history_days': len(sales_records) if sales_records else 0,
                    'patterns': {},
                    'trend_analysis': {},
                    'seasonal_factors': {},
                    'statistics': {},
                    'error': f'Insufficient historical data (need {min_history_days} days, got {len(sales_records) if sales_records else 0})'
                }

            # Convert to DataFrame for analysis
            sales_data = pd.DataFrame([{
                'date': record.date,
                'quantity': record.quantity_sold,
                'revenue': float(record.revenue) if record.revenue else 0
            } for record in sales_records])

            sales_data['date'] = pd.to_datetime(sales_data['date'])
            sales_data = sales_data.sort_values('date')

            # Calculate basic statistics
            mean_demand = sales_data['quantity'].mean()
            std_demand = sales_data['quantity'].std()
            cv = (std_demand / mean_demand) if mean_demand > 0 else 0

            # Trend analysis
            sales_data['days_since_start'] = (sales_data['date'] - sales_data['date'].min()).dt.days
            trend_correlation = sales_data['days_since_start'].corr(sales_data['quantity'])
            is_increasing = trend_correlation > 0.2
            is_decreasing = trend_correlation < -0.2

            # Seasonal pattern detection (day of week)
            sales_data['day_of_week'] = sales_data['date'].dt.dayofweek
            weekly_pattern = sales_data.groupby('day_of_week')['quantity'].mean().to_dict()
            peak_day = max(weekly_pattern, key=weekly_pattern.get)
            low_day = min(weekly_pattern, key=weekly_pattern.get)
            peak_ratio = weekly_pattern[peak_day] / weekly_pattern[low_day] if weekly_pattern[low_day] > 0 else 1.0

            logger.info(
                "Demand pattern analysis complete",
                tenant_id=tenant_id,
                inventory_product_id=inventory_product_id,
                data_points=len(sales_data),
                trend_direction='increasing' if is_increasing else 'decreasing' if is_decreasing else 'stable'
            )

            return {
                'analyzed_at': datetime.utcnow().isoformat(),
                'history_days': len(sales_data),
                'date_range': {
                    'start': sales_data['date'].min().isoformat(),
                    'end': sales_data['date'].max().isoformat()
                },
                'statistics': {
                    'mean_demand': round(mean_demand, 2),
                    'std_demand': round(std_demand, 2),
                    'coefficient_of_variation': round(cv, 3),
                    'total_quantity': round(sales_data['quantity'].sum(), 2),
                    'total_revenue': round(sales_data['revenue'].sum(), 2),
                    'min_demand': round(sales_data['quantity'].min(), 2),
                    'max_demand': round(sales_data['quantity'].max(), 2)
                },
                'trend_analysis': {
                    'correlation': round(trend_correlation, 3),
                    'is_increasing': is_increasing,
                    'is_decreasing': is_decreasing,
                    'direction': 'increasing' if is_increasing else 'decreasing' if is_decreasing else 'stable'
                },
                'patterns': {
                    'weekly_pattern': {str(k): round(v, 2) for k, v in weekly_pattern.items()},
                    'peak_day': int(peak_day),
                    'low_day': int(low_day)
                },
                'seasonal_factors': {
                    'peak_ratio': round(peak_ratio, 2),
                    'has_strong_pattern': peak_ratio > 1.5
                }
            }

        except Exception as e:
            logger.error(
                "Error analyzing product demand patterns",
                tenant_id=tenant_id,
                inventory_product_id=inventory_product_id,
                error=str(e),
                exc_info=True
            )
            return {
                'analyzed_at': datetime.utcnow().isoformat(),
                'history_days': 0,
                'patterns': {},
                'trend_analysis': {},
                'seasonal_factors': {},
                'statistics': {},
                'error': str(e)
            }
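
The statistics computed above can be reproduced in isolation. A minimal sketch with synthetic data (invented numbers; same pandas operations as the method):

    import pandas as pd

    # Two weeks of synthetic daily sales with a weekend bump and a rising trend.
    df = pd.DataFrame({
        "date": pd.date_range("2024-03-01", periods=14, freq="D"),
        "quantity": [8, 9, 7, 10, 12, 20, 22, 9, 11, 10, 13, 14, 24, 26],
    })

    mean_demand = df["quantity"].mean()
    std_demand = df["quantity"].std()
    cv = std_demand / mean_demand  # coefficient of variation: demand volatility

    df["days_since_start"] = (df["date"] - df["date"].min()).dt.days
    trend = df["days_since_start"].corr(df["quantity"])  # > 0.2 reads as "increasing"

    df["day_of_week"] = df["date"].dt.dayofweek
    weekly = df.groupby("day_of_week")["quantity"].mean()
    peak_ratio = weekly.max() / weekly.min()  # > 1.5 flags a strong weekly pattern

    print(round(cv, 3), round(trend, 3), round(peak_ratio, 2))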
81
services/sales/app/services/tenant_deletion_service.py
Normal file
81
services/sales/app/services/tenant_deletion_service.py
Normal file
@@ -0,0 +1,81 @@
"""
Sales Service - Tenant Data Deletion
Handles deletion of all sales-related data for a tenant
"""
from typing import Dict
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, delete, func
import structlog

from shared.services.tenant_deletion import BaseTenantDataDeletionService, TenantDataDeletionResult
from app.models.sales import SalesData

logger = structlog.get_logger()


class SalesTenantDeletionService(BaseTenantDataDeletionService):
    """Service for deleting all sales-related data for a tenant"""

    def __init__(self, db_session: AsyncSession):
        super().__init__("sales-service")
        self.db = db_session

    async def get_tenant_data_preview(self, tenant_id: str) -> Dict[str, int]:
        """Get counts of what would be deleted"""
        try:
            preview = {}

            # Count sales data
            sales_count = await self.db.scalar(
                select(func.count(SalesData.id)).where(SalesData.tenant_id == tenant_id)
            )
            preview["sales_records"] = sales_count or 0

            return preview

        except Exception as e:
            logger.error("Error getting deletion preview",
                         tenant_id=tenant_id,
                         error=str(e))
            return {}

    async def delete_tenant_data(self, tenant_id: str) -> TenantDataDeletionResult:
        """Delete all data for a tenant"""
        result = TenantDataDeletionResult(tenant_id, self.service_name)

        try:
            # Delete all sales data for the tenant
            try:
                sales_delete = await self.db.execute(
                    delete(SalesData).where(SalesData.tenant_id == tenant_id)
                )
                deleted_sales = sales_delete.rowcount
                result.add_deleted_items("sales_records", deleted_sales)

                logger.info("Deleted sales data for tenant",
                            tenant_id=tenant_id,
                            count=deleted_sales)

            except Exception as e:
                logger.error("Error deleting sales data",
                             tenant_id=tenant_id,
                             error=str(e))
                result.add_error(f"Sales data deletion: {str(e)}")

            # Commit all deletions
            await self.db.commit()

            logger.info("Tenant data deletion completed",
                        tenant_id=tenant_id,
                        deleted_counts=result.deleted_counts)

        except Exception as e:
            logger.error("Fatal error during tenant data deletion",
                         tenant_id=tenant_id,
                         error=str(e))
            await self.db.rollback()
            result.add_error(f"Fatal error: {str(e)}")

        return result
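
A usage sketch for the deletion service (it assumes get_db_transaction yields the AsyncSession the constructor expects, as it appears to elsewhere in this service, and that the session helper tolerates the service's own commit; the tenant ID is hypothetical):

    import asyncio

    from app.core.database import get_db_transaction
    from app.services.tenant_deletion_service import SalesTenantDeletionService

    async def purge_tenant(tenant_id: str) -> None:
        async with get_db_transaction() as db:
            service = SalesTenantDeletionService(db)
            # Preview first, so callers can log or confirm what will go away.
            preview = await service.get_tenant_data_preview(tenant_id)
            print("about to delete:", preview)  # e.g. {'sales_records': 1234}
            result = await service.delete_tenant_data(tenant_id)
            print(result.deleted_counts)

    asyncio.run(purge_tenant("00000000-0000-0000-0000-000000000001"))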