Initial commit - production deployment

This commit is contained in:
2026-01-21 17:17:16 +01:00
commit c23d00dd92
2289 changed files with 638440 additions and 0 deletions

View File

@@ -0,0 +1 @@
# services/sales/app/api/__init__.py

View File

@@ -0,0 +1,99 @@
# services/sales/app/api/analytics.py
"""
Sales Analytics API - Reporting, statistics, and insights
"""
from fastapi import APIRouter, Depends, HTTPException, Query, Path
from typing import Optional, Dict, Any
from uuid import UUID
from datetime import datetime
import structlog
from app.services.sales_service import SalesService
from shared.routing import RouteBuilder
from shared.auth.access_control import analytics_tier_required
from shared.auth.decorators import get_current_user_dep
# Module-level wiring: route prefix builder scoped to the sales service,
# the analytics router, and a structlog logger bound to this module.
route_builder = RouteBuilder('sales')
router = APIRouter(tags=["sales-analytics"])
logger = structlog.get_logger()
def get_sales_service():
    """Build a fresh SalesService instance (FastAPI dependency provider)."""
    service = SalesService()
    return service
@router.get(
    route_builder.build_analytics_route("summary")
)
@analytics_tier_required
async def get_sales_analytics(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    start_date: Optional[datetime] = Query(None, description="Start date filter"),
    end_date: Optional[datetime] = Query(None, description="End date filter"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    sales_service: SalesService = Depends(get_sales_service)
):
    """Get sales analytics summary for a tenant (Professional+ tier required)"""
    try:
        summary = await sales_service.get_sales_analytics(tenant_id, start_date, end_date)
        logger.info("Retrieved sales analytics", tenant_id=tenant_id)
    except Exception as exc:
        # Any service failure is surfaced as a 500 with the error message attached.
        logger.error("Failed to get sales analytics", error=str(exc), tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to get sales analytics: {str(exc)}")
    return summary
@router.get(
    route_builder.build_analytics_route("products/{product_id}/demand-patterns")
)
@analytics_tier_required
async def get_product_demand_patterns(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    product_id: UUID = Path(..., description="Product ID (inventory_product_id)"),
    start_date: Optional[datetime] = Query(None, description="Start date for analysis"),
    end_date: Optional[datetime] = Query(None, description="End date for analysis"),
    min_history_days: int = Query(90, description="Minimum days of history required", ge=30, le=365),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    sales_service: SalesService = Depends(get_sales_service)
):
    """
    Analyze demand patterns for a specific product (Professional+ tier required).

    Returns:
        - Demand trends (increasing/decreasing/stable)
        - Volatility metrics (coefficient of variation)
        - Weekly seasonal patterns
        - Peak/low demand days
        - Statistical summaries
    """
    # Collect the analysis parameters once so the service call stays readable.
    analysis_kwargs = dict(
        tenant_id=tenant_id,
        inventory_product_id=product_id,
        start_date=start_date,
        end_date=end_date,
        min_history_days=min_history_days,
    )
    try:
        patterns = await sales_service.analyze_product_demand_patterns(**analysis_kwargs)
        logger.info(
            "Retrieved product demand patterns",
            tenant_id=tenant_id,
            product_id=product_id
        )
        return patterns
    except Exception as exc:
        logger.error(
            "Failed to get product demand patterns",
            error=str(exc),
            tenant_id=tenant_id,
            product_id=product_id
        )
        raise HTTPException(
            status_code=500,
            detail=f"Failed to analyze demand patterns: {str(exc)}"
        )

View File

@@ -0,0 +1,237 @@
# services/sales/app/api/audit.py
"""
Audit Logs API - Retrieve audit trail for sales service
"""
from fastapi import APIRouter, Depends, HTTPException, Query, Path, status
from typing import Optional, Dict, Any
from uuid import UUID
from datetime import datetime
import structlog
from sqlalchemy import select, func, and_, or_
from sqlalchemy.ext.asyncio import AsyncSession
from app.models import AuditLog
from shared.auth.decorators import get_current_user_dep
from shared.auth.access_control import require_user_role
from shared.routing import RouteBuilder
from shared.models.audit_log_schemas import (
AuditLogResponse,
AuditLogListResponse,
AuditLogStatsResponse
)
from app.core.database import database_manager
# Module-level wiring for the audit-log endpoints: sales-scoped route builder,
# router, and module logger.
route_builder = RouteBuilder('sales')
router = APIRouter(tags=["audit-logs"])
logger = structlog.get_logger()
async def get_db():
    """FastAPI dependency yielding an async DB session; the context manager
    owns cleanup when the request finishes."""
    async with database_manager.get_session() as db_session:
        yield db_session
@router.get(
    route_builder.build_base_route("audit-logs"),
    response_model=AuditLogListResponse
)
@require_user_role(['admin', 'owner'])
async def get_audit_logs(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    start_date: Optional[datetime] = Query(None, description="Filter logs from this date"),
    end_date: Optional[datetime] = Query(None, description="Filter logs until this date"),
    user_id: Optional[UUID] = Query(None, description="Filter by user ID"),
    action: Optional[str] = Query(None, description="Filter by action type"),
    resource_type: Optional[str] = Query(None, description="Filter by resource type"),
    severity: Optional[str] = Query(None, description="Filter by severity level"),
    search: Optional[str] = Query(None, description="Search in description field"),
    limit: int = Query(100, ge=1, le=1000, description="Number of records to return"),
    offset: int = Query(0, ge=0, description="Number of records to skip"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """
    Get audit logs for sales service.
    Requires admin or owner role.

    All filters are optional and combined with AND semantics; ``search`` does a
    case-insensitive substring match on the description column. Results are
    ordered newest-first and paginated with limit/offset; the response carries
    exact pagination metadata (total, has_more).
    """
    try:
        logger.info(
            "Retrieving audit logs",
            tenant_id=tenant_id,
            user_id=current_user.get("user_id"),
            filters={
                "start_date": start_date,
                "end_date": end_date,
                "action": action,
                "resource_type": resource_type,
                "severity": severity
            }
        )
        # Build query filters — tenant scoping is always applied first.
        filters = [AuditLog.tenant_id == tenant_id]
        if start_date:
            filters.append(AuditLog.created_at >= start_date)
        if end_date:
            filters.append(AuditLog.created_at <= end_date)
        if user_id:
            filters.append(AuditLog.user_id == user_id)
        if action:
            filters.append(AuditLog.action == action)
        if resource_type:
            filters.append(AuditLog.resource_type == resource_type)
        if severity:
            filters.append(AuditLog.severity == severity)
        if search:
            # ilike with surrounding wildcards = case-insensitive "contains".
            filters.append(AuditLog.description.ilike(f"%{search}%"))
        # Count total matching records in a separate query so pagination
        # metadata is exact regardless of limit/offset.
        count_query = select(func.count()).select_from(AuditLog).where(and_(*filters))
        total_result = await db.execute(count_query)
        total = total_result.scalar() or 0
        # Fetch paginated results, newest first
        query = (
            select(AuditLog)
            .where(and_(*filters))
            .order_by(AuditLog.created_at.desc())
            .limit(limit)
            .offset(offset)
        )
        result = await db.execute(query)
        audit_logs = result.scalars().all()
        # Convert to response models
        # NOTE(review): from_orm is the Pydantic v1 API; if the project is on
        # Pydantic v2 this relies on the deprecation shim (model_validate is
        # the v2 equivalent) — confirm project Pydantic version.
        items = [AuditLogResponse.from_orm(log) for log in audit_logs]
        logger.info(
            "Successfully retrieved audit logs",
            tenant_id=tenant_id,
            total=total,
            returned=len(items)
        )
        return AuditLogListResponse(
            items=items,
            total=total,
            limit=limit,
            offset=offset,
            has_more=(offset + len(items)) < total
        )
    except Exception as e:
        # Any failure (query or serialization) is surfaced as a 500.
        logger.error(
            "Failed to retrieve audit logs",
            error=str(e),
            tenant_id=tenant_id
        )
        raise HTTPException(
            status_code=500,
            detail=f"Failed to retrieve audit logs: {str(e)}"
        )
@router.get(
    route_builder.build_base_route("audit-logs/stats"),
    response_model=AuditLogStatsResponse
)
@require_user_role(['admin', 'owner'])
async def get_audit_log_stats(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    start_date: Optional[datetime] = Query(None, description="Filter logs from this date"),
    end_date: Optional[datetime] = Query(None, description="Filter logs until this date"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """
    Get audit log statistics for sales service.
    Requires admin or owner role.

    Computes, within the optional date window: total event count, per-action /
    per-severity / per-resource-type breakdowns (via GROUP BY), and the
    min/max created_at of matching rows.
    """
    try:
        logger.info(
            "Retrieving audit log statistics",
            tenant_id=tenant_id,
            user_id=current_user.get("user_id")
        )
        # Build base filters shared by every aggregate query below.
        filters = [AuditLog.tenant_id == tenant_id]
        if start_date:
            filters.append(AuditLog.created_at >= start_date)
        if end_date:
            filters.append(AuditLog.created_at <= end_date)
        # Total events
        count_query = select(func.count()).select_from(AuditLog).where(and_(*filters))
        total_result = await db.execute(count_query)
        total_events = total_result.scalar() or 0
        # Events by action
        action_query = (
            select(AuditLog.action, func.count().label('count'))
            .where(and_(*filters))
            .group_by(AuditLog.action)
        )
        action_result = await db.execute(action_query)
        events_by_action = {row.action: row.count for row in action_result}
        # Events by severity
        severity_query = (
            select(AuditLog.severity, func.count().label('count'))
            .where(and_(*filters))
            .group_by(AuditLog.severity)
        )
        severity_result = await db.execute(severity_query)
        events_by_severity = {row.severity: row.count for row in severity_result}
        # Events by resource type
        resource_query = (
            select(AuditLog.resource_type, func.count().label('count'))
            .where(and_(*filters))
            .group_by(AuditLog.resource_type)
        )
        resource_result = await db.execute(resource_query)
        events_by_resource_type = {row.resource_type: row.count for row in resource_result}
        # Date range — aggregate MIN/MAX always yields exactly one row, so
        # .one() is safe; both values are None when no rows match.
        date_range_query = (
            select(
                func.min(AuditLog.created_at).label('min_date'),
                func.max(AuditLog.created_at).label('max_date')
            )
            .where(and_(*filters))
        )
        date_result = await db.execute(date_range_query)
        date_row = date_result.one()
        logger.info(
            "Successfully retrieved audit log statistics",
            tenant_id=tenant_id,
            total_events=total_events
        )
        return AuditLogStatsResponse(
            total_events=total_events,
            events_by_action=events_by_action,
            events_by_severity=events_by_severity,
            events_by_resource_type=events_by_resource_type,
            date_range={
                "min": date_row.min_date,
                "max": date_row.max_date
            }
        )
    except Exception as e:
        logger.error(
            "Failed to retrieve audit log statistics",
            error=str(e),
            tenant_id=tenant_id
        )
        raise HTTPException(
            status_code=500,
            detail=f"Failed to retrieve audit log statistics: {str(e)}"
        )

View File

@@ -0,0 +1,160 @@
# services/sales/app/api/batch.py
"""
Sales Batch API - Batch operations for enterprise dashboards
Phase 2 optimization: Eliminate N+1 query patterns by fetching data for
multiple tenants in a single request.
"""
from fastapi import APIRouter, Depends, HTTPException, Body, Path
from typing import List, Dict, Any
from datetime import date
from uuid import UUID
from pydantic import BaseModel, Field
import structlog
import asyncio
from app.services.sales_service import SalesService
from shared.auth.decorators import get_current_user_dep
from shared.routing import RouteBuilder
from shared.auth.access_control import require_user_role
# Module-level wiring for the batch endpoints. NOTE: the batch route below is
# registered with an explicit path rather than via route_builder.
route_builder = RouteBuilder('sales')
router = APIRouter(tags=["sales-batch"])
logger = structlog.get_logger()
def get_sales_service():
    """Dependency provider: construct a new SalesService per request."""
    return SalesService()
class SalesSummaryBatchRequest(BaseModel):
    """Request model for batch sales summary"""
    # NOTE(review): assumes Pydantic v2 semantics where max_length on a list
    # field bounds the list size (rejected with 422 before the handler runs) —
    # confirm against the project's Pydantic version.
    tenant_ids: List[str] = Field(..., description="List of tenant IDs", max_length=100)
    # Inclusive reporting window for the summary.
    start_date: date = Field(..., description="Start date for sales period")
    end_date: date = Field(..., description="End date for sales period")
class SalesSummary(BaseModel):
    """Sales summary for a single tenant"""
    # Tenant the figures belong to (echoed back so batch callers can index).
    tenant_id: str
    # Aggregated figures for the requested period.
    total_revenue: float
    total_orders: int
    average_order_value: float
    # Period bounds echoed back as ISO-format strings.
    period_start: str
    period_end: str
@router.post("/api/v1/batch/sales-summary", response_model=Dict[str, SalesSummary])
async def get_sales_summary_batch(
    request: SalesSummaryBatchRequest = Body(...),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    sales_service: SalesService = Depends(get_sales_service)
):
    """
    Get sales summary for multiple tenants in a single request.
    Optimized for enterprise dashboards to eliminate N+1 query patterns.
    Fetches sales data for all tenants in parallel.

    Per-tenant failures are not fatal: a tenant whose fetch fails (including an
    invalid UUID) is returned with zeroed figures rather than failing the batch.

    Args:
        request: Batch request with tenant IDs and date range
    Returns:
        Dictionary mapping tenant_id -> sales summary
    Example:
        POST /api/v1/sales/batch/sales-summary
        {
            "tenant_ids": ["tenant-1", "tenant-2", "tenant-3"],
            "start_date": "2025-01-01",
            "end_date": "2025-01-31"
        }
        Response:
        {
            "tenant-1": {"tenant_id": "tenant-1", "total_revenue": 50000, ...},
            "tenant-2": {"tenant_id": "tenant-2", "total_revenue": 45000, ...},
            "tenant-3": {"tenant_id": "tenant-3", "total_revenue": 52000, ...}
        }
    """
    try:
        # Defensive re-check of the batch cap; the request model's max_length
        # should already have rejected oversized batches during validation.
        if len(request.tenant_ids) > 100:
            raise HTTPException(
                status_code=400,
                detail="Maximum 100 tenant IDs allowed per batch request"
            )
        if not request.tenant_ids:
            return {}
        logger.info(
            "Batch fetching sales summaries",
            tenant_count=len(request.tenant_ids),
            start_date=str(request.start_date),
            end_date=str(request.end_date)
        )
        async def fetch_tenant_sales(tenant_id: str) -> tuple[str, SalesSummary]:
            """Fetch sales summary for a single tenant"""
            try:
                tenant_uuid = UUID(tenant_id)
                summary = await sales_service.get_sales_analytics(
                    tenant_uuid,
                    request.start_date,
                    request.end_date
                )
                return tenant_id, SalesSummary(
                    tenant_id=tenant_id,
                    total_revenue=float(summary.get('total_revenue', 0)),
                    total_orders=int(summary.get('total_orders', 0)),
                    average_order_value=float(summary.get('average_order_value', 0)),
                    period_start=str(request.start_date),
                    period_end=str(request.end_date)
                )
            except Exception as e:
                # Degrade to a zeroed summary so one bad tenant doesn't sink
                # the whole batch (also covers invalid UUID strings).
                logger.warning(
                    "Failed to fetch sales for tenant in batch",
                    tenant_id=tenant_id,
                    error=str(e)
                )
                return tenant_id, SalesSummary(
                    tenant_id=tenant_id,
                    total_revenue=0.0,
                    total_orders=0,
                    average_order_value=0.0,
                    period_start=str(request.start_date),
                    period_end=str(request.end_date)
                )
        # Fetch all tenant sales in parallel
        tasks = [fetch_tenant_sales(tid) for tid in request.tenant_ids]
        results = await asyncio.gather(*tasks, return_exceptions=True)
        # Build result dictionary; gather-level exceptions are logged and
        # skipped (the helper already converts its own failures to zeroed rows).
        result_dict = {}
        for result in results:
            if isinstance(result, Exception):
                logger.error("Exception in batch sales fetch", error=str(result))
                continue
            tenant_id, summary = result
            result_dict[tenant_id] = summary
        logger.info(
            "Batch sales summaries retrieved",
            requested_count=len(request.tenant_ids),
            successful_count=len(result_dict)
        )
        return result_dict
    except HTTPException:
        # Preserve deliberate HTTP errors (e.g. the 400 above) untouched.
        raise
    except Exception as e:
        logger.error("Error in batch sales summary", error=str(e), exc_info=True)
        raise HTTPException(
            status_code=500,
            detail=f"Failed to fetch batch sales summaries: {str(e)}"
        )

View File

@@ -0,0 +1,314 @@
"""
Internal Demo Cloning API for Sales Service
Service-to-service endpoint for cloning sales data
"""
from fastapi import APIRouter, Depends, HTTPException, Header
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, delete, func
import structlog
import uuid
from datetime import datetime, timezone, timedelta
from typing import Any, Optional
import os
from decimal import Decimal
import sys
import json
from pathlib import Path
sys.path.insert(0, str(Path(__file__).parent.parent.parent.parent))
from shared.utils.demo_dates import adjust_date_for_demo, resolve_time_marker
from app.core.database import get_db
from app.models.sales import SalesData
from app.core.config import settings
# Module-level wiring for the internal (service-to-service) demo endpoints.
logger = structlog.get_logger()
router = APIRouter(prefix="/internal/demo", tags=["internal"])
# Base demo tenant IDs
# Template tenant used as the clone source for "professional" demo accounts.
DEMO_TENANT_PROFESSIONAL = "a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6"
def parse_date_field(
    field_value: Any,
    session_time: datetime,
    field_name: str = "date"
) -> Optional[datetime]:
    """
    Parse a date field from JSON, supporting BASE_TS markers and ISO timestamps.

    Bug fix: the ``field_value`` parameter was annotated with the builtin
    function ``any`` instead of ``typing.Any``; the annotation now uses ``Any``
    (imported at module top). Behavior is unchanged.

    Args:
        field_value: The date field value (can be BASE_TS marker, ISO string, or None)
        session_time: Session creation time (timezone-aware UTC)
        field_name: Name of the field (for logging)
    Returns:
        Timezone-aware UTC datetime, or None when the value is absent or
        cannot be interpreted (every failure path logs a warning and degrades
        to None rather than raising).
    """
    if field_value is None:
        return None
    # Handle BASE_TS markers (relative-time tokens resolved against session_time)
    if isinstance(field_value, str) and field_value.startswith("BASE_TS"):
        try:
            return resolve_time_marker(field_value, session_time)
        except (ValueError, AttributeError) as e:
            logger.warning(
                "Failed to resolve BASE_TS marker",
                field_name=field_name,
                marker=field_value,
                error=str(e)
            )
            return None
    # Handle ISO timestamps (legacy format - convert to absolute datetime).
    # The 'T'/'Z' sniff means plain dates like "2025-01-01" fall through to the
    # unknown-format branch below and return None.
    if isinstance(field_value, str) and ('T' in field_value or 'Z' in field_value):
        try:
            parsed_date = datetime.fromisoformat(field_value.replace('Z', '+00:00'))
            # Adjust relative to session time
            return adjust_date_for_demo(parsed_date, session_time)
        except (ValueError, AttributeError) as e:
            logger.warning(
                "Failed to parse ISO timestamp",
                field_name=field_name,
                value=field_value,
                error=str(e)
            )
            return None
    logger.warning(
        "Unknown date format",
        field_name=field_name,
        value=field_value,
        value_type=type(field_value).__name__
    )
    return None
@router.post("/clone")
async def clone_demo_data(
    base_tenant_id: str,
    virtual_tenant_id: str,
    demo_account_type: str,
    session_id: Optional[str] = None,
    session_created_at: Optional[str] = None,
    db: AsyncSession = Depends(get_db)
):
    """
    Clone sales service data for a virtual demo tenant
    Clones:
    - Sales history records from template tenant
    - Adjusts dates to recent timeframe
    - Updates product references to new virtual tenant
    Args:
        base_tenant_id: Template tenant UUID to clone from
        virtual_tenant_id: Target virtual tenant UUID
        demo_account_type: Type of demo account
        session_id: Originating session ID for tracing
        session_created_at: ISO timestamp when demo session was created (for date adjustment)
    Returns:
        Cloning status and record counts. Invalid UUIDs / account types raise
        HTTP 400; any other failure rolls back and returns a "failed" status
        payload instead of raising, so the orchestrator can aggregate results.
    """
    start_time = datetime.now(timezone.utc)
    # Parse session_created_at or fallback to now
    if session_created_at:
        try:
            session_time = datetime.fromisoformat(session_created_at.replace('Z', '+00:00'))
        except (ValueError, AttributeError) as e:
            logger.warning(
                "Invalid session_created_at format, using current time",
                session_created_at=session_created_at,
                error=str(e)
            )
            session_time = datetime.now(timezone.utc)
    else:
        logger.warning("session_created_at not provided, using current time")
        session_time = datetime.now(timezone.utc)
    logger.info(
        "Starting sales data cloning",
        base_tenant_id=base_tenant_id,
        virtual_tenant_id=virtual_tenant_id,
        demo_account_type=demo_account_type,
        session_id=session_id,
        session_time=session_time.isoformat()
    )
    try:
        # Validate UUIDs (base_uuid is only used for validation; an invalid
        # string raises ValueError and is mapped to HTTP 400 below)
        base_uuid = uuid.UUID(base_tenant_id)
        virtual_uuid = uuid.UUID(virtual_tenant_id)
        # Track cloning statistics
        stats = {
            "sales_records": 0,
        }
        # Load seed data from JSON files
        from shared.utils.seed_data_paths import get_seed_data_path
        if demo_account_type == "professional":
            json_file = get_seed_data_path("professional", "09-sales.json")
        elif demo_account_type == "enterprise":
            json_file = get_seed_data_path("enterprise", "09-sales.json")
        elif demo_account_type == "enterprise_child":
            # Child tenants share the enterprise seed file, keyed by base tenant.
            json_file = get_seed_data_path("enterprise", "09-sales.json", child_id=base_tenant_id)
        else:
            raise ValueError(f"Invalid demo account type: {demo_account_type}")
        # Load JSON data
        with open(json_file, 'r', encoding='utf-8') as f:
            seed_data = json.load(f)
        logger.info(
            "Loaded sales seed data",
            sales_records=len(seed_data.get('sales_data', []))
        )
        # Load Sales Data from seed data
        for sale_data in seed_data.get('sales_data', []):
            # Parse date field (supports BASE_TS markers and ISO timestamps)
            # Different demo types may use different field names for the date
            # Prioritize in order: date, sale_date, sales_date
            date_value = (sale_data.get('date') or
                          sale_data.get('sale_date') or
                          sale_data.get('sales_date'))
            adjusted_date = parse_date_field(
                date_value,
                session_time,
                "date"
            )
            # Ensure date is not None for NOT NULL constraint by using session_time as fallback
            if adjusted_date is None:
                adjusted_date = session_time
            # Create new sales record with adjusted date
            # Map different possible JSON field names to the correct model field names
            new_sale = SalesData(
                id=uuid.uuid4(),
                tenant_id=virtual_uuid,
                date=adjusted_date,
                inventory_product_id=sale_data.get('inventory_product_id') or sale_data.get('product_id'),  # inventory_product_id is the model field
                quantity_sold=sale_data.get('quantity_sold') or sale_data.get('quantity', 0.0),  # quantity_sold is the model field
                unit_price=sale_data.get('unit_price', 0.0),  # unit_price is the model field
                revenue=sale_data.get('revenue') or sale_data.get('total_revenue') or sale_data.get('total_amount', 0.0),  # revenue is the model field
                cost_of_goods=sale_data.get('cost_of_goods', 0.0),  # cost_of_goods is the model field
                discount_applied=sale_data.get('discount_applied', 0.0),  # discount_applied is the model field
                location_id=sale_data.get('location_id'),
                sales_channel=sale_data.get('sales_channel', 'IN_STORE'),  # sales_channel is the model field
                source="demo_clone",  # Mark as seeded
                is_validated=sale_data.get('is_validated', True),
                validation_notes=sale_data.get('validation_notes'),
                notes=sale_data.get('notes'),
                weather_condition=sale_data.get('weather_condition'),
                is_holiday=sale_data.get('is_holiday', False),
                is_weekend=sale_data.get('is_weekend', False),
                created_at=session_time,
                updated_at=session_time
            )
            db.add(new_sale)
            stats["sales_records"] += 1
        # Commit all changes in one transaction
        await db.commit()
        total_records = sum(stats.values())
        duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
        logger.info(
            "Sales data cloning completed",
            virtual_tenant_id=virtual_tenant_id,
            total_records=total_records,
            stats=stats,
            duration_ms=duration_ms
        )
        return {
            "service": "sales",
            "status": "completed",
            "records_cloned": total_records,
            "duration_ms": duration_ms,
            "details": stats
        }
    except ValueError as e:
        # Raised by uuid.UUID(...) or an unknown demo_account_type.
        logger.error("Invalid UUID format", error=str(e))
        raise HTTPException(status_code=400, detail=f"Invalid UUID: {str(e)}")
    except Exception as e:
        logger.error(
            "Failed to clone sales data",
            error=str(e),
            virtual_tenant_id=virtual_tenant_id,
            exc_info=True
        )
        # Rollback on error; report failure in-band rather than raising.
        await db.rollback()
        return {
            "service": "sales",
            "status": "failed",
            "records_cloned": 0,
            "duration_ms": int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000),
            "error": str(e)
        }
@router.get("/clone/health")
async def clone_health_check():
    """Report that the internal clone endpoint is up; polled by the orchestrator
    to verify service availability before dispatching clone requests."""
    payload = {
        "service": "sales",
        "clone_endpoint": "available",
        "version": "2.0.0",
    }
    return payload
@router.delete("/tenant/{virtual_tenant_id}")
async def delete_demo_data(
    virtual_tenant_id: str,
    db: AsyncSession = Depends(get_db)
):
    """Delete all sales data for a virtual demo tenant.

    Counts the tenant's sales rows first (for the response payload), then bulk
    deletes them in one statement. Returns counts and elapsed time; any failure
    rolls back and surfaces as HTTP 500.
    """
    logger.info("Deleting sales data for virtual tenant", virtual_tenant_id=virtual_tenant_id)
    start_time = datetime.now(timezone.utc)
    try:
        virtual_uuid = uuid.UUID(virtual_tenant_id)
        # Count records before deleting so the response can report them.
        # NOTE(review): scalar() can return None on some backends when no rows
        # match — the response would then carry null counts; confirm acceptable.
        sales_count = await db.scalar(select(func.count(SalesData.id)).where(SalesData.tenant_id == virtual_uuid))
        # Delete sales data in a single bulk statement
        await db.execute(delete(SalesData).where(SalesData.tenant_id == virtual_uuid))
        await db.commit()
        duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
        logger.info("Sales data deleted successfully", virtual_tenant_id=virtual_tenant_id, duration_ms=duration_ms)
        return {
            "service": "sales",
            "status": "deleted",
            "virtual_tenant_id": virtual_tenant_id,
            "records_deleted": {
                "sales": sales_count,
                "total": sales_count
            },
            "duration_ms": duration_ms
        }
    except Exception as e:
        # Includes invalid UUIDs (ValueError) — everything maps to HTTP 500 here.
        logger.error("Failed to delete sales data", error=str(e), exc_info=True)
        await db.rollback()
        raise HTTPException(status_code=500, detail=str(e))

View File

@@ -0,0 +1,520 @@
# services/sales/app/api/sales_operations.py
"""
Sales Operations API - Business operations and complex workflows
"""
from fastapi import APIRouter, Depends, HTTPException, Query, Path, UploadFile, File, Form
from sqlalchemy.ext.asyncio import AsyncSession
from typing import List, Optional, Dict, Any
from uuid import UUID
from datetime import datetime
import structlog
import json
from app.schemas.sales import SalesDataResponse
from app.services.sales_service import SalesService
from app.services.data_import_service import DataImportService
from app.core.database import get_db
from shared.auth.decorators import get_current_user_dep
from shared.auth.access_control import require_user_role
from shared.routing import RouteBuilder
# Module-level wiring for the sales-operations endpoints.
route_builder = RouteBuilder('sales')
router = APIRouter(tags=["sales-operations"])
logger = structlog.get_logger()
def get_sales_service():
    """Provide a new SalesService for each request (DI hook)."""
    svc = SalesService()
    return svc
def get_import_service():
    """Provide a new DataImportService for each request (DI hook)."""
    importer = DataImportService()
    return importer
@router.post(
    route_builder.build_operations_route("validate-record"),
    response_model=SalesDataResponse
)
async def validate_sales_record(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    record_id: UUID = Path(..., description="Sales record ID"),
    validation_notes: Optional[str] = Query(None, description="Validation notes"),
    sales_service: SalesService = Depends(get_sales_service)
):
    """Mark a sales record as validated"""
    # NOTE(review): unlike sibling endpoints, this one declares no
    # current_user dependency — confirm whether auth is enforced upstream.
    try:
        record = await sales_service.validate_sales_record(record_id, tenant_id, validation_notes)
        logger.info("Validated sales record", record_id=record_id, tenant_id=tenant_id)
        return record
    except ValueError as bad_request:
        # Domain-level rejection (unknown record, already validated, ...) -> 400.
        logger.warning("Error validating sales record", error=str(bad_request), record_id=record_id)
        raise HTTPException(status_code=400, detail=str(bad_request))
    except Exception as exc:
        logger.error("Failed to validate sales record", error=str(exc), record_id=record_id, tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to validate sales record: {str(exc)}")
@router.get(
    route_builder.build_nested_resource_route("inventory-products", "inventory_product_id", "sales"),
    response_model=List[SalesDataResponse]
)
async def get_product_sales(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    inventory_product_id: UUID = Path(..., description="Inventory product ID"),
    start_date: Optional[datetime] = Query(None, description="Start date filter"),
    end_date: Optional[datetime] = Query(None, description="End date filter"),
    sales_service: SalesService = Depends(get_sales_service)
):
    """Get sales records for a specific product (cross-service query)"""
    try:
        product_sales = await sales_service.get_product_sales(tenant_id, inventory_product_id, start_date, end_date)
        logger.info("Retrieved product sales", count=len(product_sales), inventory_product_id=inventory_product_id, tenant_id=tenant_id)
        return product_sales
    except Exception as exc:
        logger.error("Failed to get product sales", error=str(exc), tenant_id=tenant_id, inventory_product_id=inventory_product_id)
        raise HTTPException(status_code=500, detail=f"Failed to get product sales: {str(exc)}")
@router.post(
    route_builder.build_operations_route("import/validate-json")
)
async def validate_json_data(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    data: Optional[Dict[str, Any]] = None,
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    import_service: DataImportService = Depends(get_import_service)
):
    """
    Validate JSON sales data.

    Accepts either ``{"records": [...]}`` or a pre-built validation payload;
    the tenant_id path parameter always overrides any tenant in the body.

    Raises:
        HTTPException 400: when no request body is provided.
        HTTPException 500: when validation fails unexpectedly.
    """
    try:
        if not data:
            raise HTTPException(status_code=400, detail="No data provided")
        logger.info("Validating JSON data", tenant_id=tenant_id, record_count=len(data.get("records", [])))
        if "records" in data:
            # Canonical shape: serialize the records list into the payload.
            validation_data = {
                "tenant_id": str(tenant_id),
                "data": json.dumps(data.get("records", [])),
                "data_format": "json"
            }
        else:
            # Pass-through shape: caller built the payload; enforce tenant/format.
            validation_data = data.copy()
            validation_data["tenant_id"] = str(tenant_id)
            if "data_format" not in validation_data:
                validation_data["data_format"] = "json"
        validation_result = await import_service.validate_import_data(validation_data)
        logger.info("JSON validation completed", tenant_id=tenant_id, valid=validation_result.is_valid)
        return {
            "is_valid": validation_result.is_valid,
            "total_records": validation_result.total_records,
            "valid_records": validation_result.valid_records,
            "invalid_records": validation_result.invalid_records,
            "errors": validation_result.errors,
            "warnings": validation_result.warnings,
            "summary": validation_result.summary
        }
    except HTTPException:
        # Bug fix: HTTPException subclasses Exception, so the handler below used
        # to swallow the deliberate 400 above and re-raise it as a 500. Re-raise
        # HTTP errors unchanged (consistent with the universal validator).
        raise
    except Exception as e:
        logger.error("Failed to validate JSON data", error=str(e), tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to validate data: {str(e)}")
@router.post(
    route_builder.build_operations_route("import/validate")
)
async def validate_sales_data_universal(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    file: Optional[UploadFile] = File(None),
    data: Optional[Dict[str, Any]] = None,
    file_format: Optional[str] = Form(None),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    import_service: DataImportService = Depends(get_import_service)
):
    """Universal validation endpoint for sales data - supports files and JSON.

    Dispatch order: a non-empty file upload wins over a JSON body; if neither
    is present the request is rejected with 400. File format is sniffed from
    the filename extension, falling back to the explicit ``file_format`` form
    field, then to 'csv'. Excel content is base64-encoded before being handed
    to the import service; text formats are UTF-8 decoded.
    """
    try:
        logger.info("=== VALIDATION ENDPOINT CALLED ===",
                    tenant_id=tenant_id,
                    file_present=file is not None,
                    file_filename=file.filename if file else None,
                    data_present=data is not None,
                    file_format=file_format)
        if file and file.filename:
            logger.info("Processing file upload branch", tenant_id=tenant_id, filename=file.filename)
            # Sniff format from the extension; explicit file_format is only a fallback.
            filename = file.filename.lower()
            if filename.endswith('.csv'):
                detected_format = 'csv'
            elif filename.endswith('.xlsx') or filename.endswith('.xls'):
                detected_format = 'excel'
            elif filename.endswith('.json'):
                detected_format = 'json'
            else:
                detected_format = file_format or 'csv'
            content = await file.read()
            if detected_format in ['xlsx', 'xls', 'excel']:
                # Binary spreadsheet payloads are transported as base64 text.
                import base64
                file_content = base64.b64encode(content).decode('utf-8')
            else:
                file_content = content.decode('utf-8')
            validation_data = {
                "tenant_id": str(tenant_id),
                "data": file_content,
                "data_format": detected_format,
                "filename": file.filename
            }
        elif data:
            logger.info("Processing JSON data branch", tenant_id=tenant_id, data_keys=list(data.keys()) if data else [])
            # JSON body path: enforce tenant and default format.
            validation_data = data.copy()
            validation_data["tenant_id"] = str(tenant_id)
            if "data_format" not in validation_data:
                validation_data["data_format"] = "json"
        else:
            logger.error("No file or data provided", tenant_id=tenant_id, file_present=file is not None, data_present=data is not None)
            raise HTTPException(status_code=400, detail="No file or data provided for validation")
        logger.info("About to call validate_import_data", validation_data_keys=list(validation_data.keys()), data_size=len(validation_data.get("data", "")))
        validation_result = await import_service.validate_import_data(validation_data)
        # NOTE(review): the two "Validation completed" lines below are duplicate
        # log statements with different fields — likely debug leftovers; consider
        # consolidating.
        logger.info("Validation completed", is_valid=validation_result.is_valid, errors_count=len(validation_result.errors))
        logger.info("Validation completed",
                    tenant_id=tenant_id,
                    valid=validation_result.is_valid,
                    total_records=validation_result.total_records)
        return {
            "is_valid": validation_result.is_valid,
            "total_records": validation_result.total_records,
            "valid_records": validation_result.valid_records,
            "invalid_records": validation_result.invalid_records,
            "errors": validation_result.errors,
            "warnings": validation_result.warnings,
            "summary": validation_result.summary,
            "unique_products": validation_result.unique_products,
            "product_list": validation_result.product_list,
            "message": "Validation completed successfully" if validation_result.is_valid else "Validation found errors",
            "details": {
                "total_records": validation_result.total_records,
                "format": validation_data.get("data_format", "unknown")
            }
        }
    except HTTPException:
        # Re-raise HTTP exceptions as-is (don't convert to 500)
        raise
    except Exception as e:
        error_msg = str(e) if e else "Unknown error occurred during validation"
        logger.error("Failed to validate sales data", error=error_msg, tenant_id=tenant_id, exc_info=True)
        raise HTTPException(status_code=500, detail=f"Failed to validate data: {error_msg}")
@router.post(
    route_builder.build_operations_route("import/validate-csv")
)
async def validate_csv_data_legacy(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    file: UploadFile = File(...),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    import_service: DataImportService = Depends(get_import_service)
):
    """Legacy CSV validation endpoint - redirects to universal validator.

    Kept for backward compatibility with older clients; simply delegates to
    validate_sales_data_universal with file-only input (data/file_format
    default to None there).
    """
    return await validate_sales_data_universal(
        tenant_id=tenant_id,
        file=file,
        current_user=current_user,
        import_service=import_service
    )
@router.post(
    route_builder.build_operations_route("import")
)
async def import_sales_data(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    data: Optional[Dict[str, Any]] = None,
    file: Optional[UploadFile] = File(None),
    file_format: Optional[str] = Form(None),
    update_existing: bool = Form(False, description="Whether to update existing records"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    import_service: DataImportService = Depends(get_import_service)
):
    """
    Enhanced import of sales data - supports multiple file formats and JSON.

    Accepts either an uploaded file (CSV / Excel / JSON, auto-detected from
    the filename extension) or a JSON request body; exactly one of ``file``
    or ``data`` must be provided.

    Returns:
        Import summary dict (records processed/created/updated/failed,
        errors, warnings, timing) plus ``file_info`` when a file was uploaded.

    Raises:
        HTTPException 400: when neither a file nor JSON data is supplied.
        HTTPException 500: on unexpected import failures.
    """
    # NOTE(review): update_existing is accepted but never forwarded to
    # import_service.process_import — confirm whether the service should
    # receive it (TODO).
    try:
        if file:
            if not file.filename:
                raise HTTPException(status_code=400, detail="No file provided")

            logger.info("Starting enhanced file import", tenant_id=tenant_id, filename=file.filename)

            # Detect the payload format from the filename extension, falling
            # back to the caller-supplied file_format (or CSV) when unknown.
            filename = file.filename.lower()
            if filename.endswith('.csv'):
                detected_format = 'csv'
            elif filename.endswith(('.xlsx', '.xls')):
                detected_format = 'excel'
            elif filename.endswith('.json'):
                detected_format = 'json'
            else:
                detected_format = file_format or 'csv'

            content = await file.read()
            if detected_format in ['xlsx', 'xls', 'excel']:
                # Binary spreadsheet formats are transported as base64 text.
                import base64
                file_content = base64.b64encode(content).decode('utf-8')
            else:
                file_content = content.decode('utf-8')

            import_result = await import_service.process_import(
                str(tenant_id),
                file_content,
                detected_format,
                filename=file.filename
            )
        elif data:
            logger.info("Starting enhanced JSON data import", tenant_id=tenant_id, record_count=len(data.get("records", [])))
            if "records" in data:
                # Records supplied directly: re-serialize and import as JSON.
                records_json = json.dumps(data.get("records", []))
                import_result = await import_service.process_import(
                    str(tenant_id),
                    records_json,
                    "json"
                )
            else:
                # Raw payload string plus an explicit (or default JSON) format.
                import_result = await import_service.process_import(
                    str(tenant_id),
                    data.get("data", ""),
                    data.get("data_format", "json")
                )
        else:
            raise HTTPException(status_code=400, detail="No data or file provided")

        logger.info("Enhanced import completed",
                   tenant_id=tenant_id,
                   created=import_result.records_created,
                   updated=import_result.records_updated,
                   failed=import_result.records_failed,
                   processing_time=import_result.processing_time_seconds)

        response = {
            "success": import_result.success,
            "records_processed": import_result.records_processed,
            "records_created": import_result.records_created,
            "records_updated": import_result.records_updated,
            "records_failed": import_result.records_failed,
            "errors": import_result.errors,
            "warnings": import_result.warnings,
            "processing_time_seconds": import_result.processing_time_seconds,
            "records_imported": import_result.records_created,
            "message": f"Successfully imported {import_result.records_created} records" if import_result.success else "Import completed with errors"
        }
        if file:
            response["file_info"] = {
                "name": file.filename,
                "format": detected_format,
                # content is always bound when the file branch completed.
                "size_bytes": len(content)
            }
        return response
    except HTTPException:
        # BUG FIX: the broad handler below previously caught the deliberate
        # 400 responses above and remapped them to 500; re-raise them as-is.
        raise
    except Exception as e:
        logger.error("Failed to import sales data", error=str(e), tenant_id=tenant_id, exc_info=True)
        raise HTTPException(status_code=500, detail=f"Failed to import data: {str(e)}")
@router.post(
    route_builder.build_operations_route("import/csv")
)
async def import_csv_data(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    file: UploadFile = File(...),
    update_existing: bool = Form(False, description="Whether to update existing records"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    import_service: DataImportService = Depends(get_import_service)
):
    """
    Import CSV sales data file.

    Returns:
        Import summary dict (records processed/created/updated/failed,
        errors, warnings, timing).

    Raises:
        HTTPException 400: when no filename is present or it is not a .csv.
        HTTPException 500: on unexpected import failures.
    """
    # NOTE(review): update_existing is accepted but never forwarded to the
    # import service — confirm intended behavior (TODO).
    try:
        # Guard the missing-filename case before calling .endswith();
        # previously a None filename raised AttributeError and became a 500.
        if not file.filename or not file.filename.endswith('.csv'):
            raise HTTPException(status_code=400, detail="File must be a CSV file")

        logger.info("Starting CSV data import", tenant_id=tenant_id, filename=file.filename)

        content = await file.read()
        file_content = content.decode('utf-8')

        # Pass tenant_id as str for consistency with the enhanced import
        # endpoint, which calls process_import(str(tenant_id), ...).
        import_result = await import_service.process_import(
            str(tenant_id),
            file_content,
            "csv",
            filename=file.filename
        )

        logger.info("CSV import completed",
                   tenant_id=tenant_id,
                   filename=file.filename,
                   created=import_result.records_created,
                   updated=import_result.records_updated,
                   failed=import_result.records_failed)

        return {
            "success": import_result.success,
            "records_processed": import_result.records_processed,
            "records_created": import_result.records_created,
            "records_updated": import_result.records_updated,
            "records_failed": import_result.records_failed,
            "errors": import_result.errors,
            "warnings": import_result.warnings,
            "processing_time_seconds": import_result.processing_time_seconds
        }
    except HTTPException:
        # BUG FIX: keep the deliberate 400 instead of remapping it to a 500
        # in the broad handler below.
        raise
    except Exception as e:
        logger.error("Failed to import CSV data", error=str(e), tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to import CSV data: {str(e)}")
@router.get(
    route_builder.build_operations_route("import/template")
)
async def get_import_template(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    format: str = "csv"
):
    """
    Get sales data import template.

    Args:
        tenant_id: Tenant requesting the template (used for logging only).
        format: Either "csv" (header-row string) or "json" (sample record).

    Returns:
        Dict with the template payload and the echoed format.

    Raises:
        HTTPException 400: for an unsupported format value.
        HTTPException 500: on unexpected failures.
    """
    try:
        if format not in ["csv", "json"]:
            raise HTTPException(status_code=400, detail="Format must be 'csv' or 'json'")

        if format == "csv":
            # CSV template is just the expected header row.
            template = "date,product_name,product_category,product_sku,quantity_sold,unit_price,revenue,cost_of_goods,discount_applied,location_id,sales_channel,source,notes,weather_condition,is_holiday,is_weekend"
        else:
            # JSON template is one fully-populated sample record.
            template = {
                "records": [
                    {
                        "date": "2024-01-01T10:00:00Z",
                        "product_name": "Sample Product",
                        "product_category": "Sample Category",
                        "product_sku": "SAMPLE001",
                        "quantity_sold": 1,
                        "unit_price": 10.50,
                        "revenue": 10.50,
                        "cost_of_goods": 5.25,
                        "discount_applied": 0.0,
                        "location_id": "LOC001",
                        "sales_channel": "in_store",
                        "source": "manual",
                        "notes": "Sample sales record",
                        "weather_condition": "sunny",
                        "is_holiday": False,
                        "is_weekend": False
                    }
                ]
            }
        return {"template": template, "format": format}
    except HTTPException:
        # BUG FIX: the broad handler below previously converted the 400 for
        # an invalid format into a 500; re-raise it untouched.
        raise
    except Exception as e:
        logger.error("Failed to get import template", error=str(e), tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to get import template: {str(e)}")
# ============================================================================
# Tenant Data Deletion Operations (Internal Service Only)
# ============================================================================
from shared.auth.access_control import service_only_access
from shared.services.tenant_deletion import TenantDataDeletionResult
from app.services.tenant_deletion_service import SalesTenantDeletionService
@router.delete(
    route_builder.build_base_route("tenant/{tenant_id}", include_tenant_prefix=False),
    response_model=dict
)
@service_only_access
async def delete_tenant_data(
    tenant_id: str = Path(..., description="Tenant ID to delete data for"),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """
    Delete all sales data for a tenant (Internal service only).

    Delegates to SalesTenantDeletionService and returns its summary;
    a failed deletion surfaces as a 500 with the service's error list.
    """
    try:
        logger.info("sales.tenant_deletion.api_called", tenant_id=tenant_id)

        deletion_result = await SalesTenantDeletionService(db).safe_delete_tenant_data(tenant_id)

        if not deletion_result.success:
            joined_errors = ', '.join(deletion_result.errors)
            raise HTTPException(
                status_code=500,
                detail=f"Tenant data deletion failed: {joined_errors}"
            )

        return {
            "message": "Tenant data deletion completed successfully",
            "summary": deletion_result.to_dict()
        }
    except HTTPException:
        # Deliberate HTTP errors (including the 500 above) pass through unchanged.
        raise
    except Exception as e:
        logger.error("sales.tenant_deletion.api_error", tenant_id=tenant_id, error=str(e), exc_info=True)
        raise HTTPException(status_code=500, detail=f"Failed to delete tenant data: {str(e)}")
@router.get(
    route_builder.build_base_route("tenant/{tenant_id}/deletion-preview", include_tenant_prefix=False),
    response_model=dict
)
@service_only_access
async def preview_tenant_data_deletion(
    tenant_id: str = Path(..., description="Tenant ID to preview deletion for"),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """
    Preview what data would be deleted for a tenant (dry-run).

    Returns per-category item counts from the deletion service without
    deleting anything.

    Raises:
        HTTPException 500: on unexpected failures while building the preview.
    """
    try:
        logger.info("sales.tenant_deletion.preview_called", tenant_id=tenant_id)

        deletion_service = SalesTenantDeletionService(db)
        preview_data = await deletion_service.get_tenant_data_preview(tenant_id)

        # BUG FIX: the previous version wrapped preview_data in a
        # TenantDataDeletionResult, hard-coded success=True, then checked
        # `if not result.success` — an unreachable error branch. The counts
        # are now returned directly; the response shape is unchanged.
        return {
            "tenant_id": tenant_id,
            "service": "sales-service",
            "data_counts": preview_data,
            "total_items": sum(preview_data.values())
        }
    except HTTPException:
        raise
    except Exception as e:
        logger.error("sales.tenant_deletion.preview_error", tenant_id=tenant_id, error=str(e), exc_info=True)
        raise HTTPException(status_code=500, detail=f"Failed to preview tenant data deletion: {str(e)}")

View File

@@ -0,0 +1,244 @@
# services/sales/app/api/sales_records.py
"""
Sales Records API - Atomic CRUD operations on SalesData model
"""
from fastapi import APIRouter, Depends, HTTPException, Query, Path, status
from typing import List, Optional, Dict, Any
from uuid import UUID
from datetime import datetime
import structlog
from app.schemas.sales import (
SalesDataCreate,
SalesDataUpdate,
SalesDataResponse,
SalesDataQuery
)
from app.services.sales_service import SalesService
from app.models import AuditLog
from shared.auth.decorators import get_current_user_dep
from shared.auth.access_control import require_user_role
from shared.routing import RouteBuilder
from shared.security import create_audit_logger, AuditSeverity, AuditAction
# Builds tenant-scoped URL paths for the sales service's routes.
route_builder = RouteBuilder('sales')
router = APIRouter(tags=["sales-records"])
logger = structlog.get_logger()
# Initialize audit logger
# Audit events are persisted via the shared helper using this service's AuditLog model.
audit_logger = create_audit_logger("sales-service", AuditLog)
def get_sales_service():
    """Dependency injection for SalesService.

    FastAPI dependency factory: returns a fresh SalesService per request.
    """
    return SalesService()
@router.post(
    route_builder.build_base_route("sales"),
    response_model=SalesDataResponse,
    status_code=status.HTTP_201_CREATED
)
@require_user_role(['admin', 'owner', 'member'])
async def create_sales_record(
    sales_data: SalesDataCreate,
    tenant_id: UUID = Path(..., description="Tenant ID"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    sales_service: SalesService = Depends(get_sales_service)
):
    """
    Create a new sales record for the tenant.

    Validation failures surface as 400; anything unexpected becomes a 500.
    """
    try:
        logger.info(
            "Creating sales record",
            product=sales_data.product_name,
            quantity=sales_data.quantity_sold,
            tenant_id=tenant_id,
            user_id=current_user.get("user_id")
        )

        # Stamp the acting user's id on the record when one is present.
        raw_user_id = current_user.get("user_id")
        acting_user_id = UUID(current_user["user_id"]) if raw_user_id else None

        record = await sales_service.create_sales_record(
            sales_data,
            tenant_id,
            user_id=acting_user_id
        )

        logger.info("Successfully created sales record", record_id=record.id, tenant_id=tenant_id)
        return record
    except ValueError as ve:
        logger.warning("Validation error creating sales record", error=str(ve), tenant_id=tenant_id)
        raise HTTPException(status_code=400, detail=str(ve))
    except Exception as e:
        logger.error("Failed to create sales record", error=str(e), tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to create sales record: {str(e)}")
@router.get(
    route_builder.build_base_route("sales"),
    response_model=List[SalesDataResponse]
)
async def get_sales_records(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    start_date: Optional[datetime] = Query(None, description="Start date filter"),
    end_date: Optional[datetime] = Query(None, description="End date filter"),
    product_name: Optional[str] = Query(None, description="Product name filter"),
    product_category: Optional[str] = Query(None, description="Product category filter"),
    location_id: Optional[str] = Query(None, description="Location filter"),
    sales_channel: Optional[str] = Query(None, description="Sales channel filter"),
    source: Optional[str] = Query(None, description="Data source filter"),
    is_validated: Optional[bool] = Query(None, description="Validation status filter"),
    limit: int = Query(50, ge=1, le=1000, description="Number of records to return"),
    offset: int = Query(0, ge=0, description="Number of records to skip"),
    order_by: str = Query("date", description="Field to order by"),
    order_direction: str = Query("desc", description="Order direction (asc/desc)"),
    sales_service: SalesService = Depends(get_sales_service)
):
    """
    List a tenant's sales records with optional filtering, ordering,
    and offset/limit pagination.
    """
    try:
        # Bundle every filter/pagination knob into the query schema so the
        # service layer receives a single validated object.
        query_params = SalesDataQuery(
            start_date=start_date,
            end_date=end_date,
            product_name=product_name,
            product_category=product_category,
            location_id=location_id,
            sales_channel=sales_channel,
            source=source,
            is_validated=is_validated,
            limit=limit,
            offset=offset,
            order_by=order_by,
            order_direction=order_direction,
        )

        matching_records = await sales_service.get_sales_records(tenant_id, query_params)

        logger.info("Retrieved sales records", count=len(matching_records), tenant_id=tenant_id)
        return matching_records
    except Exception as e:
        logger.error("Failed to get sales records", error=str(e), tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to get sales records: {str(e)}")
@router.get(
    route_builder.build_resource_detail_route("sales", "record_id"),
    response_model=SalesDataResponse
)
async def get_sales_record(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    record_id: UUID = Path(..., description="Sales record ID"),
    sales_service: SalesService = Depends(get_sales_service)
):
    """Fetch a single sales record by id, scoped to the tenant."""
    try:
        sales_record = await sales_service.get_sales_record(record_id, tenant_id)
        if not sales_record:
            # Missing record is a client-visible 404, not a server error.
            raise HTTPException(status_code=404, detail="Sales record not found")
        return sales_record
    except HTTPException:
        raise
    except Exception as e:
        logger.error("Failed to get sales record", error=str(e), record_id=record_id, tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to get sales record: {str(e)}")
@router.put(
    route_builder.build_resource_detail_route("sales", "record_id"),
    response_model=SalesDataResponse
)
async def update_sales_record(
    update_data: SalesDataUpdate,
    tenant_id: UUID = Path(..., description="Tenant ID"),
    record_id: UUID = Path(..., description="Sales record ID"),
    sales_service: SalesService = Depends(get_sales_service)
):
    """Apply updates to an existing sales record and return the new state."""
    try:
        result = await sales_service.update_sales_record(record_id, update_data, tenant_id)
        logger.info("Updated sales record", record_id=record_id, tenant_id=tenant_id)
        return result
    except ValueError as ve:
        # Service-level validation problems map to a 400 for the caller.
        logger.warning("Validation error updating sales record", error=str(ve), record_id=record_id)
        raise HTTPException(status_code=400, detail=str(ve))
    except Exception as e:
        logger.error("Failed to update sales record", error=str(e), record_id=record_id, tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to update sales record: {str(e)}")
@router.delete(
    route_builder.build_resource_detail_route("sales", "record_id")
)
@require_user_role(['admin', 'owner'])
async def delete_sales_record(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    record_id: UUID = Path(..., description="Sales record ID"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    sales_service: SalesService = Depends(get_sales_service)
):
    """
    Delete a sales record (Admin+ only).

    Fetches the record first so its details can be written to the audit log,
    then deletes it. Audit logging is best-effort and never blocks the
    deletion response.

    Raises:
        HTTPException 404: when the record does not exist.
        HTTPException 400: on service-level validation errors.
        HTTPException 500: on unexpected failures.
    """
    try:
        # Get record details before deletion for audit log
        record = await sales_service.get_sales_record(record_id, tenant_id)

        success = await sales_service.delete_sales_record(record_id, tenant_id)
        if not success:
            raise HTTPException(status_code=404, detail="Sales record not found")

        # Log audit event for sales record deletion
        try:
            from app.core.database import get_db
            # BUG FIX: get_db appears to be an async generator (it is used as
            # `db: AsyncSession = Depends(get_db)` elsewhere in this service),
            # so the previous `next(get_db())` raised TypeError and the audit
            # entry was silently skipped. Iterate it asynchronously instead —
            # TODO confirm get_db is indeed async.
            async for db in get_db():
                await audit_logger.log_deletion(
                    db_session=db,
                    tenant_id=str(tenant_id),
                    user_id=current_user["user_id"],
                    resource_type="sales_record",
                    resource_id=str(record_id),
                    resource_data={
                        "product_name": record.product_name if record else None,
                        "quantity_sold": record.quantity_sold if record else None,
                        "sale_date": record.date.isoformat() if record and record.date else None
                    } if record else None,
                    description=f"Deleted sales record for {record.product_name if record else 'unknown product'}",
                    endpoint=f"/sales/{record_id}",
                    method="DELETE"
                )
                break
        except Exception as audit_error:
            # Best-effort: a failed audit write must not fail the deletion.
            logger.warning("Failed to log audit event", error=str(audit_error))

        logger.info("Deleted sales record", record_id=record_id, tenant_id=tenant_id)
        return {"message": "Sales record deleted successfully"}
    except ValueError as ve:
        logger.warning("Error deleting sales record", error=str(ve), record_id=record_id)
        raise HTTPException(status_code=400, detail=str(ve))
    except HTTPException:
        raise
    except Exception as e:
        logger.error("Failed to delete sales record", error=str(e), record_id=record_id, tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to delete sales record: {str(e)}")
@router.get(
    route_builder.build_base_route("categories"),
    response_model=List[str]
)
async def get_product_categories(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    sales_service: SalesService = Depends(get_sales_service)
):
    """Return the distinct product categories present in the tenant's sales data."""
    try:
        return await sales_service.get_product_categories(tenant_id)
    except Exception as e:
        logger.error("Failed to get product categories", error=str(e), tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to get product categories: {str(e)}")