# services/sales/app/api/sales_operations.py
"""
Sales Operations API - Business operations and complex workflows
"""
from fastapi import APIRouter, Depends, HTTPException, Query, Path, UploadFile, File, Form
from typing import List, Optional, Dict, Any
from uuid import UUID
from datetime import datetime
import structlog
import json

from app.schemas.sales import SalesDataResponse
from app.services.sales_service import SalesService
from app.services.data_import_service import DataImportService
from shared.auth.decorators import get_current_user_dep
from shared.auth.access_control import require_user_role
from shared.routing import RouteBuilder

route_builder = RouteBuilder('sales')
router = APIRouter(tags=["sales-operations"])

logger = structlog.get_logger()
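
# NOTE: Concrete URL paths come from the shared RouteBuilder helper. The handlers
# below assume the generated routes expose a {tenant_id} path parameter (and
# {record_id} for the validate-record operation), since those values are declared
# with fastapi.Path(...) in the endpoint signatures.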

def get_sales_service():
    """Dependency injection for SalesService"""
    return SalesService()


def get_import_service():
    """Dependency injection for DataImportService"""
    return DataImportService()


@router.post(
    route_builder.build_operations_route("validate-record"),
    response_model=SalesDataResponse
)
async def validate_sales_record(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    record_id: UUID = Path(..., description="Sales record ID"),
    validation_notes: Optional[str] = Query(None, description="Validation notes"),
    sales_service: SalesService = Depends(get_sales_service)
):
    """Mark a sales record as validated"""
    try:
        validated_record = await sales_service.validate_sales_record(record_id, tenant_id, validation_notes)
        logger.info("Validated sales record", record_id=record_id, tenant_id=tenant_id)
        return validated_record
    except ValueError as ve:
        logger.warning("Error validating sales record", error=str(ve), record_id=record_id)
        raise HTTPException(status_code=400, detail=str(ve))
    except Exception as e:
        logger.error("Failed to validate sales record", error=str(e), record_id=record_id, tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to validate sales record: {str(e)}")


@router.get(
    route_builder.build_nested_resource_route("inventory-products", "inventory_product_id", "sales"),
    response_model=List[SalesDataResponse]
)
async def get_product_sales(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    inventory_product_id: UUID = Path(..., description="Inventory product ID"),
    start_date: Optional[datetime] = Query(None, description="Start date filter"),
    end_date: Optional[datetime] = Query(None, description="End date filter"),
    sales_service: SalesService = Depends(get_sales_service)
):
    """Get sales records for a specific product (cross-service query)"""
    try:
        records = await sales_service.get_product_sales(tenant_id, inventory_product_id, start_date, end_date)
        logger.info("Retrieved product sales", count=len(records), inventory_product_id=inventory_product_id, tenant_id=tenant_id)
        return records
    except Exception as e:
        logger.error("Failed to get product sales", error=str(e), tenant_id=tenant_id, inventory_product_id=inventory_product_id)
        raise HTTPException(status_code=500, detail=f"Failed to get product sales: {str(e)}")


@router.post(
    route_builder.build_operations_route("import/validate-json")
)
async def validate_json_data(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    data: Optional[Dict[str, Any]] = None,
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    import_service: DataImportService = Depends(get_import_service)
):
    """Validate JSON sales data"""
    try:
        if not data:
            raise HTTPException(status_code=400, detail="No data provided")

        logger.info("Validating JSON data", tenant_id=tenant_id, record_count=len(data.get("records", [])))

        if "records" in data:
            validation_data = {
                "tenant_id": str(tenant_id),
                "data": json.dumps(data.get("records", [])),
                "data_format": "json"
            }
        else:
            validation_data = data.copy()
            validation_data["tenant_id"] = str(tenant_id)
            if "data_format" not in validation_data:
                validation_data["data_format"] = "json"

        validation_result = await import_service.validate_import_data(validation_data)

        logger.info("JSON validation completed", tenant_id=tenant_id, valid=validation_result.is_valid)

        return {
            "is_valid": validation_result.is_valid,
            "total_records": validation_result.total_records,
            "valid_records": validation_result.valid_records,
            "invalid_records": validation_result.invalid_records,
            "errors": validation_result.errors,
            "warnings": validation_result.warnings,
            "summary": validation_result.summary
        }

    except HTTPException:
        # Re-raise HTTP exceptions as-is (don't convert the 400 above into a 500)
        raise
    except Exception as e:
        logger.error("Failed to validate JSON data", error=str(e), tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to validate data: {str(e)}")


@router.post(
    route_builder.build_operations_route("import/validate")
)
async def validate_sales_data_universal(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    file: Optional[UploadFile] = File(None),
    data: Optional[Dict[str, Any]] = None,
    file_format: Optional[str] = Form(None),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    import_service: DataImportService = Depends(get_import_service)
):
    """Universal validation endpoint for sales data - supports file uploads and JSON payloads"""
    try:
        logger.info(
            "Validation endpoint called",
            tenant_id=tenant_id,
            file_present=file is not None,
            file_filename=file.filename if file else None,
            data_present=data is not None,
            file_format=file_format
        )

        if file and file.filename:
            logger.info("Processing file upload branch", tenant_id=tenant_id, filename=file.filename)

            # Detect the format from the file extension, falling back to the
            # explicit file_format form field (or CSV) when the extension is unknown.
            filename = file.filename.lower()
            if filename.endswith('.csv'):
                detected_format = 'csv'
            elif filename.endswith('.xlsx') or filename.endswith('.xls'):
                detected_format = 'excel'
            elif filename.endswith('.json'):
                detected_format = 'json'
            else:
                detected_format = file_format or 'csv'

            content = await file.read()

            # Excel files are binary, so they are passed on base64-encoded;
            # CSV and JSON are plain text.
            if detected_format in ['xlsx', 'xls', 'excel']:
                import base64
                file_content = base64.b64encode(content).decode('utf-8')
            else:
                file_content = content.decode('utf-8')

            validation_data = {
                "tenant_id": str(tenant_id),
                "data": file_content,
                "data_format": detected_format,
                "filename": file.filename
            }

        elif data:
            logger.info("Processing JSON data branch", tenant_id=tenant_id, data_keys=list(data.keys()))

            validation_data = data.copy()
            validation_data["tenant_id"] = str(tenant_id)
            if "data_format" not in validation_data:
                validation_data["data_format"] = "json"

        else:
            logger.error("No file or data provided", tenant_id=tenant_id, file_present=file is not None, data_present=data is not None)
            raise HTTPException(status_code=400, detail="No file or data provided for validation")

        logger.info("Calling validate_import_data", validation_data_keys=list(validation_data.keys()), data_size=len(validation_data.get("data", "")))
        validation_result = await import_service.validate_import_data(validation_data)

        logger.info(
            "Validation completed",
            tenant_id=tenant_id,
            valid=validation_result.is_valid,
            total_records=validation_result.total_records,
            errors_count=len(validation_result.errors)
        )

        return {
            "is_valid": validation_result.is_valid,
            "total_records": validation_result.total_records,
            "valid_records": validation_result.valid_records,
            "invalid_records": validation_result.invalid_records,
            "errors": validation_result.errors,
            "warnings": validation_result.warnings,
            "summary": validation_result.summary,
            "unique_products": validation_result.unique_products,
            "product_list": validation_result.product_list,
            "message": "Validation completed successfully" if validation_result.is_valid else "Validation found errors",
            "details": {
                "total_records": validation_result.total_records,
                "format": validation_data.get("data_format", "unknown")
            }
        }

    except HTTPException:
        # Re-raise HTTP exceptions as-is (don't convert to 500)
        raise
    except Exception as e:
        error_msg = str(e) if e else "Unknown error occurred during validation"
        logger.error("Failed to validate sales data", error=error_msg, tenant_id=tenant_id, exc_info=True)
        raise HTTPException(status_code=500, detail=f"Failed to validate data: {error_msg}")


@router.post(
    route_builder.build_operations_route("import/validate-csv")
)
async def validate_csv_data_legacy(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    file: UploadFile = File(...),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    import_service: DataImportService = Depends(get_import_service)
):
    """Legacy CSV validation endpoint - delegates to the universal validator"""
    return await validate_sales_data_universal(
        tenant_id=tenant_id,
        file=file,
        current_user=current_user,
        import_service=import_service
    )


@router.post(
    route_builder.build_operations_route("import")
)
async def import_sales_data(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    data: Optional[Dict[str, Any]] = None,
    file: Optional[UploadFile] = File(None),
    file_format: Optional[str] = Form(None),
    update_existing: bool = Form(False, description="Whether to update existing records"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    import_service: DataImportService = Depends(get_import_service)
):
    """Enhanced sales data import - supports multiple file formats and JSON payloads"""
    try:
        if file:
            if not file.filename:
                raise HTTPException(status_code=400, detail="No file provided")

            logger.info("Starting enhanced file import", tenant_id=tenant_id, filename=file.filename)

            # Detect the format from the file extension, falling back to the
            # explicit file_format form field (or CSV) when the extension is unknown.
            filename = file.filename.lower()
            if filename.endswith('.csv'):
                detected_format = 'csv'
            elif filename.endswith('.xlsx') or filename.endswith('.xls'):
                detected_format = 'excel'
            elif filename.endswith('.json'):
                detected_format = 'json'
            else:
                detected_format = file_format or 'csv'

            content = await file.read()

            # Excel files are binary, so they are passed on base64-encoded;
            # CSV and JSON are plain text.
            if detected_format in ['xlsx', 'xls', 'excel']:
                import base64
                file_content = base64.b64encode(content).decode('utf-8')
            else:
                file_content = content.decode('utf-8')

            import_result = await import_service.process_import(
                str(tenant_id),
                file_content,
                detected_format,
                filename=file.filename
            )

        elif data:
            logger.info("Starting enhanced JSON data import", tenant_id=tenant_id, record_count=len(data.get("records", [])))

            if "records" in data:
                records_json = json.dumps(data.get("records", []))
                import_result = await import_service.process_import(
                    str(tenant_id),
                    records_json,
                    "json"
                )
            else:
                import_result = await import_service.process_import(
                    str(tenant_id),
                    data.get("data", ""),
                    data.get("data_format", "json")
                )
        else:
            raise HTTPException(status_code=400, detail="No data or file provided")

        logger.info(
            "Enhanced import completed",
            tenant_id=tenant_id,
            created=import_result.records_created,
            updated=import_result.records_updated,
            failed=import_result.records_failed,
            processing_time=import_result.processing_time_seconds
        )

        response = {
            "success": import_result.success,
            "records_processed": import_result.records_processed,
            "records_created": import_result.records_created,
            "records_updated": import_result.records_updated,
            "records_failed": import_result.records_failed,
            "errors": import_result.errors,
            "warnings": import_result.warnings,
            "processing_time_seconds": import_result.processing_time_seconds,
            # records_imported mirrors records_created
            "records_imported": import_result.records_created,
            "message": f"Successfully imported {import_result.records_created} records" if import_result.success else "Import completed with errors"
        }

        if file:
            response["file_info"] = {
                "name": file.filename,
                "format": detected_format,
                "size_bytes": len(content) if 'content' in locals() else 0
            }

        return response

    except HTTPException:
        # Re-raise HTTP exceptions as-is (don't convert to 500)
        raise
    except Exception as e:
        logger.error("Failed to import sales data", error=str(e), tenant_id=tenant_id, exc_info=True)
        raise HTTPException(status_code=500, detail=f"Failed to import data: {str(e)}")


@router.post(
    route_builder.build_operations_route("import/csv")
)
async def import_csv_data(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    file: UploadFile = File(...),
    update_existing: bool = Form(False, description="Whether to update existing records"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    import_service: DataImportService = Depends(get_import_service)
):
    """Import CSV sales data file"""
    try:
        if not file.filename or not file.filename.endswith('.csv'):
            raise HTTPException(status_code=400, detail="File must be a CSV file")

        logger.info("Starting CSV data import", tenant_id=tenant_id, filename=file.filename)

        content = await file.read()
        file_content = content.decode('utf-8')

        import_result = await import_service.process_import(
            str(tenant_id),
            file_content,
            "csv",
            filename=file.filename
        )

        logger.info(
            "CSV import completed",
            tenant_id=tenant_id,
            filename=file.filename,
            created=import_result.records_created,
            updated=import_result.records_updated,
            failed=import_result.records_failed
        )

        return {
            "success": import_result.success,
            "records_processed": import_result.records_processed,
            "records_created": import_result.records_created,
            "records_updated": import_result.records_updated,
            "records_failed": import_result.records_failed,
            "errors": import_result.errors,
            "warnings": import_result.warnings,
            "processing_time_seconds": import_result.processing_time_seconds
        }

    except HTTPException:
        # Re-raise HTTP exceptions as-is (don't convert the 400 above into a 500)
        raise
    except Exception as e:
        logger.error("Failed to import CSV data", error=str(e), tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to import CSV data: {str(e)}")


@router.get(
    route_builder.build_operations_route("import/template")
)
async def get_import_template(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    format: str = Query("csv", description="Template format: 'csv' or 'json'")
):
    """Get sales data import template"""
    try:
        if format not in ["csv", "json"]:
            raise HTTPException(status_code=400, detail="Format must be 'csv' or 'json'")

        if format == "csv":
            template = "date,product_name,product_category,product_sku,quantity_sold,unit_price,revenue,cost_of_goods,discount_applied,location_id,sales_channel,source,notes,weather_condition,is_holiday,is_weekend"
        else:
            template = {
                "records": [
                    {
                        "date": "2024-01-01T10:00:00Z",
                        "product_name": "Sample Product",
                        "product_category": "Sample Category",
                        "product_sku": "SAMPLE001",
                        "quantity_sold": 1,
                        "unit_price": 10.50,
                        "revenue": 10.50,
                        "cost_of_goods": 5.25,
                        "discount_applied": 0.0,
                        "location_id": "LOC001",
                        "sales_channel": "in_store",
                        "source": "manual",
                        "notes": "Sample sales record",
                        "weather_condition": "sunny",
                        "is_holiday": False,
                        "is_weekend": False
                    }
                ]
            }

        return {"template": template, "format": format}

    except HTTPException:
        # Re-raise HTTP exceptions as-is (don't convert the 400 above into a 500)
        raise
    except Exception as e:
        logger.error("Failed to get import template", error=str(e), tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to get import template: {str(e)}")