# bakery-ia/services/data/app/api/sales.py
"""
Enhanced Sales API Endpoints
Uses the repository pattern and enhanced services with dependency injection.
"""
from fastapi import APIRouter, Depends, HTTPException, UploadFile, File, Form, Body, Query, Response, Path
from fastapi.responses import StreamingResponse
from typing import List, Optional, Dict, Any
from uuid import UUID
from datetime import datetime
import structlog
from app.schemas.sales import (
SalesDataCreate,
SalesDataResponse,
SalesDataQuery,
SalesDataImport,
SalesImportResult,
SalesValidationResult,
SalesValidationRequest,
SalesExportRequest
)
from app.services.sales_service import SalesService
from app.services.data_import_service import EnhancedDataImportService
from app.services.messaging import (
publish_sales_created,
publish_data_imported,
publish_export_completed
)
from shared.database.base import create_database_manager
from shared.auth.decorators import get_current_user_dep
router = APIRouter(tags=["enhanced-sales"])
logger = structlog.get_logger()
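# Dependency providers: each call builds a fresh database manager, so any connection
# pooling or reuse is assumed to happen inside create_database_manager. The settings
# import is kept local, presumably to avoid import cycles at module load time.
# In tests these providers can be swapped via FastAPI's dependency_overrides, e.g.:
#   app.dependency_overrides[get_sales_service] = lambda: FakeSalesService()
# (FakeSalesService is a hypothetical test double.)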
def get_sales_service():
"""Dependency injection for SalesService"""
from app.core.config import settings
database_manager = create_database_manager(settings.DATABASE_URL, "data-service")
return SalesService(database_manager)
def get_import_service():
"""Dependency injection for EnhancedDataImportService"""
from app.core.config import settings
database_manager = create_database_manager(settings.DATABASE_URL, "data-service")
return EnhancedDataImportService(database_manager)
@router.post("/tenants/{tenant_id}/sales", response_model=SalesDataResponse)
async def create_sales_record(
sales_data: SalesDataCreate,
tenant_id: UUID = Path(..., description="Tenant ID"),
current_user: Dict[str, Any] = Depends(get_current_user_dep),
sales_service: SalesService = Depends(get_sales_service)
):
"""Create a new sales record using repository pattern"""
try:
logger.info("Creating sales record with repository pattern",
product=sales_data.product_name,
quantity=sales_data.quantity_sold,
tenant_id=tenant_id,
user_id=current_user["user_id"])
        # The tenant_id from the URL path is authoritative; override any value supplied in the body
sales_data.tenant_id = tenant_id
record = await sales_service.create_sales_record(sales_data, str(tenant_id))
        # Publish event (best-effort): a publish failure is logged but does not fail the request
try:
await publish_sales_created({
"tenant_id": str(tenant_id),
"product_name": sales_data.product_name,
"quantity_sold": sales_data.quantity_sold,
"revenue": sales_data.revenue,
"source": sales_data.source,
"created_by": current_user["user_id"],
"timestamp": datetime.utcnow().isoformat()
})
except Exception as pub_error:
logger.warning("Failed to publish sales created event", error=str(pub_error))
logger.info("Successfully created sales record using repository",
record_id=record.id,
tenant_id=tenant_id)
return record
except Exception as e:
logger.error("Failed to create sales record",
error=str(e),
tenant_id=tenant_id)
raise HTTPException(status_code=500, detail=f"Failed to create sales record: {str(e)}")
@router.get("/tenants/{tenant_id}/sales", response_model=List[SalesDataResponse])
async def get_sales_data(
tenant_id: UUID = Path(..., description="Tenant ID"),
start_date: Optional[datetime] = Query(None, description="Start date filter"),
end_date: Optional[datetime] = Query(None, description="End date filter"),
product_name: Optional[str] = Query(None, description="Product name filter"),
limit: Optional[int] = Query(1000, le=5000, description="Maximum number of records to return"),
offset: Optional[int] = Query(0, ge=0, description="Number of records to skip"),
product_names: Optional[List[str]] = Query(None, description="Multiple product name filters"),
location_ids: Optional[List[str]] = Query(None, description="Location ID filters"),
sources: Optional[List[str]] = Query(None, description="Source filters"),
min_quantity: Optional[int] = Query(None, description="Minimum quantity filter"),
max_quantity: Optional[int] = Query(None, description="Maximum quantity filter"),
min_revenue: Optional[float] = Query(None, description="Minimum revenue filter"),
max_revenue: Optional[float] = Query(None, description="Maximum revenue filter"),
current_user: Dict[str, Any] = Depends(get_current_user_dep),
sales_service: SalesService = Depends(get_sales_service)
):
"""Get sales data using repository pattern with enhanced filtering"""
try:
logger.debug("Querying sales data with repository pattern",
tenant_id=tenant_id,
start_date=start_date,
end_date=end_date,
limit=limit,
offset=offset)
# Create enhanced query
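        # A single product_name filter takes precedence over the product_names list when both are given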
query = SalesDataQuery(
tenant_id=tenant_id,
start_date=start_date,
end_date=end_date,
product_names=[product_name] if product_name else product_names,
location_ids=location_ids,
sources=sources,
min_quantity=min_quantity,
max_quantity=max_quantity,
min_revenue=min_revenue,
max_revenue=max_revenue,
limit=limit,
offset=offset
)
records = await sales_service.get_sales_data(query)
logger.debug("Successfully retrieved sales data using repository",
count=len(records),
tenant_id=tenant_id)
return records
except Exception as e:
logger.error("Failed to query sales data",
error=str(e),
tenant_id=tenant_id)
raise HTTPException(status_code=500, detail=f"Failed to query sales data: {str(e)}")
@router.get("/tenants/{tenant_id}/sales/analytics")
async def get_sales_analytics(
tenant_id: UUID = Path(..., description="Tenant ID"),
start_date: Optional[datetime] = Query(None, description="Start date"),
end_date: Optional[datetime] = Query(None, description="End date"),
current_user: Dict[str, Any] = Depends(get_current_user_dep),
sales_service: SalesService = Depends(get_sales_service)
):
"""Get sales analytics using repository pattern"""
try:
logger.debug("Getting sales analytics with repository pattern",
tenant_id=tenant_id,
start_date=start_date,
end_date=end_date)
analytics = await sales_service.get_sales_analytics(
str(tenant_id), start_date, end_date
)
logger.debug("Analytics generated successfully using repository", tenant_id=tenant_id)
return analytics
except Exception as e:
logger.error("Failed to generate sales analytics",
error=str(e),
tenant_id=tenant_id)
raise HTTPException(status_code=500, detail=f"Failed to generate analytics: {str(e)}")
@router.get("/tenants/{tenant_id}/sales/aggregation")
async def get_sales_aggregation(
tenant_id: UUID = Path(..., description="Tenant ID"),
start_date: Optional[datetime] = Query(None, description="Start date"),
end_date: Optional[datetime] = Query(None, description="End date"),
group_by: str = Query("daily", description="Aggregation period: daily, weekly, monthly"),
current_user: Dict[str, Any] = Depends(get_current_user_dep),
sales_service: SalesService = Depends(get_sales_service)
):
"""Get sales aggregation data using repository pattern"""
try:
logger.debug("Getting sales aggregation with repository pattern",
tenant_id=tenant_id,
group_by=group_by)
aggregation = await sales_service.get_sales_aggregation(
str(tenant_id), start_date, end_date, group_by
)
logger.debug("Aggregation generated successfully using repository",
tenant_id=tenant_id,
group_by=group_by)
return aggregation
except Exception as e:
logger.error("Failed to get sales aggregation",
error=str(e),
tenant_id=tenant_id)
raise HTTPException(status_code=500, detail=f"Failed to get aggregation: {str(e)}")
@router.post("/tenants/{tenant_id}/sales/import", response_model=SalesImportResult)
async def import_sales_data(
tenant_id: UUID = Path(..., description="Tenant ID"),
file: UploadFile = File(...),
file_format: str = Form(...),
current_user: Dict[str, Any] = Depends(get_current_user_dep),
import_service: EnhancedDataImportService = Depends(get_import_service)
):
"""Import sales data using enhanced repository pattern"""
try:
logger.info("Importing sales data with enhanced repository pattern",
tenant_id=tenant_id,
format=file_format,
filename=file.filename,
user_id=current_user["user_id"])
# Read file content
content = await file.read()
file_content = content.decode('utf-8')
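        # The upload is decoded as UTF-8 text; binary formats (e.g. Excel workbooks) are
        # assumed to be converted to text upstream or handled inside the import service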
# Process using enhanced import service
result = await import_service.process_import(
str(tenant_id),
file_content,
file_format,
filename=file.filename
)
if result.success:
# Publish event
try:
await publish_data_imported({
"tenant_id": str(tenant_id),
"type": "file_import",
"format": file_format,
"filename": file.filename,
"records_created": result.records_created,
"imported_by": current_user["user_id"],
"timestamp": datetime.utcnow().isoformat()
})
except Exception as pub_error:
logger.warning("Failed to publish import event", error=str(pub_error))
logger.info("Import completed with enhanced repository pattern",
success=result.success,
records_created=result.records_created,
tenant_id=tenant_id)
return result
except Exception as e:
logger.error("Failed to import sales data",
error=str(e),
tenant_id=tenant_id)
raise HTTPException(status_code=500, detail=f"Failed to import sales data: {str(e)}")
@router.post("/tenants/{tenant_id}/sales/import/validate", response_model=SalesValidationResult)
async def validate_import_data(
tenant_id: UUID = Path(..., description="Tenant ID"),
file: UploadFile = File(..., description="File to validate"),
file_format: str = Form(default="csv", description="File format: csv, json, excel"),
validate_only: bool = Form(default=True, description="Only validate, don't import"),
source: str = Form(default="onboarding_upload", description="Source of the upload"),
current_user: Dict[str, Any] = Depends(get_current_user_dep),
import_service: EnhancedDataImportService = Depends(get_import_service)
):
"""Validate import data using enhanced repository pattern"""
try:
logger.info("Validating import data with enhanced repository pattern",
tenant_id=tenant_id,
format=file_format,
filename=file.filename,
user_id=current_user["user_id"])
# Read file content
content = await file.read()
file_content = content.decode('utf-8')
# Create validation data structure
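        # These keys mirror the JSON validation endpoint below and are what the import
        # service's validate_import_data is assumed to expect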
validation_data = {
"tenant_id": str(tenant_id),
"data": file_content,
"data_format": file_format,
"source": source,
"validate_only": validate_only
}
# Use enhanced validation service
validation_result = await import_service.validate_import_data(validation_data)
logger.info("Validation completed with enhanced repository pattern",
is_valid=validation_result.is_valid,
total_records=validation_result.total_records,
tenant_id=tenant_id)
return validation_result
except Exception as e:
logger.error("Failed to validate import data",
error=str(e),
tenant_id=tenant_id)
raise HTTPException(status_code=500, detail=f"Failed to validate import data: {str(e)}")
@router.post("/tenants/{tenant_id}/sales/import/validate-json", response_model=SalesValidationResult)
async def validate_import_data_json(
tenant_id: UUID = Path(..., description="Tenant ID"),
    request: SalesValidationRequest = Body(...),
current_user: Dict[str, Any] = Depends(get_current_user_dep),
import_service: EnhancedDataImportService = Depends(get_import_service)
):
"""Validate import data from JSON request for onboarding flow"""
try:
logger.info("Starting JSON-based data validation",
tenant_id=str(tenant_id),
data_format=request.data_format,
data_length=len(request.data),
validate_only=request.validate_only)
# Create validation data structure
validation_data = {
"tenant_id": str(tenant_id),
"data": request.data, # Fixed: use 'data' not 'content'
"data_format": request.data_format,
"filename": f"onboarding_data.{request.data_format}",
"source": request.source,
"validate_only": request.validate_only
}
# Use enhanced validation service
validation_result = await import_service.validate_import_data(validation_data)
logger.info("JSON validation completed",
is_valid=validation_result.is_valid,
total_records=validation_result.total_records,
tenant_id=tenant_id)
return validation_result
except Exception as e:
logger.error("Failed to validate JSON import data",
error=str(e),
tenant_id=tenant_id)
raise HTTPException(status_code=500, detail=f"Failed to validate import data: {str(e)}")
@router.post("/tenants/{tenant_id}/sales/export")
async def export_sales_data(
tenant_id: UUID = Path(..., description="Tenant ID"),
export_format: str = Query("csv", description="Export format: csv, excel, json"),
start_date: Optional[datetime] = Query(None, description="Start date"),
end_date: Optional[datetime] = Query(None, description="End date"),
products: Optional[List[str]] = Query(None, description="Filter by products"),
current_user: Dict[str, Any] = Depends(get_current_user_dep),
sales_service: SalesService = Depends(get_sales_service)
):
"""Export sales data using repository pattern"""
try:
logger.info("Exporting sales data with repository pattern",
tenant_id=tenant_id,
format=export_format,
user_id=current_user["user_id"])
export_result = await sales_service.export_sales_data(
str(tenant_id), export_format, start_date, end_date, products
)
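        # The service is assumed to return an empty/None result when no rows match the filters; map that to 404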
if not export_result:
raise HTTPException(status_code=404, detail="No data found for export")
# Publish export event
try:
await publish_export_completed({
"tenant_id": str(tenant_id),
"format": export_format,
"exported_by": current_user["user_id"],
"record_count": export_result.get("record_count", 0),
"timestamp": datetime.utcnow().isoformat()
})
except Exception as pub_error:
logger.warning("Failed to publish export event", error=str(pub_error))
logger.info("Export completed successfully using repository",
tenant_id=tenant_id,
format=export_format)
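        # The export payload is already fully built, so stream it as a single chunk with download headers attached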
return StreamingResponse(
iter([export_result["content"]]),
media_type=export_result["media_type"],
headers={"Content-Disposition": f"attachment; filename={export_result['filename']}"}
)
except HTTPException:
raise
except Exception as e:
logger.error("Failed to export sales data",
error=str(e),
tenant_id=tenant_id)
raise HTTPException(status_code=500, detail=f"Failed to export sales data: {str(e)}")
@router.delete("/tenants/{tenant_id}/sales/{record_id}")
async def delete_sales_record(
tenant_id: UUID = Path(..., description="Tenant ID"),
record_id: str = Path(..., description="Sales record ID"),
current_user: Dict[str, Any] = Depends(get_current_user_dep),
sales_service: SalesService = Depends(get_sales_service)
):
"""Delete a sales record using repository pattern"""
try:
logger.info("Deleting sales record with repository pattern",
record_id=record_id,
tenant_id=tenant_id,
user_id=current_user["user_id"])
success = await sales_service.delete_sales_record(record_id, str(tenant_id))
if not success:
raise HTTPException(status_code=404, detail="Sales record not found")
logger.info("Sales record deleted successfully using repository",
record_id=record_id,
tenant_id=tenant_id)
return {"status": "success", "message": "Sales record deleted successfully"}
except HTTPException:
raise
except Exception as e:
logger.error("Failed to delete sales record",
error=str(e),
tenant_id=tenant_id)
raise HTTPException(status_code=500, detail=f"Failed to delete sales record: {str(e)}")
@router.get("/tenants/{tenant_id}/sales/products")
async def get_products_list(
tenant_id: UUID = Path(..., description="Tenant ID"),
current_user: Dict[str, Any] = Depends(get_current_user_dep),
sales_service: SalesService = Depends(get_sales_service)
):
"""Get list of products using repository pattern"""
try:
logger.debug("Getting products list with repository pattern", tenant_id=tenant_id)
products = await sales_service.get_products_list(str(tenant_id))
logger.debug("Products list retrieved using repository",
count=len(products),
tenant_id=tenant_id)
return products
except Exception as e:
logger.error("Failed to get products list",
error=str(e),
tenant_id=tenant_id)
raise HTTPException(status_code=500, detail=f"Failed to get products list: {str(e)}")
@router.get("/tenants/{tenant_id}/sales/statistics")
async def get_sales_statistics(
tenant_id: UUID = Path(..., description="Tenant ID"),
current_user: Dict[str, Any] = Depends(get_current_user_dep),
sales_service: SalesService = Depends(get_sales_service)
):
"""Get comprehensive sales statistics using repository pattern"""
try:
logger.debug("Getting sales statistics with repository pattern", tenant_id=tenant_id)
# Get analytics which includes comprehensive statistics
analytics = await sales_service.get_sales_analytics(str(tenant_id))
# Create enhanced statistics response
statistics = {
"tenant_id": str(tenant_id),
"analytics": analytics,
"generated_at": datetime.utcnow().isoformat()
}
logger.debug("Sales statistics retrieved using repository", tenant_id=tenant_id)
return statistics
except Exception as e:
logger.error("Failed to get sales statistics",
error=str(e),
tenant_id=tenant_id)
raise HTTPException(status_code=500, detail=f"Failed to get statistics: {str(e)}")