# ================================================================
# services/data/app/api/sales.py - UPDATED WITH UNIFIED AUTH
# ================================================================
"""Sales data API endpoints with unified authentication"""

from fastapi import APIRouter, Depends, HTTPException, UploadFile, File, Form, Query, Response
from fastapi.responses import StreamingResponse
from sqlalchemy.ext.asyncio import AsyncSession
from typing import List, Optional, Dict, Any
import uuid
from datetime import datetime
import base64
import structlog

from app.core.database import get_db
from app.schemas.sales import (
    SalesDataCreate,
    SalesDataResponse,
    SalesDataQuery,
    SalesDataImport,
    SalesImportResult,
    SalesValidationResult,
    SalesExportRequest
)
from app.services.sales_service import SalesService
from app.services.data_import_service import DataImportService
from app.services.messaging import (
    publish_sales_created,
    publish_data_imported,
    publish_export_completed
)

# Import unified authentication from shared library
from shared.auth.decorators import (
    get_current_user_dep,
    get_current_tenant_id_dep
)
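
# The two dependencies above come from the project's shared auth library and are
# used on every endpoint below to resolve the caller and the tenant. As a purely
# illustrative sketch (not the actual shared.auth implementation, which may
# validate a JWT instead of reading headers), such FastAPI dependencies often
# look like:
#
#     async def get_current_tenant_id_dep(x_tenant_id: str = Header(...)) -> str:
#         return x_tenant_id
#
#     async def get_current_user_dep(authorization: str = Header(...)) -> Dict[str, Any]:
#         return decode_token(authorization)  # hypothetical helper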

router = APIRouter(tags=["sales"])
logger = structlog.get_logger()


@router.post("/", response_model=SalesDataResponse)
async def create_sales_record(
sales_data: SalesDataCreate,
2025-07-20 07:24:04 +02:00
tenant_id: str = Depends(get_current_tenant_id_dep),
current_user: Dict[str, Any] = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
2025-07-18 11:51:43 +02:00
):
"""Create a new sales record"""
try:
2025-07-20 07:24:04 +02:00
logger.debug("Creating sales record",
product=sales_data.product_name,
quantity=sales_data.quantity_sold,
tenant_id=tenant_id,
user_id=current_user["user_id"])
# Override tenant_id from token/header
sales_data.tenant_id = tenant_id
2025-07-19 12:09:10 +02:00
2025-07-18 11:51:43 +02:00
record = await SalesService.create_sales_record(sales_data, db)
2025-07-20 07:24:04 +02:00
# Publish event (non-blocking)
2025-07-19 12:09:10 +02:00
try:
2025-07-19 12:51:28 +02:00
await publish_sales_created({
2025-07-20 07:24:04 +02:00
"tenant_id": tenant_id,
2025-07-19 12:09:10 +02:00
"product_name": sales_data.product_name,
"quantity_sold": sales_data.quantity_sold,
"revenue": sales_data.revenue,
"source": sales_data.source,
2025-07-20 07:24:04 +02:00
"created_by": current_user["user_id"],
2025-07-19 12:09:10 +02:00
"timestamp": datetime.utcnow().isoformat()
})
except Exception as pub_error:
logger.warning("Failed to publish sales created event", error=str(pub_error))
2025-07-20 07:24:04 +02:00
# Continue - event failure shouldn't break API
2025-07-18 11:51:43 +02:00
2025-07-20 07:24:04 +02:00
logger.info("Successfully created sales record",
record_id=record.id,
tenant_id=tenant_id)
2025-07-18 11:51:43 +02:00
return record
2025-07-19 12:09:10 +02:00
2025-07-18 11:51:43 +02:00
except Exception as e:
2025-07-20 07:24:04 +02:00
logger.error("Failed to create sales record",
error=str(e),
tenant_id=tenant_id)
2025-07-19 12:09:10 +02:00
raise HTTPException(status_code=500, detail=f"Failed to create sales record: {str(e)}")
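
# Example call (illustrative only -- the field names are taken from the attributes
# referenced above; SalesDataCreate may define additional required fields, and the
# final URL depends on how this router is mounted):
#
#     await client.post("/", headers={"Authorization": "Bearer <token>"},
#                       json={"product_name": "Baguette", "quantity_sold": 12,
#                             "revenue": 30.0, "source": "pos"})

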
@router.post("/bulk", response_model=List[SalesDataResponse])
async def create_bulk_sales(
    sales_data: List[SalesDataCreate],
    tenant_id: str = Depends(get_current_tenant_id_dep),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """Create multiple sales records"""
    try:
        logger.debug("Creating bulk sales records",
                     count=len(sales_data),
                     tenant_id=tenant_id)

        # Override tenant_id for all records
        for record in sales_data:
            record.tenant_id = tenant_id

        records = await SalesService.create_bulk_sales(sales_data, db)

        # Publish event
        try:
            await publish_data_imported({
                "tenant_id": tenant_id,
                "type": "bulk_create",
                "records_created": len(records),
                "created_by": current_user["user_id"],
                "timestamp": datetime.utcnow().isoformat()
            })
        except Exception as pub_error:
            logger.warning("Failed to publish bulk import event", error=str(pub_error))

        logger.info("Successfully created bulk sales records",
                    count=len(records),
                    tenant_id=tenant_id)

        return records

    except Exception as e:
        logger.error("Failed to create bulk sales records",
                     error=str(e),
                     tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to create bulk sales records: {str(e)}")


@router.get("/", response_model=List[SalesDataResponse])
2025-07-18 11:51:43 +02:00
async def get_sales_data(
2025-07-20 07:24:04 +02:00
start_date: Optional[datetime] = Query(None),
end_date: Optional[datetime] = Query(None),
product_name: Optional[str] = Query(None),
tenant_id: str = Depends(get_current_tenant_id_dep),
current_user: Dict[str, Any] = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
2025-07-18 11:51:43 +02:00
):
2025-07-20 07:24:04 +02:00
"""Get sales data with filters"""
2025-07-18 11:51:43 +02:00
try:
2025-07-20 07:24:04 +02:00
logger.debug("Querying sales data",
tenant_id=tenant_id,
start_date=start_date,
end_date=end_date,
product_name=product_name)
query = SalesDataQuery(
tenant_id=tenant_id,
start_date=start_date,
end_date=end_date,
product_name=product_name
)
2025-07-19 12:09:10 +02:00
2025-07-18 11:51:43 +02:00
records = await SalesService.get_sales_data(query, db)
2025-07-19 12:09:10 +02:00
2025-07-20 07:24:04 +02:00
logger.debug("Successfully retrieved sales data",
count=len(records),
tenant_id=tenant_id)
2025-07-18 11:51:43 +02:00
return records
2025-07-19 12:09:10 +02:00
2025-07-18 11:51:43 +02:00
except Exception as e:
2025-07-20 07:24:04 +02:00
logger.error("Failed to query sales data",
error=str(e),
tenant_id=tenant_id)
2025-07-19 12:09:10 +02:00
raise HTTPException(status_code=500, detail=f"Failed to query sales data: {str(e)}")
2025-07-18 11:51:43 +02:00
2025-07-19 12:09:10 +02:00
@router.post("/import", response_model=SalesImportResult)
2025-07-18 11:51:43 +02:00
async def import_sales_data(
file: UploadFile = File(...),
2025-07-20 07:24:04 +02:00
file_format: str = Form(...),
tenant_id: str = Depends(get_current_tenant_id_dep),
current_user: Dict[str, Any] = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
2025-07-18 11:51:43 +02:00
):
"""Import sales data from file"""
try:
2025-07-20 07:24:04 +02:00
logger.info("Importing sales data",
tenant_id=tenant_id,
format=file_format,
filename=file.filename,
user_id=current_user["user_id"])
2025-07-19 12:09:10 +02:00
2025-07-18 11:51:43 +02:00
# Read file content
content = await file.read()
file_content = content.decode('utf-8')
# Process import
result = await DataImportService.process_upload(
2025-07-20 07:24:04 +02:00
tenant_id,
file_content,
file_format,
db,
2025-07-23 18:57:27 +02:00
filename=file.filename
2025-07-18 11:51:43 +02:00
)
if result["success"]:
2025-07-20 07:24:04 +02:00
# Publish event
2025-07-19 12:09:10 +02:00
try:
2025-07-20 07:24:04 +02:00
await publish_data_imported({
2025-07-19 12:09:10 +02:00
"tenant_id": tenant_id,
2025-07-20 07:24:04 +02:00
"type": "file_import",
2025-07-19 12:09:10 +02:00
"format": file_format,
"filename": file.filename,
"records_created": result["records_created"],
2025-07-20 07:24:04 +02:00
"imported_by": current_user["user_id"],
2025-07-19 12:09:10 +02:00
"timestamp": datetime.utcnow().isoformat()
})
except Exception as pub_error:
2025-07-20 07:24:04 +02:00
logger.warning("Failed to publish import event", error=str(pub_error))
2025-07-19 12:09:10 +02:00
2025-07-20 07:24:04 +02:00
logger.info("Import completed",
success=result["success"],
records_created=result.get("records_created", 0),
tenant_id=tenant_id)
2025-07-18 11:51:43 +02:00
return result
except Exception as e:
2025-07-20 07:24:04 +02:00
logger.error("Failed to import sales data",
error=str(e),
tenant_id=tenant_id)
2025-07-19 12:09:10 +02:00
raise HTTPException(status_code=500, detail=f"Failed to import sales data: {str(e)}")
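
# Example upload with httpx (illustrative only -- the path prefix depends on how
# this router is mounted in the application):
#
#     await client.post("/import", headers={"Authorization": "Bearer <token>"},
#                       files={"file": ("sales.csv", open("sales.csv", "rb"), "text/csv")},
#                       data={"file_format": "csv"})

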
@router.post("/import/validate", response_model=SalesValidationResult)
async def validate_import_data(
    import_data: SalesDataImport,
    tenant_id: str = Depends(get_current_tenant_id_dep),
    current_user: Dict[str, Any] = Depends(get_current_user_dep)
):
    """Validate import data before processing"""
    try:
        logger.debug("Validating import data", tenant_id=tenant_id)

        # Override tenant_id
        import_data.tenant_id = tenant_id

        validation = await DataImportService.validate_import_data(
            import_data.model_dump()
        )

        logger.debug("Validation completed",
                     is_valid=validation.get("is_valid", False),
                     tenant_id=tenant_id)

        return validation

    except Exception as e:
        logger.error("Failed to validate import data",
                     error=str(e),
                     tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to validate import data: {str(e)}")


@router.get("/import/template/{format_type}")
async def get_import_template(
    format_type: str,
    current_user: Dict[str, Any] = Depends(get_current_user_dep)
):
    """Get import template for specified format"""
    try:
        logger.debug("Getting import template",
                     format=format_type,
                     user_id=current_user["user_id"])

        template = await DataImportService.get_import_template(format_type)

        if "error" in template:
            logger.warning("Template generation error", error=template["error"])
            raise HTTPException(status_code=400, detail=template["error"])

        logger.debug("Template generated successfully", format=format_type)

        if format_type.lower() == "csv":
            return Response(
                content=template["template"],
                media_type="text/csv",
                headers={"Content-Disposition": f"attachment; filename={template['filename']}"}
            )
        elif format_type.lower() == "json":
            return Response(
                content=template["template"],
                media_type="application/json",
                headers={"Content-Disposition": f"attachment; filename={template['filename']}"}
            )
        elif format_type.lower() in ["excel", "xlsx"]:
            return Response(
                content=base64.b64decode(template["template"]),
                media_type=template["content_type"],
                headers={"Content-Disposition": f"attachment; filename={template['filename']}"}
            )
        else:
            return template

    except HTTPException:
        raise
    except Exception as e:
        logger.error("Failed to generate import template", error=str(e))
        raise HTTPException(status_code=500, detail=f"Failed to generate template: {str(e)}")
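
# Example template download (illustrative only):
#
#     resp = await client.get("/import/template/csv",
#                             headers={"Authorization": "Bearer <token>"})
#     open("sales_template.csv", "wb").write(resp.content)

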
2025-07-18 11:51:43 +02:00
2025-07-20 07:24:04 +02:00
@router.get("/analytics")
2025-07-19 12:09:10 +02:00
async def get_sales_analytics(
start_date: Optional[datetime] = Query(None, description="Start date"),
end_date: Optional[datetime] = Query(None, description="End date"),
2025-07-20 07:24:04 +02:00
tenant_id: str = Depends(get_current_tenant_id_dep),
current_user: Dict[str, Any] = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
2025-07-18 11:51:43 +02:00
):
2025-07-19 12:09:10 +02:00
"""Get sales analytics for tenant"""
2025-07-18 11:51:43 +02:00
try:
2025-07-20 07:24:04 +02:00
logger.debug("Getting sales analytics",
tenant_id=tenant_id,
start_date=start_date,
end_date=end_date)
2025-07-18 11:51:43 +02:00
2025-07-19 12:09:10 +02:00
analytics = await SalesService.get_sales_analytics(
tenant_id, start_date, end_date, db
2025-07-18 11:51:43 +02:00
)
2025-07-19 12:09:10 +02:00
logger.debug("Analytics generated successfully", tenant_id=tenant_id)
return analytics
2025-07-18 11:51:43 +02:00
except Exception as e:
2025-07-20 07:24:04 +02:00
logger.error("Failed to generate sales analytics",
error=str(e),
tenant_id=tenant_id)
2025-07-19 12:09:10 +02:00
raise HTTPException(status_code=500, detail=f"Failed to generate analytics: {str(e)}")
@router.post("/export")
async def export_sales_data(
    export_format: str = Query("csv", description="Export format: csv, excel, json"),
    start_date: Optional[datetime] = Query(None, description="Start date"),
    end_date: Optional[datetime] = Query(None, description="End date"),
    products: Optional[List[str]] = Query(None, description="Filter by products"),
    tenant_id: str = Depends(get_current_tenant_id_dep),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """Export sales data in specified format"""
    try:
        logger.info("Exporting sales data",
                    tenant_id=tenant_id,
                    format=export_format,
                    user_id=current_user["user_id"])

        export_result = await SalesService.export_sales_data(
            tenant_id, export_format, start_date, end_date, products, db
        )

        if not export_result:
            raise HTTPException(status_code=404, detail="No data found for export")

        # Publish export event
        try:
            await publish_export_completed({
                "tenant_id": tenant_id,
                "format": export_format,
                "exported_by": current_user["user_id"],
                "record_count": export_result.get("record_count", 0),
                "timestamp": datetime.utcnow().isoformat()
            })
        except Exception as pub_error:
            logger.warning("Failed to publish export event", error=str(pub_error))

        logger.info("Export completed successfully",
                    tenant_id=tenant_id,
                    format=export_format)

        return StreamingResponse(
            iter([export_result["content"]]),
            media_type=export_result["media_type"],
            headers={"Content-Disposition": f"attachment; filename={export_result['filename']}"}
        )

    except HTTPException:
        raise
    except Exception as e:
        logger.error("Failed to export sales data",
                     error=str(e),
                     tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to export sales data: {str(e)}")
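
# Example export (illustrative only -- filters go in the query string and the
# response streams the exported file):
#
#     resp = await client.post("/export", params={"export_format": "csv"},
#                              headers={"Authorization": "Bearer <token>"})
#     open("sales_export.csv", "wb").write(resp.content)

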
@router.delete("/{record_id}")
async def delete_sales_record(
    record_id: str,
    tenant_id: str = Depends(get_current_tenant_id_dep),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """Delete a sales record"""
    try:
        logger.info("Deleting sales record",
                    record_id=record_id,
                    tenant_id=tenant_id,
                    user_id=current_user["user_id"])

        # Verify record belongs to tenant before deletion
        record = await SalesService.get_sales_record(record_id, db)
        if not record or record.tenant_id != tenant_id:
            raise HTTPException(status_code=404, detail="Sales record not found")

        success = await SalesService.delete_sales_record(record_id, db)

        if not success:
            raise HTTPException(status_code=404, detail="Sales record not found")

        logger.info("Sales record deleted successfully",
                    record_id=record_id,
                    tenant_id=tenant_id)

        return {"status": "success", "message": "Sales record deleted successfully"}

    except HTTPException:
        raise
    except Exception as e:
        logger.error("Failed to delete sales record",
                     error=str(e),
                     tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to delete sales record: {str(e)}")


@router.get("/summary")
async def get_sales_summary(
period: str = Query("daily", description="Summary period: daily, weekly, monthly"),
tenant_id: str = Depends(get_current_tenant_id_dep),
current_user: Dict[str, Any] = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""Get sales summary for specified period"""
try:
logger.debug("Getting sales summary",
tenant_id=tenant_id,
period=period)
summary = await SalesService.get_sales_summary(tenant_id, period, db)
logger.debug("Summary generated successfully", tenant_id=tenant_id)
return summary
except Exception as e:
logger.error("Failed to generate sales summary",
error=str(e),
tenant_id=tenant_id)
raise HTTPException(status_code=500, detail=f"Failed to generate summary: {str(e)}")
@router.get("/products")
async def get_products_list(
tenant_id: str = Depends(get_current_tenant_id_dep),
current_user: Dict[str, Any] = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""Get list of all products with sales data"""
try:
logger.debug("Getting products list", tenant_id=tenant_id)
products = await SalesService.get_products_list(tenant_id, db)
logger.debug("Products list retrieved",
count=len(products),
tenant_id=tenant_id)
return products
except Exception as e:
logger.error("Failed to get products list",
error=str(e),
tenant_id=tenant_id)
raise HTTPException(status_code=500, detail=f"Failed to get products list: {str(e)}")