REFACTOR - Database logic

This commit is contained in:
Urtzi Alfaro
2025-08-08 09:08:41 +02:00
parent 0154365bfc
commit 488bb3ef93
113 changed files with 22842 additions and 6503 deletions

View File

@@ -1,18 +1,15 @@
# ================================================================
# services/data/app/api/sales.py - FIXED FOR NEW TENANT-SCOPED ARCHITECTURE
# ================================================================
"""Sales data API endpoints with tenant-scoped URLs"""
"""
Enhanced Sales API Endpoints
Updated to use repository pattern and enhanced services with dependency injection
"""
from fastapi import APIRouter, Depends, HTTPException, UploadFile, File, Form, Query, Response, Path
from fastapi.responses import StreamingResponse
from sqlalchemy.ext.asyncio import AsyncSession
from typing import List, Optional, Dict, Any
from uuid import UUID
from datetime import datetime
import base64
import structlog
from app.core.database import get_db
from app.schemas.sales import (
SalesDataCreate,
SalesDataResponse,
@@ -20,50 +17,61 @@ from app.schemas.sales import (
SalesDataImport,
SalesImportResult,
SalesValidationResult,
SalesValidationRequest,
SalesExportRequest
)
from app.services.sales_service import SalesService
from app.services.data_import_service import DataImportService
from app.services.data_import_service import EnhancedDataImportService
from app.services.messaging import (
publish_sales_created,
publish_data_imported,
publish_export_completed
)
# Import unified authentication from shared library
from shared.database.base import create_database_manager
from shared.auth.decorators import get_current_user_dep
router = APIRouter(tags=["sales"])
router = APIRouter(tags=["enhanced-sales"])
logger = structlog.get_logger()
# ================================================================
# TENANT-SCOPED SALES ENDPOINTS
# ================================================================
def get_sales_service():
    """FastAPI dependency factory for SalesService.

    Builds a database manager from the configured DATABASE_URL and wraps
    it in a SalesService instance. A fresh manager is created per call.
    """
    from app.core.config import settings

    manager = create_database_manager(settings.DATABASE_URL, "data-service")
    return SalesService(manager)
def get_import_service():
    """FastAPI dependency factory for EnhancedDataImportService.

    Mirrors get_sales_service: wires a database manager built from the
    configured DATABASE_URL into the enhanced import service.
    """
    from app.core.config import settings

    manager = create_database_manager(settings.DATABASE_URL, "data-service")
    return EnhancedDataImportService(manager)
@router.post("/tenants/{tenant_id}/sales", response_model=SalesDataResponse)
async def create_sales_record(
    sales_data: SalesDataCreate,
    tenant_id: UUID = Path(..., description="Tenant ID"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    sales_service: SalesService = Depends(get_sales_service)
):
    """Create a new sales record using repository pattern.

    The payload's tenant_id is overwritten with the URL path value
    (the gateway has already verified the caller's access to it).

    Raises:
        HTTPException: 500 when the record cannot be created.
    """
    try:
        logger.info("Creating sales record with repository pattern",
                    product=sales_data.product_name,
                    quantity=sales_data.quantity_sold,
                    tenant_id=tenant_id,
                    user_id=current_user["user_id"])

        # Override tenant_id from URL path
        sales_data.tenant_id = tenant_id

        record = await sales_service.create_sales_record(sales_data, str(tenant_id))

        # Publish event (non-blocking) — a messaging failure must not fail the API call.
        try:
            await publish_sales_created({
                "tenant_id": str(tenant_id),
                "product_name": sales_data.product_name,
                "quantity_sold": sales_data.quantity_sold,
                "revenue": sales_data.revenue,
                # NOTE(review): the next two keys were hidden by the diff hunk;
                # reconstructed to match the bulk-create event payload — confirm.
                "created_by": current_user["user_id"],
                "timestamp": datetime.utcnow().isoformat()
            })
        except Exception as pub_error:
            logger.warning("Failed to publish sales created event", error=str(pub_error))

        logger.info("Successfully created sales record using repository",
                    record_id=record.id,
                    tenant_id=tenant_id)
        return record
    except Exception as e:
        logger.error("Failed to create sales record",
                     error=str(e),
                     tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to create sales record: {str(e)}")
@router.post("/tenants/{tenant_id}/sales/bulk", response_model=List[SalesDataResponse])
async def create_bulk_sales(
    sales_data: List[SalesDataCreate],
    tenant_id: UUID = Path(..., description="Tenant ID"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """Create multiple sales records for tenant"""
    try:
        logger.debug("Creating bulk sales records",
                     count=len(sales_data),
                     tenant_id=tenant_id)

        # Every record is forced onto the tenant from the URL path.
        for item in sales_data:
            item.tenant_id = tenant_id

        records = await SalesService.create_bulk_sales(sales_data, db)

        # Best-effort event publication; failures are logged, not raised.
        try:
            await publish_data_imported({
                "tenant_id": tenant_id,
                "type": "bulk_create",
                "records_created": len(records),
                "created_by": current_user["user_id"],
                "timestamp": datetime.utcnow().isoformat()
            })
        except Exception as pub_error:
            logger.warning("Failed to publish bulk import event", error=str(pub_error))

        logger.info("Successfully created bulk sales records",
                    count=len(records),
                    tenant_id=tenant_id)
        return records
    except Exception as e:
        logger.error("Failed to create bulk sales records",
                     error=str(e),
                     tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to create bulk sales records: {str(e)}")
# NOTE(review): this span is raw diff output — the "@@" hunk headers below are
# diff artifacts, and several lines appear twice (old db-session variant next to
# the new repository-pattern variant). It is not runnable as-is; the hunks also
# hide some signature parameters and SalesDataQuery fields, so only the visible
# lines are annotated here.
@router.get("/tenants/{tenant_id}/sales", response_model=List[SalesDataResponse])
async def get_sales_data(
@@ -134,10 +100,8 @@ async def get_sales_data(
start_date: Optional[datetime] = Query(None, description="Start date filter"),
end_date: Optional[datetime] = Query(None, description="End date filter"),
product_name: Optional[str] = Query(None, description="Product name filter"),
# ✅ FIX: Add missing pagination parameters
limit: Optional[int] = Query(1000, le=5000, description="Maximum number of records to return"),
offset: Optional[int] = Query(0, ge=0, description="Number of records to skip"),
# ✅ FIX: Add additional filtering parameters
product_names: Optional[List[str]] = Query(None, description="Multiple product name filters"),
location_ids: Optional[List[str]] = Query(None, description="Location ID filters"),
sources: Optional[List[str]] = Query(None, description="Source filters"),
@@ -146,19 +110,18 @@ async def get_sales_data(
min_revenue: Optional[float] = Query(None, description="Minimum revenue filter"),
max_revenue: Optional[float] = Query(None, description="Maximum revenue filter"),
current_user: Dict[str, Any] = Depends(get_current_user_dep),
# NOTE(review): old dependency (removed) and new dependency (added) — keep only
# the sales_service line in the resolved file.
db: AsyncSession = Depends(get_db)
sales_service: SalesService = Depends(get_sales_service)
):
# NOTE(review): duplicate docstrings — old version first, new version second.
"""Get sales data for tenant with filters and pagination"""
"""Get sales data using repository pattern with enhanced filtering"""
try:
# NOTE(review): duplicate logger.debug headers — second line is the new one.
logger.debug("Querying sales data",
logger.debug("Querying sales data with repository pattern",
tenant_id=tenant_id,
start_date=start_date,
end_date=end_date,
product_name=product_name,
limit=limit,
offset=offset)
# ✅ FIX: Create complete SalesDataQuery with all parameters
# Create enhanced query
query = SalesDataQuery(
tenant_id=tenant_id,
start_date=start_date,
@@ -170,17 +133,15 @@ async def get_sales_data(
max_quantity=max_quantity,
min_revenue=min_revenue,
max_revenue=max_revenue,
# NOTE(review): duplicate limit/offset pairs — the uncommented pair is the new one.
limit=limit, # ✅ Now properly passed from query params
offset=offset # ✅ Now properly passed from query params
limit=limit,
offset=offset
)
# NOTE(review): old static call with db session vs. new instance call on the
# injected service — the second line is the resolved version.
records = await SalesService.get_sales_data(query, db)
records = await sales_service.get_sales_data(query)
logger.debug("Successfully retrieved sales data",
logger.debug("Successfully retrieved sales data using repository",
count=len(records),
tenant_id=tenant_id,
limit=limit,
offset=offset)
tenant_id=tenant_id)
return records
except Exception as e:
@@ -189,17 +150,78 @@ async def get_sales_data(
tenant_id=tenant_id)
raise HTTPException(status_code=500, detail=f"Failed to query sales data: {str(e)}")
@router.get("/tenants/{tenant_id}/sales/analytics")
async def get_sales_analytics(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    start_date: Optional[datetime] = Query(None, description="Start date"),
    end_date: Optional[datetime] = Query(None, description="End date"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    sales_service: SalesService = Depends(get_sales_service)
):
    """Return tenant sales analytics computed via the repository-backed service."""
    try:
        logger.debug("Getting sales analytics with repository pattern",
                     tenant_id=tenant_id,
                     start_date=start_date,
                     end_date=end_date)

        result = await sales_service.get_sales_analytics(str(tenant_id), start_date, end_date)

        logger.debug("Analytics generated successfully using repository", tenant_id=tenant_id)
        return result
    except Exception as e:
        logger.error("Failed to generate sales analytics",
                     error=str(e),
                     tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to generate analytics: {str(e)}")
@router.get("/tenants/{tenant_id}/sales/aggregation")
async def get_sales_aggregation(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    start_date: Optional[datetime] = Query(None, description="Start date"),
    end_date: Optional[datetime] = Query(None, description="End date"),
    group_by: str = Query("daily", description="Aggregation period: daily, weekly, monthly"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    sales_service: SalesService = Depends(get_sales_service)
):
    """Return sales aggregated by the requested period for the tenant."""
    try:
        logger.debug("Getting sales aggregation with repository pattern",
                     tenant_id=tenant_id,
                     group_by=group_by)

        result = await sales_service.get_sales_aggregation(
            str(tenant_id), start_date, end_date, group_by
        )

        logger.debug("Aggregation generated successfully using repository",
                     tenant_id=tenant_id,
                     group_by=group_by)
        return result
    except Exception as e:
        logger.error("Failed to get sales aggregation",
                     error=str(e),
                     tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to get aggregation: {str(e)}")
@router.post("/tenants/{tenant_id}/sales/import", response_model=SalesImportResult)
async def import_sales_data(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    file: UploadFile = File(...),
    file_format: str = Form(...),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    import_service: EnhancedDataImportService = Depends(get_import_service)
):
    """Import sales data using enhanced repository pattern.

    Reads the uploaded file, hands it to the enhanced import service, and
    publishes a best-effort "data imported" event on success.

    Raises:
        HTTPException: 500 when the import fails.
    """
    try:
        logger.info("Importing sales data with enhanced repository pattern",
                    tenant_id=tenant_id,
                    format=file_format,
                    filename=file.filename,
                    user_id=current_user["user_id"])

        # Read and decode the upload — assumes UTF-8 text content (TODO confirm).
        content = await file.read()
        file_content = content.decode('utf-8')

        # Process using enhanced import service
        result = await import_service.process_import(
            str(tenant_id),
            file_content,
            file_format,
            filename=file.filename
        )

        if result.success:
            # Publish event (best effort — failures are logged, not raised).
            try:
                await publish_data_imported({
                    "tenant_id": str(tenant_id),
                    "type": "file_import",
                    "format": file_format,
                    "filename": file.filename,
                    "records_created": result.records_created,
                    "imported_by": current_user["user_id"],
                    "timestamp": datetime.utcnow().isoformat()
                })
            except Exception as pub_error:
                logger.warning("Failed to publish import event", error=str(pub_error))

        logger.info("Import completed with enhanced repository pattern",
                    success=result.success,
                    records_created=result.records_created,
                    tenant_id=tenant_id)
        return result
    except Exception as e:
        logger.error("Failed to import sales data",
                     error=str(e),
                     tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to import sales data: {str(e)}")
@router.post("/tenants/{tenant_id}/sales/import/validate", response_model=SalesValidationResult)
async def validate_import_data(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    # NOTE(review): this parameter was hidden by the diff hunk; reconstructed
    # from the file.filename / file.read() usage below — confirm.
    file: UploadFile = File(...),
    file_format: str = Form(default="csv", description="File format: csv, json, excel"),
    validate_only: bool = Form(default=True, description="Only validate, don't import"),
    source: str = Form(default="onboarding_upload", description="Source of the upload"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    import_service: EnhancedDataImportService = Depends(get_import_service)
):
    """Validate import data using enhanced repository pattern.

    Uses the same FormData shape as the import endpoint so that clients can
    validate a file before committing to an import.

    Raises:
        HTTPException: 500 when validation itself fails.
    """
    try:
        logger.info("Validating import data with enhanced repository pattern",
                    tenant_id=tenant_id,
                    format=file_format,
                    filename=file.filename,
                    user_id=current_user["user_id"])

        # Read file content (assumes UTF-8 text — TODO confirm)
        content = await file.read()
        file_content = content.decode('utf-8')

        # Create validation data structure
        validation_data = {
            "tenant_id": str(tenant_id),
            "data": file_content,
            "data_format": file_format,
            "source": source,
            "validate_only": validate_only
        }

        # Use enhanced validation service
        validation_result = await import_service.validate_import_data(validation_data)

        logger.info("Validation completed with enhanced repository pattern",
                    is_valid=validation_result.is_valid,
                    total_records=validation_result.total_records,
                    tenant_id=tenant_id)
        return validation_result
    except Exception as e:
        logger.error("Failed to validate import data",
                     error=str(e),
                     tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to validate import data: {str(e)}")
@router.get("/tenants/{tenant_id}/sales/import/template/{format_type}")
async def get_import_template(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    format_type: str = Path(..., description="Template format: csv, json, excel"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep)
):
    """Get import template for specified format"""
    try:
        logger.debug("Getting import template",
                     format=format_type,
                     tenant_id=tenant_id,
                     user_id=current_user["user_id"])

        template = await DataImportService.get_import_template(format_type)
        if "error" in template:
            logger.warning("Template generation error", error=template["error"])
            raise HTTPException(status_code=400, detail=template["error"])

        logger.debug("Template generated successfully",
                     format=format_type,
                     tenant_id=tenant_id)

        # All downloadable variants share the same attachment disposition header.
        fmt = format_type.lower()
        disposition = {"Content-Disposition": f"attachment; filename={template['filename']}"}
        if fmt == "csv":
            return Response(content=template["template"],
                            media_type="text/csv",
                            headers=disposition)
        if fmt == "json":
            return Response(content=template["template"],
                            media_type="application/json",
                            headers=disposition)
        if fmt in ("excel", "xlsx"):
            # Excel templates arrive base64-encoded from the service.
            return Response(content=base64.b64decode(template["template"]),
                            media_type=template["content_type"],
                            headers=disposition)
        # Unknown formats fall through to the raw template payload.
        return template
    except HTTPException:
        raise
    except Exception as e:
        logger.error("Failed to generate import template",
                     error=str(e),
                     tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to generate template: {str(e)}")
@router.post("/tenants/{tenant_id}/sales/import/validate-json", response_model=SalesValidationResult)
async def validate_import_data_json(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    request: SalesValidationRequest = ...,
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    import_service: EnhancedDataImportService = Depends(get_import_service)
):
    """Validate import data from JSON request for onboarding flow.

    JSON-body counterpart of the FormData validation endpoint: the data to
    validate arrives inline in the request instead of as a file upload.

    Raises:
        HTTPException: 500 when validation itself fails.
    """
    try:
        logger.info("Starting JSON-based data validation",
                    tenant_id=str(tenant_id),
                    data_format=request.data_format,
                    data_length=len(request.data),
                    validate_only=request.validate_only)

        # Create validation data structure
        validation_data = {
            "tenant_id": str(tenant_id),
            "data": request.data,  # Fixed: use 'data' not 'content'
            "data_format": request.data_format,
            "filename": f"onboarding_data.{request.data_format}",
            "source": request.source,
            "validate_only": request.validate_only
        }

        # Use enhanced validation service
        validation_result = await import_service.validate_import_data(validation_data)

        logger.info("JSON validation completed",
                    is_valid=validation_result.is_valid,
                    total_records=validation_result.total_records,
                    tenant_id=tenant_id)
        return validation_result
    except Exception as e:
        logger.error("Failed to validate JSON import data",
                     error=str(e),
                     tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to validate import data: {str(e)}")
# NOTE(review): this span is raw diff output — the "@@" hunk headers are diff
# artifacts and old/new line pairs appear side by side. The hunks hide the
# leading signature parameters, the not-found branch, and the response/except
# tail, so only the visible lines are annotated here.
@router.post("/tenants/{tenant_id}/sales/export")
async def export_sales_data(
@@ -388,17 +366,17 @@ async def export_sales_data(
end_date: Optional[datetime] = Query(None, description="End date"),
products: Optional[List[str]] = Query(None, description="Filter by products"),
current_user: Dict[str, Any] = Depends(get_current_user_dep),
# NOTE(review): old db dependency (removed) vs. new injected service (added) —
# keep only the sales_service line in the resolved file.
db: AsyncSession = Depends(get_db)
sales_service: SalesService = Depends(get_sales_service)
):
# NOTE(review): duplicate docstrings — old first, new second.
"""Export sales data in specified format for tenant"""
"""Export sales data using repository pattern"""
try:
logger.info("Exporting sales data",
logger.info("Exporting sales data with repository pattern",
tenant_id=tenant_id,
format=export_format,
user_id=current_user["user_id"])
# NOTE(review): old static call with db vs. new instance call with str(tenant_id).
export_result = await SalesService.export_sales_data(
tenant_id, export_format, start_date, end_date, products, db
export_result = await sales_service.export_sales_data(
str(tenant_id), export_format, start_date, end_date, products
)
if not export_result:
@@ -407,7 +385,7 @@ async def export_sales_data(
# Publish export event
try:
await publish_export_completed({
"tenant_id": tenant_id,
"tenant_id": str(tenant_id),
"format": export_format,
"exported_by": current_user["user_id"],
"record_count": export_result.get("record_count", 0),
@@ -416,7 +394,7 @@ async def export_sales_data(
except Exception as pub_error:
logger.warning("Failed to publish export event", error=str(pub_error))
logger.info("Export completed successfully",
logger.info("Export completed successfully using repository",
tenant_id=tenant_id,
format=export_format)
@@ -434,31 +412,27 @@ async def export_sales_data(
tenant_id=tenant_id)
raise HTTPException(status_code=500, detail=f"Failed to export sales data: {str(e)}")
@router.delete("/tenants/{tenant_id}/sales/{record_id}")
async def delete_sales_record(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    record_id: str = Path(..., description="Sales record ID"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    sales_service: SalesService = Depends(get_sales_service)
):
    """Delete a sales record using repository pattern.

    Tenant scoping is enforced inside the service call (the old explicit
    pre-fetch/ownership check was removed in the refactor).

    Raises:
        HTTPException: 404 when the record does not exist for this tenant,
            500 on any other failure.
    """
    try:
        logger.info("Deleting sales record with repository pattern",
                    record_id=record_id,
                    tenant_id=tenant_id,
                    user_id=current_user["user_id"])

        success = await sales_service.delete_sales_record(record_id, str(tenant_id))
        if not success:
            raise HTTPException(status_code=404, detail="Sales record not found")

        logger.info("Sales record deleted successfully using repository",
                    record_id=record_id,
                    tenant_id=tenant_id)
        return {"status": "success", "message": "Sales record deleted successfully"}
    # NOTE(review): the re-raise clause was hidden by the diff hunk; it is
    # required so the 404 above is not converted into a 500 — confirm.
    except HTTPException:
        raise
    except Exception as e:
        logger.error("Failed to delete sales record",
                     error=str(e),
                     tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to delete sales record: {str(e)}")
@router.get("/tenants/{tenant_id}/sales/summary")
async def get_sales_summary(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    period: str = Query("daily", description="Summary period: daily, weekly, monthly"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """Get sales summary for specified period for tenant"""
    try:
        logger.debug("Getting sales summary",
                     tenant_id=tenant_id,
                     period=period)

        result = await SalesService.get_sales_summary(tenant_id, period, db)

        logger.debug("Summary generated successfully", tenant_id=tenant_id)
        return result
    except Exception as e:
        logger.error("Failed to generate sales summary",
                     error=str(e),
                     tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to generate summary: {str(e)}")
@router.get("/tenants/{tenant_id}/sales/products")
async def get_products_list(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    sales_service: SalesService = Depends(get_sales_service)
):
    """Get list of products using repository pattern.

    Raises:
        HTTPException: 500 when the product list cannot be retrieved.
    """
    try:
        logger.debug("Getting products list with repository pattern", tenant_id=tenant_id)

        products = await sales_service.get_products_list(str(tenant_id))

        logger.debug("Products list retrieved using repository",
                     count=len(products),
                     tenant_id=tenant_id)
        return products
    except Exception as e:
        # NOTE(review): the error-log message was hidden by the diff hunk;
        # reconstructed from the HTTP detail text below — confirm.
        logger.error("Failed to get products list",
                     error=str(e),
                     tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to get products list: {str(e)}")
@router.get("/tenants/{tenant_id}/sales/{record_id}", response_model=SalesDataResponse)
async def get_sales_record(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    record_id: str = Path(..., description="Sales record ID"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """Get a specific sales record for tenant"""
    try:
        logger.debug("Getting sales record",
                     record_id=record_id,
                     tenant_id=tenant_id)

        record = await SalesService.get_sales_record(record_id, db)
        # A record belonging to another tenant is reported as missing, not forbidden.
        if not record or record.tenant_id != tenant_id:
            raise HTTPException(status_code=404, detail="Sales record not found")

        logger.debug("Sales record retrieved",
                     record_id=record_id,
                     tenant_id=tenant_id)
        return record
    except HTTPException:
        raise
    except Exception as e:
        logger.error("Failed to get sales record",
                     error=str(e),
                     tenant_id=tenant_id,
                     record_id=record_id)
        raise HTTPException(status_code=500, detail=f"Failed to get sales record: {str(e)}")
@router.get("/tenants/{tenant_id}/sales/statistics")
async def get_sales_statistics(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    sales_service: SalesService = Depends(get_sales_service)
):
    """Get comprehensive sales statistics using repository pattern.

    Wraps the service's analytics output in a small envelope with the
    tenant id and a generation timestamp.

    Raises:
        HTTPException: 500 when statistics cannot be produced.
    """
    try:
        logger.debug("Getting sales statistics with repository pattern", tenant_id=tenant_id)

        # Get analytics which includes comprehensive statistics
        analytics = await sales_service.get_sales_analytics(str(tenant_id))

        # Create enhanced statistics response
        statistics = {
            "tenant_id": str(tenant_id),
            "analytics": analytics,
            "generated_at": datetime.utcnow().isoformat()
        }

        logger.debug("Sales statistics retrieved using repository", tenant_id=tenant_id)
        return statistics
    except Exception as e:
        logger.error("Failed to get sales statistics",
                     error=str(e),
                     tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to get statistics: {str(e)}")