"""
Enhanced Sales API Endpoints

Updated to use repository pattern and enhanced services with dependency injection
"""
|
2025-07-18 11:51:43 +02:00
|
|
|
|
2025-07-26 18:46:52 +02:00
|
|
|
from fastapi import APIRouter, Depends, HTTPException, UploadFile, File, Form, Query, Response, Path
|
2025-07-19 12:09:10 +02:00
|
|
|
from fastapi.responses import StreamingResponse
|
2025-07-20 07:24:04 +02:00
|
|
|
from typing import List, Optional, Dict, Any
|
2025-07-26 18:46:52 +02:00
|
|
|
from uuid import UUID
|
2025-07-18 11:51:43 +02:00
|
|
|
from datetime import datetime
|
2025-07-19 12:09:10 +02:00
|
|
|
import structlog
|
2025-07-18 11:51:43 +02:00
|
|
|
|
|
|
|
|
from app.schemas.sales import (
|
|
|
|
|
SalesDataCreate,
|
|
|
|
|
SalesDataResponse,
|
|
|
|
|
SalesDataQuery,
|
2025-07-19 12:09:10 +02:00
|
|
|
SalesDataImport,
|
|
|
|
|
SalesImportResult,
|
|
|
|
|
SalesValidationResult,
|
2025-08-08 09:08:41 +02:00
|
|
|
SalesValidationRequest,
|
2025-07-19 12:09:10 +02:00
|
|
|
SalesExportRequest
|
2025-07-18 11:51:43 +02:00
|
|
|
)
|
2025-07-20 07:24:04 +02:00
|
|
|
from app.services.sales_service import SalesService
|
2025-08-08 09:08:41 +02:00
|
|
|
from app.services.data_import_service import EnhancedDataImportService
|
2025-07-20 07:24:04 +02:00
|
|
|
from app.services.messaging import (
|
|
|
|
|
publish_sales_created,
|
|
|
|
|
publish_data_imported,
|
|
|
|
|
publish_export_completed
|
|
|
|
|
)
|
2025-08-08 09:08:41 +02:00
|
|
|
from shared.database.base import create_database_manager
|
2025-07-26 18:46:52 +02:00
|
|
|
from shared.auth.decorators import get_current_user_dep
|
2025-07-20 07:24:04 +02:00
|
|
|
|
2025-08-08 09:08:41 +02:00
|
|
|
router = APIRouter(tags=["enhanced-sales"])
|
2025-07-19 12:09:10 +02:00
|
|
|
logger = structlog.get_logger()
|
2025-07-18 11:51:43 +02:00
|
|
|
|
2025-08-08 09:08:41 +02:00
|
|
|
|
|
|
|
|
def get_sales_service():
    """Dependency provider for SalesService.

    The database manager is created once and cached on the function object so
    repeated requests do not rebuild it; a fresh SalesService wrapper is still
    returned per call, matching the original per-request service semantics.
    """
    from app.core.config import settings

    manager = getattr(get_sales_service, "_db_manager", None)
    if manager is None:
        # create_database_manager presumably builds an engine/connection pool —
        # cache it to avoid per-request construction cost (NOTE(review):
        # confirm the manager is safe to share across requests).
        manager = create_database_manager(settings.DATABASE_URL, "data-service")
        get_sales_service._db_manager = manager
    return SalesService(manager)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def get_import_service():
    """Dependency provider for EnhancedDataImportService.

    Caches the database manager on first use so each request does not rebuild
    an engine/connection pool; a fresh service instance is still returned per
    call, preserving the original per-request behavior.
    """
    from app.core.config import settings

    manager = getattr(get_import_service, "_db_manager", None)
    if manager is None:
        # NOTE(review): assumes the manager is shareable across requests —
        # confirm against shared.database.base.create_database_manager.
        manager = create_database_manager(settings.DATABASE_URL, "data-service")
        get_import_service._db_manager = manager
    return EnhancedDataImportService(manager)
|
|
|
|
|
|
2025-07-26 18:46:52 +02:00
|
|
|
|
|
|
|
|
@router.post("/tenants/{tenant_id}/sales", response_model=SalesDataResponse)
async def create_sales_record(
    sales_data: SalesDataCreate,
    tenant_id: UUID = Path(..., description="Tenant ID"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    sales_service: SalesService = Depends(get_sales_service)
):
    """Create a single sales record for the tenant in the URL path.

    The path tenant_id overrides whatever the payload carried, the record is
    persisted through SalesService, and a best-effort event is published
    afterwards (publish failures are logged, never surfaced to the caller).
    """
    try:
        logger.info("Creating sales record with repository pattern",
                    product=sales_data.product_name,
                    quantity=sales_data.quantity_sold,
                    tenant_id=tenant_id,
                    user_id=current_user["user_id"])

        # The path parameter is authoritative for tenancy.
        sales_data.tenant_id = tenant_id

        record = await sales_service.create_sales_record(sales_data, str(tenant_id))

        # Best-effort notification; a broker hiccup must not fail the request.
        event_payload = {
            "tenant_id": str(tenant_id),
            "product_name": sales_data.product_name,
            "quantity_sold": sales_data.quantity_sold,
            "revenue": sales_data.revenue,
            "source": sales_data.source,
            "created_by": current_user["user_id"],
            "timestamp": datetime.utcnow().isoformat()
        }
        try:
            await publish_sales_created(event_payload)
        except Exception as publish_error:
            logger.warning("Failed to publish sales created event", error=str(publish_error))

        logger.info("Successfully created sales record using repository",
                    record_id=record.id,
                    tenant_id=tenant_id)
        return record

    except Exception as e:
        logger.error("Failed to create sales record",
                     error=str(e),
                     tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to create sales record: {str(e)}")
|
2025-07-18 11:51:43 +02:00
|
|
|
|
2025-07-20 07:24:04 +02:00
|
|
|
|
2025-07-26 18:46:52 +02:00
|
|
|
@router.get("/tenants/{tenant_id}/sales", response_model=List[SalesDataResponse])
async def get_sales_data(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    start_date: Optional[datetime] = Query(None, description="Start date filter"),
    end_date: Optional[datetime] = Query(None, description="End date filter"),
    product_name: Optional[str] = Query(None, description="Product name filter"),
    limit: Optional[int] = Query(1000, le=5000, description="Maximum number of records to return"),
    offset: Optional[int] = Query(0, ge=0, description="Number of records to skip"),
    product_names: Optional[List[str]] = Query(None, description="Multiple product name filters"),
    location_ids: Optional[List[str]] = Query(None, description="Location ID filters"),
    sources: Optional[List[str]] = Query(None, description="Source filters"),
    min_quantity: Optional[int] = Query(None, description="Minimum quantity filter"),
    max_quantity: Optional[int] = Query(None, description="Maximum quantity filter"),
    min_revenue: Optional[float] = Query(None, description="Minimum revenue filter"),
    max_revenue: Optional[float] = Query(None, description="Maximum revenue filter"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    sales_service: SalesService = Depends(get_sales_service)
):
    """List a tenant's sales records with optional filters and paging.

    The single-valued `product_name` filter, when provided, takes precedence
    over the multi-valued `product_names` list.
    """
    try:
        logger.debug("Querying sales data with repository pattern",
                     tenant_id=tenant_id,
                     start_date=start_date,
                     end_date=end_date,
                     limit=limit,
                     offset=offset)

        # Collapse the legacy single-product filter into the list form.
        effective_products = [product_name] if product_name else product_names

        query = SalesDataQuery(
            tenant_id=tenant_id,
            start_date=start_date,
            end_date=end_date,
            product_names=effective_products,
            location_ids=location_ids,
            sources=sources,
            min_quantity=min_quantity,
            max_quantity=max_quantity,
            min_revenue=min_revenue,
            max_revenue=max_revenue,
            limit=limit,
            offset=offset
        )

        records = await sales_service.get_sales_data(query)

        logger.debug("Successfully retrieved sales data using repository",
                     count=len(records),
                     tenant_id=tenant_id)
        return records

    except Exception as e:
        logger.error("Failed to query sales data",
                     error=str(e),
                     tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to query sales data: {str(e)}")
|
2025-07-18 11:51:43 +02:00
|
|
|
|
2025-08-08 09:08:41 +02:00
|
|
|
|
|
|
|
|
@router.get("/tenants/{tenant_id}/sales/analytics")
async def get_sales_analytics(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    start_date: Optional[datetime] = Query(None, description="Start date"),
    end_date: Optional[datetime] = Query(None, description="End date"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    sales_service: SalesService = Depends(get_sales_service)
):
    """Return aggregate sales analytics for a tenant over an optional date window."""
    try:
        logger.debug("Getting sales analytics with repository pattern",
                     tenant_id=tenant_id,
                     start_date=start_date,
                     end_date=end_date)

        result = await sales_service.get_sales_analytics(str(tenant_id), start_date, end_date)

        logger.debug("Analytics generated successfully using repository", tenant_id=tenant_id)
        return result

    except Exception as e:
        logger.error("Failed to generate sales analytics",
                     error=str(e),
                     tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to generate analytics: {str(e)}")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@router.get("/tenants/{tenant_id}/sales/aggregation")
async def get_sales_aggregation(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    start_date: Optional[datetime] = Query(None, description="Start date"),
    end_date: Optional[datetime] = Query(None, description="End date"),
    group_by: str = Query("daily", description="Aggregation period: daily, weekly, monthly"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    sales_service: SalesService = Depends(get_sales_service)
):
    """Return sales aggregated per period (daily/weekly/monthly) for a tenant.

    Raises:
        HTTPException 400: if ``group_by`` is not one of the documented periods.
        HTTPException 500: on unexpected service failures.
    """
    # Validate up front so a typo yields a 400 instead of surfacing as a
    # generic 500 from inside the service layer.
    allowed_periods = {"daily", "weekly", "monthly"}
    if group_by not in allowed_periods:
        raise HTTPException(
            status_code=400,
            detail=f"Invalid group_by '{group_by}'; expected one of: daily, weekly, monthly"
        )

    try:
        logger.debug("Getting sales aggregation with repository pattern",
                     tenant_id=tenant_id,
                     group_by=group_by)

        aggregation = await sales_service.get_sales_aggregation(
            str(tenant_id), start_date, end_date, group_by
        )

        logger.debug("Aggregation generated successfully using repository",
                     tenant_id=tenant_id,
                     group_by=group_by)
        return aggregation

    except HTTPException:
        # Preserve deliberate HTTP errors (e.g. the 400 above) rather than
        # rewrapping them as 500s — consistent with the export/delete handlers.
        raise
    except Exception as e:
        logger.error("Failed to get sales aggregation",
                     error=str(e),
                     tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to get aggregation: {str(e)}")
|
|
|
|
|
|
|
|
|
|
|
2025-07-26 18:46:52 +02:00
|
|
|
@router.post("/tenants/{tenant_id}/sales/import", response_model=SalesImportResult)
async def import_sales_data(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    file: UploadFile = File(...),
    file_format: str = Form(...),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    import_service: EnhancedDataImportService = Depends(get_import_service)
):
    """Import sales records for a tenant from an uploaded text file.

    The file is decoded as UTF-8 and handed to the import service; on success
    a best-effort ``data_imported`` event is published (publish failures are
    logged, never raised).

    Raises:
        HTTPException 400: if the upload is not valid UTF-8 text.
        HTTPException 500: on unexpected service failures.
    """
    try:
        logger.info("Importing sales data with enhanced repository pattern",
                    tenant_id=tenant_id,
                    format=file_format,
                    filename=file.filename,
                    user_id=current_user["user_id"])

        # Read file content
        content = await file.read()
        try:
            file_content = content.decode('utf-8')
        except UnicodeDecodeError as decode_error:
            # A bad encoding (or a binary upload) is a client error — report
            # 400 instead of letting it surface as a generic 500.
            raise HTTPException(
                status_code=400,
                detail="Uploaded file is not valid UTF-8 text"
            ) from decode_error

        # Process using enhanced import service
        result = await import_service.process_import(
            str(tenant_id),
            file_content,
            file_format,
            filename=file.filename
        )

        if result.success:
            # Publish event (best-effort; failure must not fail the import)
            try:
                await publish_data_imported({
                    "tenant_id": str(tenant_id),
                    "type": "file_import",
                    "format": file_format,
                    "filename": file.filename,
                    "records_created": result.records_created,
                    "imported_by": current_user["user_id"],
                    "timestamp": datetime.utcnow().isoformat()
                })
            except Exception as pub_error:
                logger.warning("Failed to publish import event", error=str(pub_error))

        logger.info("Import completed with enhanced repository pattern",
                    success=result.success,
                    records_created=result.records_created,
                    tenant_id=tenant_id)
        return result

    except HTTPException:
        # Keep deliberate HTTP errors (the 400 above) intact.
        raise
    except Exception as e:
        logger.error("Failed to import sales data",
                     error=str(e),
                     tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to import sales data: {str(e)}")
|
2025-07-18 11:51:43 +02:00
|
|
|
|
2025-08-08 09:08:41 +02:00
|
|
|
|
2025-07-26 18:46:52 +02:00
|
|
|
@router.post("/tenants/{tenant_id}/sales/import/validate", response_model=SalesValidationResult)
async def validate_import_data(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    file: UploadFile = File(..., description="File to validate"),
    file_format: str = Form(default="csv", description="File format: csv, json, excel"),
    validate_only: bool = Form(default=True, description="Only validate, don't import"),
    source: str = Form(default="onboarding_upload", description="Source of the upload"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    import_service: EnhancedDataImportService = Depends(get_import_service)
):
    """Validate an uploaded sales-data file without importing it.

    Raises:
        HTTPException 400: if the upload is not valid UTF-8 text.
        HTTPException 500: on unexpected service failures.
    """
    try:
        logger.info("Validating import data with enhanced repository pattern",
                    tenant_id=tenant_id,
                    format=file_format,
                    filename=file.filename,
                    user_id=current_user["user_id"])

        # Read file content
        content = await file.read()
        try:
            file_content = content.decode('utf-8')
        except UnicodeDecodeError as decode_error:
            # Client-side problem (binary or mis-encoded upload) -> 400, not 500.
            raise HTTPException(
                status_code=400,
                detail="Uploaded file is not valid UTF-8 text"
            ) from decode_error

        # Create validation data structure. The filename is included for
        # consistency with the JSON validation endpoint, which also passes it.
        validation_data = {
            "tenant_id": str(tenant_id),
            "data": file_content,
            "data_format": file_format,
            "filename": file.filename,
            "source": source,
            "validate_only": validate_only
        }

        # Use enhanced validation service
        validation_result = await import_service.validate_import_data(validation_data)

        logger.info("Validation completed with enhanced repository pattern",
                    is_valid=validation_result.is_valid,
                    total_records=validation_result.total_records,
                    tenant_id=tenant_id)

        return validation_result

    except HTTPException:
        # Keep deliberate HTTP errors (the 400 above) intact.
        raise
    except Exception as e:
        logger.error("Failed to validate import data",
                     error=str(e),
                     tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to validate import data: {str(e)}")
|
2025-07-18 11:51:43 +02:00
|
|
|
|
|
|
|
|
|
2025-08-08 09:08:41 +02:00
|
|
|
@router.post("/tenants/{tenant_id}/sales/import/validate-json", response_model=SalesValidationResult)
async def validate_import_data_json(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    request: SalesValidationRequest = ...,
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    import_service: EnhancedDataImportService = Depends(get_import_service)
):
    """Validate import data from JSON request for onboarding flow"""
    try:
        logger.info("Starting JSON-based data validation",
                    tenant_id=str(tenant_id),
                    data_format=request.data_format,
                    data_length=len(request.data),
                    validate_only=request.validate_only)

        # Build the service payload; the validation service expects the raw
        # payload under the "data" key, plus a synthetic filename for context.
        payload = {
            "tenant_id": str(tenant_id),
            "data": request.data,
            "data_format": request.data_format,
            "filename": f"onboarding_data.{request.data_format}",
            "source": request.source,
            "validate_only": request.validate_only
        }

        result = await import_service.validate_import_data(payload)

        logger.info("JSON validation completed",
                    is_valid=result.is_valid,
                    total_records=result.total_records,
                    tenant_id=tenant_id)
        return result

    except Exception as e:
        logger.error("Failed to validate JSON import data",
                     error=str(e),
                     tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to validate import data: {str(e)}")
|
|
|
|
|
|
2025-07-18 11:51:43 +02:00
|
|
|
|
2025-07-26 18:46:52 +02:00
|
|
|
@router.post("/tenants/{tenant_id}/sales/export")
async def export_sales_data(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    export_format: str = Query("csv", description="Export format: csv, excel, json"),
    start_date: Optional[datetime] = Query(None, description="Start date"),
    end_date: Optional[datetime] = Query(None, description="End date"),
    products: Optional[List[str]] = Query(None, description="Filter by products"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    sales_service: SalesService = Depends(get_sales_service)
):
    """Export a tenant's sales data as a downloadable file attachment.

    Returns 404 when the service produces no export; otherwise streams the
    generated content with an attachment Content-Disposition header. A
    best-effort export-completed event is published (failures only logged).
    """
    try:
        logger.info("Exporting sales data with repository pattern",
                    tenant_id=tenant_id,
                    format=export_format,
                    user_id=current_user["user_id"])

        export = await sales_service.export_sales_data(
            str(tenant_id), export_format, start_date, end_date, products
        )
        if not export:
            raise HTTPException(status_code=404, detail="No data found for export")

        # Best-effort notification; never fail the download over it.
        try:
            await publish_export_completed({
                "tenant_id": str(tenant_id),
                "format": export_format,
                "exported_by": current_user["user_id"],
                "record_count": export.get("record_count", 0),
                "timestamp": datetime.utcnow().isoformat()
            })
        except Exception as pub_error:
            logger.warning("Failed to publish export event", error=str(pub_error))

        logger.info("Export completed successfully using repository",
                    tenant_id=tenant_id,
                    format=export_format)

        attachment_headers = {
            "Content-Disposition": f"attachment; filename={export['filename']}"
        }
        return StreamingResponse(
            iter([export["content"]]),
            media_type=export["media_type"],
            headers=attachment_headers
        )

    except HTTPException:
        raise
    except Exception as e:
        logger.error("Failed to export sales data",
                     error=str(e),
                     tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to export sales data: {str(e)}")
|
2025-07-18 11:51:43 +02:00
|
|
|
|
2025-08-08 09:08:41 +02:00
|
|
|
|
2025-07-26 18:46:52 +02:00
|
|
|
@router.delete("/tenants/{tenant_id}/sales/{record_id}")
async def delete_sales_record(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    record_id: str = Path(..., description="Sales record ID"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    sales_service: SalesService = Depends(get_sales_service)
):
    """Delete one sales record, returning 404 when it does not exist."""
    try:
        logger.info("Deleting sales record with repository pattern",
                    record_id=record_id,
                    tenant_id=tenant_id,
                    user_id=current_user["user_id"])

        deleted = await sales_service.delete_sales_record(record_id, str(tenant_id))
        if not deleted:
            raise HTTPException(status_code=404, detail="Sales record not found")

        logger.info("Sales record deleted successfully using repository",
                    record_id=record_id,
                    tenant_id=tenant_id)
        return {"status": "success", "message": "Sales record deleted successfully"}

    except HTTPException:
        raise
    except Exception as e:
        logger.error("Failed to delete sales record",
                     error=str(e),
                     tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to delete sales record: {str(e)}")
|
|
|
|
|
|
|
|
|
|
|
2025-07-26 18:46:52 +02:00
|
|
|
@router.get("/tenants/{tenant_id}/sales/products")
async def get_products_list(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    sales_service: SalesService = Depends(get_sales_service)
):
    """Return the list of products known for this tenant's sales data."""
    try:
        logger.debug("Getting products list with repository pattern", tenant_id=tenant_id)

        result = await sales_service.get_products_list(str(tenant_id))

        logger.debug("Products list retrieved using repository",
                     count=len(result),
                     tenant_id=tenant_id)
        return result

    except Exception as e:
        logger.error("Failed to get products list",
                     error=str(e),
                     tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to get products list: {str(e)}")
|
|
|
|
|
|
|
|
|
|
|
2025-08-08 09:08:41 +02:00
|
|
|
@router.get("/tenants/{tenant_id}/sales/statistics")
async def get_sales_statistics(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    sales_service: SalesService = Depends(get_sales_service)
):
    """Return a statistics envelope wrapping the tenant's full analytics."""
    try:
        logger.debug("Getting sales statistics with repository pattern", tenant_id=tenant_id)

        # The analytics call already aggregates everything we report here;
        # this endpoint just wraps it with identifying metadata.
        analytics = await sales_service.get_sales_analytics(str(tenant_id))
        response = {
            "tenant_id": str(tenant_id),
            "analytics": analytics,
            "generated_at": datetime.utcnow().isoformat()
        }

        logger.debug("Sales statistics retrieved using repository", tenant_id=tenant_id)
        return response

    except Exception as e:
        logger.error("Failed to get sales statistics",
                     error=str(e),
                     tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to get statistics: {str(e)}")
|