REFACTOR data service
33
services/sales/Dockerfile
Normal file
@@ -0,0 +1,33 @@
# services/sales/Dockerfile
FROM python:3.11-slim

WORKDIR /app

# Install system dependencies
RUN apt-get update && apt-get install -y \
    gcc \
    g++ \
    && rm -rf /var/lib/apt/lists/*

# Copy requirements and install Python dependencies
COPY services/sales/requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

# Copy shared modules first
COPY shared/ /app/shared/

# Copy application code
COPY services/sales/app/ /app/app/

# Set Python path to include shared modules
ENV PYTHONPATH=/app

# Expose port
EXPOSE 8000

# Health check (assumes the 'requests' package is listed in requirements.txt)
HEALTHCHECK --interval=30s --timeout=10s --start-period=60s --retries=3 \
    CMD python -c "import requests; requests.get('http://localhost:8000/health', timeout=5)" || exit 1

# Run the application
CMD ["python", "-m", "uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"]
1
services/sales/app/__init__.py
Normal file
@@ -0,0 +1 @@
# services/sales/app/__init__.py
1
services/sales/app/api/__init__.py
Normal file
@@ -0,0 +1 @@
# services/sales/app/api/__init__.py
397
services/sales/app/api/import_data.py
Normal file
@@ -0,0 +1,397 @@
# services/sales/app/api/import_data.py
"""
Sales Data Import API Endpoints
"""

from fastapi import APIRouter, Depends, HTTPException, UploadFile, File, Form, Path
from typing import Dict, Any, Optional
from uuid import UUID
import structlog
import json

from app.services.data_import_service import DataImportService
from shared.auth.decorators import get_current_user_dep, get_current_tenant_id_dep

router = APIRouter(tags=["data-import"])
logger = structlog.get_logger()


def get_import_service():
    """Dependency injection for DataImportService"""
    return DataImportService()


@router.post("/tenants/{tenant_id}/sales/import/validate-json")
async def validate_json_data(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    data: Optional[Dict[str, Any]] = None,
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    current_tenant: str = Depends(get_current_tenant_id_dep),
    import_service: DataImportService = Depends(get_import_service)
):
    """Validate JSON sales data"""
    try:
        # Verify tenant access
        if str(tenant_id) != current_tenant:
            raise HTTPException(status_code=403, detail="Access denied to this tenant")

        if not data:
            raise HTTPException(status_code=400, detail="No data provided")

        logger.info("Validating JSON data", tenant_id=tenant_id, record_count=len(data.get("records", [])))

        # Validate the data - handle different input formats
        if "records" in data:
            # New format with records array
            validation_data = {
                "tenant_id": str(tenant_id),
                "data": json.dumps(data.get("records", [])),
                "data_format": "json"
            }
        else:
            # Legacy format where the entire payload is the validation data
            validation_data = data.copy()
            validation_data["tenant_id"] = str(tenant_id)
            if "data_format" not in validation_data:
                validation_data["data_format"] = "json"

        validation_result = await import_service.validate_import_data(validation_data)

        logger.info("JSON validation completed", tenant_id=tenant_id, valid=validation_result.is_valid)

        return {
            "is_valid": validation_result.is_valid,
            "total_records": validation_result.total_records,
            "valid_records": validation_result.valid_records,
            "invalid_records": validation_result.invalid_records,
            "errors": validation_result.errors,
            "warnings": validation_result.warnings,
            "summary": validation_result.summary
        }

    except HTTPException:
        # Let deliberate 4xx responses pass through instead of being remapped to 500
        raise
    except Exception as e:
        logger.error("Failed to validate JSON data", error=str(e), tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to validate data: {str(e)}")


@router.post("/tenants/{tenant_id}/sales/import/validate")
async def validate_sales_data_universal(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    file: Optional[UploadFile] = File(None),
    data: Optional[Dict[str, Any]] = None,
    file_format: Optional[str] = Form(None),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    current_tenant: str = Depends(get_current_tenant_id_dep),
    import_service: DataImportService = Depends(get_import_service)
):
    """Universal validation endpoint for sales data - supports files and JSON"""
    try:
        # Verify tenant access
        if str(tenant_id) != current_tenant:
            raise HTTPException(status_code=403, detail="Access denied to this tenant")

        # Handle file upload validation
        if file:
            logger.info("Validating uploaded file", tenant_id=tenant_id, filename=file.filename)

            # Auto-detect format from filename
            filename = file.filename.lower()
            if filename.endswith('.csv'):
                detected_format = 'csv'
            elif filename.endswith('.xlsx') or filename.endswith('.xls'):
                detected_format = 'excel'
            elif filename.endswith('.json'):
                detected_format = 'json'
            else:
                detected_format = file_format or 'csv'  # Default to CSV

            # Read file content
            content = await file.read()

            if detected_format in ['xlsx', 'xls', 'excel']:
                # For Excel files, encode as base64
                import base64
                file_content = base64.b64encode(content).decode('utf-8')
            else:
                # For CSV/JSON, decode as text
                file_content = content.decode('utf-8')

            validation_data = {
                "tenant_id": str(tenant_id),
                "data": file_content,
                "data_format": detected_format,
                "filename": file.filename
            }

        # Handle JSON data validation
        elif data:
            logger.info("Validating JSON data", tenant_id=tenant_id)

            validation_data = data.copy()
            validation_data["tenant_id"] = str(tenant_id)
            if "data_format" not in validation_data:
                validation_data["data_format"] = "json"

        else:
            raise HTTPException(status_code=400, detail="No file or data provided for validation")

        # Perform validation
        validation_result = await import_service.validate_import_data(validation_data)

        logger.info("Validation completed",
                    tenant_id=tenant_id,
                    valid=validation_result.is_valid,
                    total_records=validation_result.total_records)

        return {
            "is_valid": validation_result.is_valid,
            "total_records": validation_result.total_records,
            "valid_records": validation_result.valid_records,
            "invalid_records": validation_result.invalid_records,
            "errors": validation_result.errors,
            "warnings": validation_result.warnings,
            "summary": validation_result.summary,
            "message": "Validation completed successfully" if validation_result.is_valid else "Validation found errors",
            "details": {
                "total_records": validation_result.total_records,
                "format": validation_data.get("data_format", "unknown")
            }
        }

    except HTTPException:
        raise
    except Exception as e:
        logger.error("Failed to validate sales data", error=str(e), tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to validate data: {str(e)}")


@router.post("/tenants/{tenant_id}/sales/import/validate-csv")
async def validate_csv_data_legacy(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    file: UploadFile = File(...),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    current_tenant: str = Depends(get_current_tenant_id_dep),
    import_service: DataImportService = Depends(get_import_service)
):
    """Legacy CSV validation endpoint - delegates to the universal validator"""
    return await validate_sales_data_universal(
        tenant_id=tenant_id,
        file=file,
        current_user=current_user,
        current_tenant=current_tenant,
        import_service=import_service
    )


@router.post("/tenants/{tenant_id}/sales/import")
async def import_sales_data(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    data: Optional[Dict[str, Any]] = None,
    file: Optional[UploadFile] = File(None),
    file_format: Optional[str] = Form(None),
    update_existing: bool = Form(False, description="Whether to update existing records"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    current_tenant: str = Depends(get_current_tenant_id_dep),
    import_service: DataImportService = Depends(get_import_service)
):
    """Enhanced sales data import - supports multiple file formats and JSON"""
    try:
        # Verify tenant access
        if str(tenant_id) != current_tenant:
            raise HTTPException(status_code=403, detail="Access denied to this tenant")

        # Handle file upload (form data)
        if file:
            if not file.filename:
                raise HTTPException(status_code=400, detail="No file provided")

            logger.info("Starting enhanced file import", tenant_id=tenant_id, filename=file.filename)

            # Auto-detect format from filename
            filename = file.filename.lower()
            if filename.endswith('.csv'):
                detected_format = 'csv'
            elif filename.endswith('.xlsx') or filename.endswith('.xls'):
                detected_format = 'excel'
            elif filename.endswith('.json'):
                detected_format = 'json'
            else:
                detected_format = file_format or 'csv'  # Default to CSV

            # Read file content
            content = await file.read()

            if detected_format in ['xlsx', 'xls', 'excel']:
                # For Excel files, encode as base64
                import base64
                file_content = base64.b64encode(content).decode('utf-8')
            else:
                # For CSV/JSON, decode as text
                file_content = content.decode('utf-8')

            # Import the file using the enhanced service
            import_result = await import_service.process_import(
                str(tenant_id),  # Ensure string type
                file_content,
                detected_format,
                filename=file.filename
            )

        # Handle JSON data
        elif data:
            logger.info("Starting enhanced JSON data import", tenant_id=tenant_id, record_count=len(data.get("records", [])))

            # Import the data - handle different input formats
            if "records" in data:
                # New format with records array
                records_json = json.dumps(data.get("records", []))
                import_result = await import_service.process_import(
                    str(tenant_id),
                    records_json,
                    "json"
                )
            else:
                # Legacy format - the data field contains the payload directly
                import_result = await import_service.process_import(
                    str(tenant_id),
                    data.get("data", ""),
                    data.get("data_format", "json")
                )
        else:
            raise HTTPException(status_code=400, detail="No data or file provided")

        logger.info("Enhanced import completed",
                    tenant_id=tenant_id,
                    created=import_result.records_created,
                    updated=import_result.records_updated,
                    failed=import_result.records_failed,
                    processing_time=import_result.processing_time_seconds)

        # Return enhanced response matching frontend expectations
        response = {
            "success": import_result.success,
            "records_processed": import_result.records_processed,
            "records_created": import_result.records_created,
            "records_updated": import_result.records_updated,
            "records_failed": import_result.records_failed,
            "errors": import_result.errors,
            "warnings": import_result.warnings,
            "processing_time_seconds": import_result.processing_time_seconds,
            "records_imported": import_result.records_created,  # Frontend compatibility
            "message": f"Successfully imported {import_result.records_created} records" if import_result.success else "Import completed with errors"
        }

        # Add file-specific information if available
        if file:
            response["file_info"] = {
                "name": file.filename,
                "format": detected_format,
                "size_bytes": len(content) if 'content' in locals() else 0
            }

        return response

    except HTTPException:
        raise
    except Exception as e:
        logger.error("Failed to import sales data", error=str(e), tenant_id=tenant_id, exc_info=True)
        raise HTTPException(status_code=500, detail=f"Failed to import data: {str(e)}")


@router.post("/tenants/{tenant_id}/sales/import/csv")
async def import_csv_data(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    file: UploadFile = File(...),
    update_existing: bool = Form(False, description="Whether to update existing records"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    current_tenant: str = Depends(get_current_tenant_id_dep),
    import_service: DataImportService = Depends(get_import_service)
):
    """Import CSV sales data file"""
    try:
        # Verify tenant access
        if str(tenant_id) != current_tenant:
            raise HTTPException(status_code=403, detail="Access denied to this tenant")

        if not file.filename or not file.filename.endswith('.csv'):
            raise HTTPException(status_code=400, detail="File must be a CSV file")

        logger.info("Starting CSV data import", tenant_id=tenant_id, filename=file.filename)

        # Read file content
        content = await file.read()
        file_content = content.decode('utf-8')

        # Import the data (tenant_id passed as string, consistent with the other endpoints)
        import_result = await import_service.process_import(
            str(tenant_id),
            file_content,
            "csv",
            filename=file.filename
        )

        logger.info("CSV import completed",
                    tenant_id=tenant_id,
                    filename=file.filename,
                    created=import_result.records_created,
                    updated=import_result.records_updated,
                    failed=import_result.records_failed)

        return {
            "success": import_result.success,
            "records_processed": import_result.records_processed,
            "records_created": import_result.records_created,
            "records_updated": import_result.records_updated,
            "records_failed": import_result.records_failed,
            "errors": import_result.errors,
            "warnings": import_result.warnings,
            "processing_time_seconds": import_result.processing_time_seconds
        }

    except HTTPException:
        raise
    except Exception as e:
        logger.error("Failed to import CSV data", error=str(e), tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to import CSV data: {str(e)}")


@router.get("/tenants/{tenant_id}/sales/import/template")
async def get_import_template(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    format: str = "csv",
    current_tenant: str = Depends(get_current_tenant_id_dep)
):
    """Get sales data import template"""
    try:
        # Verify tenant access
        if str(tenant_id) != current_tenant:
            raise HTTPException(status_code=403, detail="Access denied to this tenant")

        if format not in ["csv", "json"]:
            raise HTTPException(status_code=400, detail="Format must be 'csv' or 'json'")

        if format == "csv":
            template = "date,product_name,product_category,product_sku,quantity_sold,unit_price,revenue,cost_of_goods,discount_applied,location_id,sales_channel,source,notes,weather_condition,is_holiday,is_weekend"
        else:
            template = {
                "records": [
                    {
                        "date": "2024-01-01T10:00:00Z",
                        "product_name": "Sample Product",
                        "product_category": "Sample Category",
                        "product_sku": "SAMPLE001",
                        "quantity_sold": 1,
                        "unit_price": 10.50,
                        "revenue": 10.50,
                        "cost_of_goods": 5.25,
                        "discount_applied": 0.0,
                        "location_id": "LOC001",
                        "sales_channel": "in_store",
                        "source": "manual",
                        "notes": "Sample sales record",
                        "weather_condition": "sunny",
                        "is_holiday": False,
                        "is_weekend": False
                    }
                ]
            }

        return {"template": template, "format": format}

    except HTTPException:
        raise
    except Exception as e:
        logger.error("Failed to get import template", error=str(e), tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to get import template: {str(e)}")
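
A minimal client sketch for the universal validation endpoint above. Assumptions (not part of this commit): the service is reachable at http://localhost:8000, the httpx package is available, and the tenant UUID and bearer token are placeholders.

# Hypothetical client sketch -- not part of this commit.
import httpx

TENANT_ID = "00000000-0000-0000-0000-000000000000"  # placeholder tenant UUID
TOKEN = "..."  # placeholder bearer token; auth scheme assumed from shared.auth

def validate_csv(path: str) -> dict:
    """Upload a CSV file to the universal validator and return the validation report."""
    with open(path, "rb") as fh:
        resp = httpx.post(
            f"http://localhost:8000/api/v1/tenants/{TENANT_ID}/sales/import/validate",
            headers={"Authorization": f"Bearer {TOKEN}"},
            files={"file": ("sales.csv", fh, "text/csv")},
        )
    resp.raise_for_status()
    return resp.json()  # keys: is_valid, total_records, errors, warnings, ...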
325
services/sales/app/api/sales.py
Normal file
@@ -0,0 +1,325 @@
# services/sales/app/api/sales.py
"""
Sales API Endpoints
"""

from fastapi import APIRouter, Depends, HTTPException, Query, Path
from typing import List, Optional, Dict, Any
from uuid import UUID
from datetime import datetime
import structlog

from app.schemas.sales import (
    SalesDataCreate,
    SalesDataUpdate,
    SalesDataResponse,
    SalesDataQuery
)
from app.services.sales_service import SalesService
from shared.auth.decorators import get_current_user_dep, get_current_tenant_id_dep

router = APIRouter(tags=["sales"])
logger = structlog.get_logger()


def get_sales_service():
    """Dependency injection for SalesService"""
    return SalesService()


@router.get("/tenants/{tenant_id}/sales/products")
async def get_products_list(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    sales_service: SalesService = Depends(get_sales_service)
):
    """Get list of products using the repository pattern"""
    try:
        logger.debug("Getting products list with repository pattern", tenant_id=tenant_id)

        products = await sales_service.get_products_list(str(tenant_id))

        logger.debug("Products list retrieved using repository",
                     count=len(products),
                     tenant_id=tenant_id)
        return products

    except Exception as e:
        logger.error("Failed to get products list",
                     error=str(e),
                     tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to get products list: {str(e)}")


@router.post("/tenants/{tenant_id}/sales", response_model=SalesDataResponse)
async def create_sales_record(
    sales_data: SalesDataCreate,
    tenant_id: UUID = Path(..., description="Tenant ID"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    current_tenant: str = Depends(get_current_tenant_id_dep),
    sales_service: SalesService = Depends(get_sales_service)
):
    """Create a new sales record"""
    try:
        # Verify tenant access
        if str(tenant_id) != current_tenant:
            raise HTTPException(status_code=403, detail="Access denied to this tenant")

        logger.info(
            "Creating sales record",
            product=sales_data.product_name,
            quantity=sales_data.quantity_sold,
            tenant_id=tenant_id,
            user_id=current_user.get("user_id")
        )

        # Create the record
        record = await sales_service.create_sales_record(
            sales_data,
            tenant_id,
            user_id=UUID(current_user["user_id"]) if current_user.get("user_id") else None
        )

        logger.info("Successfully created sales record", record_id=record.id, tenant_id=tenant_id)
        return record

    except HTTPException:
        # Preserve deliberate 4xx responses instead of remapping them to 500
        raise
    except ValueError as ve:
        logger.warning("Validation error creating sales record", error=str(ve), tenant_id=tenant_id)
        raise HTTPException(status_code=400, detail=str(ve))
    except Exception as e:
        logger.error("Failed to create sales record", error=str(e), tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to create sales record: {str(e)}")


@router.get("/tenants/{tenant_id}/sales", response_model=List[SalesDataResponse])
async def get_sales_records(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    start_date: Optional[datetime] = Query(None, description="Start date filter"),
    end_date: Optional[datetime] = Query(None, description="End date filter"),
    product_name: Optional[str] = Query(None, description="Product name filter"),
    product_category: Optional[str] = Query(None, description="Product category filter"),
    location_id: Optional[str] = Query(None, description="Location filter"),
    sales_channel: Optional[str] = Query(None, description="Sales channel filter"),
    source: Optional[str] = Query(None, description="Data source filter"),
    is_validated: Optional[bool] = Query(None, description="Validation status filter"),
    limit: int = Query(50, ge=1, le=1000, description="Number of records to return"),
    offset: int = Query(0, ge=0, description="Number of records to skip"),
    order_by: str = Query("date", description="Field to order by"),
    order_direction: str = Query("desc", description="Order direction (asc/desc)"),
    current_tenant: str = Depends(get_current_tenant_id_dep),
    sales_service: SalesService = Depends(get_sales_service)
):
    """Get sales records for a tenant with filtering and pagination"""
    try:
        # Verify tenant access
        if str(tenant_id) != current_tenant:
            raise HTTPException(status_code=403, detail="Access denied to this tenant")

        # Build query parameters
        query_params = SalesDataQuery(
            start_date=start_date,
            end_date=end_date,
            product_name=product_name,
            product_category=product_category,
            location_id=location_id,
            sales_channel=sales_channel,
            source=source,
            is_validated=is_validated,
            limit=limit,
            offset=offset,
            order_by=order_by,
            order_direction=order_direction
        )

        records = await sales_service.get_sales_records(tenant_id, query_params)

        logger.info("Retrieved sales records", count=len(records), tenant_id=tenant_id)
        return records

    except HTTPException:
        raise
    except Exception as e:
        logger.error("Failed to get sales records", error=str(e), tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to get sales records: {str(e)}")


@router.get("/tenants/{tenant_id}/sales/analytics/summary")
async def get_sales_analytics(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    start_date: Optional[datetime] = Query(None, description="Start date filter"),
    end_date: Optional[datetime] = Query(None, description="End date filter"),
    current_tenant: str = Depends(get_current_tenant_id_dep),
    sales_service: SalesService = Depends(get_sales_service)
):
    """Get sales analytics summary for a tenant"""
    try:
        # Verify tenant access
        if str(tenant_id) != current_tenant:
            raise HTTPException(status_code=403, detail="Access denied to this tenant")

        analytics = await sales_service.get_sales_analytics(tenant_id, start_date, end_date)

        logger.info("Retrieved sales analytics", tenant_id=tenant_id)
        return analytics

    except HTTPException:
        raise
    except Exception as e:
        logger.error("Failed to get sales analytics", error=str(e), tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to get sales analytics: {str(e)}")


@router.get("/tenants/{tenant_id}/products/{product_name}/sales", response_model=List[SalesDataResponse])
async def get_product_sales(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    product_name: str = Path(..., description="Product name"),
    start_date: Optional[datetime] = Query(None, description="Start date filter"),
    end_date: Optional[datetime] = Query(None, description="End date filter"),
    current_tenant: str = Depends(get_current_tenant_id_dep),
    sales_service: SalesService = Depends(get_sales_service)
):
    """Get sales records for a specific product"""
    try:
        # Verify tenant access
        if str(tenant_id) != current_tenant:
            raise HTTPException(status_code=403, detail="Access denied to this tenant")

        records = await sales_service.get_product_sales(tenant_id, product_name, start_date, end_date)

        logger.info("Retrieved product sales", count=len(records), product=product_name, tenant_id=tenant_id)
        return records

    except HTTPException:
        raise
    except Exception as e:
        logger.error("Failed to get product sales", error=str(e), tenant_id=tenant_id, product=product_name)
        raise HTTPException(status_code=500, detail=f"Failed to get product sales: {str(e)}")


@router.get("/tenants/{tenant_id}/sales/categories", response_model=List[str])
async def get_product_categories(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    current_tenant: str = Depends(get_current_tenant_id_dep),
    sales_service: SalesService = Depends(get_sales_service)
):
    """Get distinct product categories from sales data"""
    try:
        # Verify tenant access
        if str(tenant_id) != current_tenant:
            raise HTTPException(status_code=403, detail="Access denied to this tenant")

        categories = await sales_service.get_product_categories(tenant_id)

        return categories

    except HTTPException:
        raise
    except Exception as e:
        logger.error("Failed to get product categories", error=str(e), tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to get product categories: {str(e)}")


# ================================================================
# PARAMETERIZED ROUTES - Keep these at the end to avoid conflicts
# ================================================================

@router.get("/tenants/{tenant_id}/sales/{record_id}", response_model=SalesDataResponse)
async def get_sales_record(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    record_id: UUID = Path(..., description="Sales record ID"),
    current_tenant: str = Depends(get_current_tenant_id_dep),
    sales_service: SalesService = Depends(get_sales_service)
):
    """Get a specific sales record"""
    try:
        # Verify tenant access
        if str(tenant_id) != current_tenant:
            raise HTTPException(status_code=403, detail="Access denied to this tenant")

        record = await sales_service.get_sales_record(record_id, tenant_id)

        if not record:
            raise HTTPException(status_code=404, detail="Sales record not found")

        return record

    except HTTPException:
        raise
    except Exception as e:
        logger.error("Failed to get sales record", error=str(e), record_id=record_id, tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to get sales record: {str(e)}")


@router.put("/tenants/{tenant_id}/sales/{record_id}", response_model=SalesDataResponse)
async def update_sales_record(
    update_data: SalesDataUpdate,
    tenant_id: UUID = Path(..., description="Tenant ID"),
    record_id: UUID = Path(..., description="Sales record ID"),
    current_tenant: str = Depends(get_current_tenant_id_dep),
    sales_service: SalesService = Depends(get_sales_service)
):
    """Update a sales record"""
    try:
        # Verify tenant access
        if str(tenant_id) != current_tenant:
            raise HTTPException(status_code=403, detail="Access denied to this tenant")

        updated_record = await sales_service.update_sales_record(record_id, update_data, tenant_id)

        logger.info("Updated sales record", record_id=record_id, tenant_id=tenant_id)
        return updated_record

    except HTTPException:
        raise
    except ValueError as ve:
        logger.warning("Validation error updating sales record", error=str(ve), record_id=record_id)
        raise HTTPException(status_code=400, detail=str(ve))
    except Exception as e:
        logger.error("Failed to update sales record", error=str(e), record_id=record_id, tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to update sales record: {str(e)}")


@router.delete("/tenants/{tenant_id}/sales/{record_id}")
async def delete_sales_record(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    record_id: UUID = Path(..., description="Sales record ID"),
    current_tenant: str = Depends(get_current_tenant_id_dep),
    sales_service: SalesService = Depends(get_sales_service)
):
    """Delete a sales record"""
    try:
        # Verify tenant access
        if str(tenant_id) != current_tenant:
            raise HTTPException(status_code=403, detail="Access denied to this tenant")

        success = await sales_service.delete_sales_record(record_id, tenant_id)

        if not success:
            raise HTTPException(status_code=404, detail="Sales record not found")

        logger.info("Deleted sales record", record_id=record_id, tenant_id=tenant_id)
        return {"message": "Sales record deleted successfully"}

    except HTTPException:
        # Keep the 403/404 responses raised above from collapsing into a 500
        raise
    except ValueError as ve:
        logger.warning("Error deleting sales record", error=str(ve), record_id=record_id)
        raise HTTPException(status_code=400, detail=str(ve))
    except Exception as e:
        logger.error("Failed to delete sales record", error=str(e), record_id=record_id, tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to delete sales record: {str(e)}")


@router.post("/tenants/{tenant_id}/sales/{record_id}/validate", response_model=SalesDataResponse)
async def validate_sales_record(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    record_id: UUID = Path(..., description="Sales record ID"),
    validation_notes: Optional[str] = Query(None, description="Validation notes"),
    current_tenant: str = Depends(get_current_tenant_id_dep),
    sales_service: SalesService = Depends(get_sales_service)
):
    """Mark a sales record as validated"""
    try:
        # Verify tenant access
        if str(tenant_id) != current_tenant:
            raise HTTPException(status_code=403, detail="Access denied to this tenant")

        validated_record = await sales_service.validate_sales_record(record_id, tenant_id, validation_notes)

        logger.info("Validated sales record", record_id=record_id, tenant_id=tenant_id)
        return validated_record

    except HTTPException:
        raise
    except ValueError as ve:
        logger.warning("Error validating sales record", error=str(ve), record_id=record_id)
        raise HTTPException(status_code=400, detail=str(ve))
    except Exception as e:
        logger.error("Failed to validate sales record", error=str(e), record_id=record_id, tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to validate sales record: {str(e)}")
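
A matching read-side sketch for the list endpoint above, showing how the filter and pagination query parameters combine. Same assumptions as the earlier sketch: local base URL, placeholder token, httpx installed (httpx serializes the boolean filter as "true").

# Hypothetical client sketch -- not part of this commit.
import httpx

def recent_category_sales(tenant_id: str, token: str, category: str) -> list:
    """Fetch the 100 most recent validated sales records for one category."""
    resp = httpx.get(
        f"http://localhost:8000/api/v1/tenants/{tenant_id}/sales",
        headers={"Authorization": f"Bearer {token}"},
        params={
            "product_category": category,
            "is_validated": True,
            "limit": 100,          # capped at 1000 by the endpoint
            "offset": 0,
            "order_by": "date",
            "order_direction": "desc",
        },
    )
    resp.raise_for_status()
    return resp.json()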
1
services/sales/app/core/__init__.py
Normal file
@@ -0,0 +1 @@
# services/sales/app/core/__init__.py
53
services/sales/app/core/config.py
Normal file
@@ -0,0 +1,53 @@
# services/sales/app/core/config.py
"""
Sales Service Configuration
"""

from typing import List
from pydantic import Field
from shared.config.base import BaseServiceSettings


class Settings(BaseServiceSettings):
    """Sales service settings extending base configuration"""

    # Override service-specific settings
    SERVICE_NAME: str = "sales-service"
    VERSION: str = "1.0.0"
    APP_NAME: str = "Bakery Sales Service"
    DESCRIPTION: str = "Sales data management and analytics service"

    # API Configuration
    API_V1_STR: str = "/api/v1"

    # Override database URL to use SALES_DATABASE_URL
    DATABASE_URL: str = Field(
        default="postgresql+asyncpg://sales_user:sales_pass123@sales-db:5432/sales_db",
        env="SALES_DATABASE_URL"
    )

    # Sales-specific Redis database
    REDIS_DB: int = Field(default=2, env="SALES_REDIS_DB")

    # File upload configuration
    MAX_UPLOAD_SIZE: int = 10 * 1024 * 1024  # 10 MB
    UPLOAD_PATH: str = Field(default="/tmp/uploads", env="SALES_UPLOAD_PATH")
    ALLOWED_FILE_EXTENSIONS: List[str] = [".csv", ".xlsx", ".xls"]

    # Pagination
    DEFAULT_PAGE_SIZE: int = 50
    MAX_PAGE_SIZE: int = 1000

    # Data validation
    MIN_QUANTITY: float = 0.01
    MAX_QUANTITY: float = 10000.0
    MIN_REVENUE: float = 0.01
    MAX_REVENUE: float = 100000.0

    # Sales-specific cache TTL (5 minutes)
    SALES_CACHE_TTL: int = 300
    PRODUCT_CACHE_TTL: int = 600  # 10 minutes


# Global settings instance
settings = Settings()
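
Because DATABASE_URL and REDIS_DB are declared with env names, deployments can override them without code changes. A small sketch of the expected behavior, assuming the standard pydantic env-driven settings handling inherited from BaseServiceSettings:

# Hypothetical usage sketch -- not part of this commit.
import os

# Set overrides before the settings class is instantiated
os.environ["SALES_DATABASE_URL"] = "postgresql+asyncpg://user:pass@localhost:5432/sales_test"
os.environ["SALES_REDIS_DB"] = "5"

from app.core.config import Settings

settings = Settings()
assert "sales_test" in settings.DATABASE_URL  # env var wins over the default
assert settings.REDIS_DB == 5                 # string value coerced to int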
86
services/sales/app/core/database.py
Normal file
@@ -0,0 +1,86 @@
# services/sales/app/core/database.py
"""
Sales Service Database Configuration using shared database manager
"""

import structlog
from contextlib import asynccontextmanager
from typing import AsyncGenerator

from app.core.config import settings
from shared.database.base import DatabaseManager, Base

logger = structlog.get_logger()

# Create database manager instance
database_manager = DatabaseManager(
    database_url=settings.DATABASE_URL,
    service_name="sales-service",
    pool_size=settings.DB_POOL_SIZE,
    max_overflow=settings.DB_MAX_OVERFLOW,
    pool_recycle=settings.DB_POOL_RECYCLE,
    echo=settings.DB_ECHO
)


async def get_db():
    """
    Database dependency for FastAPI - using shared database manager
    """
    async for session in database_manager.get_db():
        yield session


async def init_db():
    """Initialize database tables using shared database manager"""
    try:
        logger.info("Initializing Sales Service database...")

        # Import all models to ensure they're registered
        from app.models import sales  # noqa: F401

        # Create all tables using database manager
        await database_manager.create_tables(Base.metadata)

        logger.info("Sales Service database initialized successfully")

    except Exception as e:
        logger.error("Failed to initialize database", error=str(e))
        raise


async def close_db():
    """Close database connections using shared database manager"""
    try:
        await database_manager.close_connections()
        logger.info("Database connections closed")
    except Exception as e:
        logger.error("Error closing database connections", error=str(e))


@asynccontextmanager
async def get_db_transaction():
    """
    Context manager for database transactions using shared database manager
    """
    async with database_manager.get_session() as session:
        try:
            async with session.begin():
                yield session
        except Exception as e:
            logger.error("Transaction error", error=str(e))
            raise


@asynccontextmanager
async def get_background_session():
    """
    Context manager for background tasks using shared database manager
    """
    async with database_manager.get_background_session() as session:
        yield session


async def health_check():
    """Database health check using shared database manager"""
    return await database_manager.health_check()
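
A usage sketch for get_db_transaction. Illustrative only: bulk_insert is a hypothetical helper, and SalesData is the model introduced later in this commit.

# Hypothetical usage sketch -- not part of this commit.
from app.core.database import get_db_transaction
from app.models.sales import SalesData

async def bulk_insert(records: list[dict]) -> int:
    """Insert a batch atomically: all rows commit together or roll back together."""
    async with get_db_transaction() as session:
        for payload in records:
            session.add(SalesData(**payload))
        # Commit happens when session.begin() exits cleanly; any exception
        # triggers a rollback and is re-raised by the context manager.
    return len(records)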
152
services/sales/app/main.py
Normal file
@@ -0,0 +1,152 @@
# services/sales/app/main.py
"""
Sales Service Main Application
"""

import structlog
from contextlib import asynccontextmanager
from fastapi import FastAPI, Request
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import JSONResponse

from app.core.config import settings
from app.core.database import init_db, close_db
from shared.monitoring import setup_logging, HealthChecker
from shared.monitoring.metrics import setup_metrics_early

# Setup logging first
setup_logging("sales-service", settings.LOG_LEVEL)
logger = structlog.get_logger()

# Global variables for lifespan access
metrics_collector = None
health_checker = None

# Create FastAPI app FIRST
app = FastAPI(
    title="Bakery Sales Service",
    description="Sales data management service for bakery operations",
    version="1.0.0"
)

# Setup metrics BEFORE any middleware and BEFORE lifespan
metrics_collector = setup_metrics_early(app, "sales-service")


@asynccontextmanager
async def lifespan(app: FastAPI):
    """Application lifespan events"""
    global health_checker

    # Startup
    logger.info("Starting Sales Service...")

    try:
        # Initialize database
        await init_db()
        logger.info("Database initialized")

        # Register custom metrics
        metrics_collector.register_counter("sales_records_created_total", "Total sales records created")
        metrics_collector.register_counter("sales_records_updated_total", "Total sales records updated")
        metrics_collector.register_counter("sales_queries_total", "Sales record queries")
        metrics_collector.register_counter("product_queries_total", "Product catalog queries")
        metrics_collector.register_counter("import_jobs_total", "Data import jobs")
        metrics_collector.register_counter("export_jobs_total", "Data export jobs")

        metrics_collector.register_histogram("sales_create_duration_seconds", "Sales record creation duration")
        metrics_collector.register_histogram("sales_query_duration_seconds", "Sales query duration")
        metrics_collector.register_histogram("import_processing_duration_seconds", "Import processing duration")
        metrics_collector.register_histogram("export_generation_duration_seconds", "Export generation duration")

        # Setup health checker
        health_checker = HealthChecker("sales-service")

        # Add database health check
        async def check_database():
            try:
                from app.core.database import get_db
                from sqlalchemy import text
                async for db in get_db():
                    await db.execute(text("SELECT 1"))
                return True
            except Exception as e:
                return f"Database error: {e}"

        health_checker.add_check("database", check_database, timeout=5.0, critical=True)

        # Store health checker in app state
        app.state.health_checker = health_checker

        logger.info("Sales Service started successfully")

    except Exception as e:
        logger.error(f"Failed to start Sales Service: {e}")
        raise

    yield

    # Shutdown
    logger.info("Shutting down Sales Service...")
    await close_db()


# Set lifespan AFTER metrics setup
app.router.lifespan_context = lifespan

# CORS middleware (added after metrics setup)
app.add_middleware(
    CORSMiddleware,
    allow_origins=settings.CORS_ORIGINS,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Include routers - the import router is registered BEFORE the sales router to avoid route conflicts
from app.api.sales import router as sales_router
from app.api.import_data import router as import_router

app.include_router(import_router, prefix="/api/v1", tags=["import"])
app.include_router(sales_router, prefix="/api/v1", tags=["sales"])


# Health check endpoint
@app.get("/health")
async def health_check():
    """Comprehensive health check endpoint"""
    if health_checker:
        return await health_checker.check_health()
    else:
        return {
            "service": "sales-service",
            "status": "healthy",
            "version": "1.0.0"
        }


# Root endpoint
@app.get("/")
async def root():
    """Root endpoint"""
    return {
        "service": "Sales Service",
        "version": "1.0.0",
        "status": "running",
        "endpoints": {
            "health": "/health",
            "docs": "/docs",
            "sales": "/api/v1/sales",
            "products": "/api/v1/products"
        }
    }


# Exception handlers
@app.exception_handler(Exception)
async def global_exception_handler(request: Request, exc: Exception):
    """Global exception handler with metrics"""
    logger.error(f"Unhandled exception: {exc}", exc_info=True)

    # Record error metric if available
    if metrics_collector:
        metrics_collector.increment_counter("errors_total", labels={"type": "unhandled"})

    return JSONResponse(
        status_code=500,
        content={"detail": "Internal server error"}
    )
5
services/sales/app/models/__init__.py
Normal file
@@ -0,0 +1,5 @@
# services/sales/app/models/__init__.py

from .sales import SalesData, Product, SalesImportJob

__all__ = ["SalesData", "Product", "SalesImportJob"]
238
services/sales/app/models/sales.py
Normal file
@@ -0,0 +1,238 @@
# services/sales/app/models/sales.py
"""
Sales data models for Sales Service
Enhanced with additional fields and relationships
"""

from sqlalchemy import Column, String, DateTime, Float, Integer, Text, Index, Boolean, Numeric
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.orm import relationship
import uuid
from datetime import datetime, timezone
from typing import Dict, Any, Optional

from shared.database.base import Base


class SalesData(Base):
    """Enhanced sales data model"""
    __tablename__ = "sales_data"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)
    date = Column(DateTime(timezone=True), nullable=False, index=True)

    # Product information
    product_name = Column(String(255), nullable=False, index=True)
    product_category = Column(String(100), nullable=True, index=True)
    product_sku = Column(String(100), nullable=True, index=True)

    # Sales data
    quantity_sold = Column(Integer, nullable=False)
    unit_price = Column(Numeric(10, 2), nullable=True)
    revenue = Column(Numeric(10, 2), nullable=False)
    cost_of_goods = Column(Numeric(10, 2), nullable=True)  # For profit calculation
    discount_applied = Column(Numeric(5, 2), nullable=True, default=0.0)  # Percentage

    # Location and channel
    location_id = Column(String(100), nullable=True, index=True)
    sales_channel = Column(String(50), nullable=True, default="in_store")  # in_store, online, delivery

    # Data source and quality
    source = Column(String(50), nullable=False, default="manual")  # manual, pos, online, import
    is_validated = Column(Boolean, default=False)
    validation_notes = Column(Text, nullable=True)

    # Additional metadata
    notes = Column(Text, nullable=True)
    weather_condition = Column(String(50), nullable=True)  # For correlation analysis
    is_holiday = Column(Boolean, default=False)
    is_weekend = Column(Boolean, default=False)

    # Audit fields
    created_at = Column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc))
    updated_at = Column(DateTime(timezone=True),
                        default=lambda: datetime.now(timezone.utc),
                        onupdate=lambda: datetime.now(timezone.utc))
    created_by = Column(UUID(as_uuid=True), nullable=True)  # User ID

    # Performance-optimized indexes
    __table_args__ = (
        # Core query patterns
        Index('idx_sales_tenant_date', 'tenant_id', 'date'),
        Index('idx_sales_tenant_product', 'tenant_id', 'product_name'),
        Index('idx_sales_tenant_location', 'tenant_id', 'location_id'),
        Index('idx_sales_tenant_category', 'tenant_id', 'product_category'),

        # Analytics queries
        Index('idx_sales_date_range', 'date', 'tenant_id'),
        Index('idx_sales_product_date', 'product_name', 'date', 'tenant_id'),
        Index('idx_sales_channel_date', 'sales_channel', 'date', 'tenant_id'),

        # Data quality queries
        Index('idx_sales_source_validated', 'source', 'is_validated', 'tenant_id'),
        Index('idx_sales_sku_date', 'product_sku', 'date', 'tenant_id'),
    )

    def to_dict(self) -> Dict[str, Any]:
        """Convert model to dictionary for API responses"""
        return {
            'id': str(self.id),
            'tenant_id': str(self.tenant_id),
            'date': self.date.isoformat() if self.date else None,
            'product_name': self.product_name,
            'product_category': self.product_category,
            'product_sku': self.product_sku,
            'quantity_sold': self.quantity_sold,
            'unit_price': float(self.unit_price) if self.unit_price else None,
            'revenue': float(self.revenue) if self.revenue else None,
            'cost_of_goods': float(self.cost_of_goods) if self.cost_of_goods else None,
            'discount_applied': float(self.discount_applied) if self.discount_applied else None,
            'location_id': self.location_id,
            'sales_channel': self.sales_channel,
            'source': self.source,
            'is_validated': self.is_validated,
            'validation_notes': self.validation_notes,
            'notes': self.notes,
            'weather_condition': self.weather_condition,
            'is_holiday': self.is_holiday,
            'is_weekend': self.is_weekend,
            'created_at': self.created_at.isoformat() if self.created_at else None,
            'updated_at': self.updated_at.isoformat() if self.updated_at else None,
            'created_by': str(self.created_by) if self.created_by else None,
        }

    @property
    def profit_margin(self) -> Optional[float]:
        """Calculate profit margin if cost data is available"""
        if self.revenue and self.cost_of_goods:
            return float((self.revenue - self.cost_of_goods) / self.revenue * 100)
        return None


class Product(Base):
    """Product catalog model - future expansion"""
    __tablename__ = "products"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)

    # Product identification
    name = Column(String(255), nullable=False, index=True)
    sku = Column(String(100), nullable=True, index=True)
    category = Column(String(100), nullable=True, index=True)
    subcategory = Column(String(100), nullable=True)

    # Product details
    description = Column(Text, nullable=True)
    unit_of_measure = Column(String(20), nullable=False, default="unit")
    weight = Column(Float, nullable=True)  # in grams
    volume = Column(Float, nullable=True)  # in ml

    # Pricing
    base_price = Column(Numeric(10, 2), nullable=True)
    cost_price = Column(Numeric(10, 2), nullable=True)

    # Status
    is_active = Column(Boolean, default=True)
    is_seasonal = Column(Boolean, default=False)
    seasonal_start = Column(DateTime(timezone=True), nullable=True)
    seasonal_end = Column(DateTime(timezone=True), nullable=True)

    # Audit fields
    created_at = Column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc))
    updated_at = Column(DateTime(timezone=True),
                        default=lambda: datetime.now(timezone.utc),
                        onupdate=lambda: datetime.now(timezone.utc))

    __table_args__ = (
        Index('idx_products_tenant_name', 'tenant_id', 'name', unique=True),
        Index('idx_products_tenant_sku', 'tenant_id', 'sku'),
        Index('idx_products_category', 'tenant_id', 'category', 'is_active'),
        Index('idx_products_seasonal', 'is_seasonal', 'seasonal_start', 'seasonal_end'),
    )

    def to_dict(self) -> Dict[str, Any]:
        """Convert model to dictionary for API responses"""
        return {
            'id': str(self.id),
            'tenant_id': str(self.tenant_id),
            'name': self.name,
            'sku': self.sku,
            'category': self.category,
            'subcategory': self.subcategory,
            'description': self.description,
            'unit_of_measure': self.unit_of_measure,
            'weight': self.weight,
            'volume': self.volume,
            'base_price': float(self.base_price) if self.base_price else None,
            'cost_price': float(self.cost_price) if self.cost_price else None,
            'is_active': self.is_active,
            'is_seasonal': self.is_seasonal,
            'seasonal_start': self.seasonal_start.isoformat() if self.seasonal_start else None,
            'seasonal_end': self.seasonal_end.isoformat() if self.seasonal_end else None,
            'created_at': self.created_at.isoformat() if self.created_at else None,
            'updated_at': self.updated_at.isoformat() if self.updated_at else None,
        }


class SalesImportJob(Base):
    """Track sales data import jobs"""
    __tablename__ = "sales_import_jobs"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)

    # Job details
    filename = Column(String(255), nullable=False)
    file_size = Column(Integer, nullable=True)
    import_type = Column(String(50), nullable=False, default="csv")  # csv, xlsx, api

    # Processing status
    status = Column(String(20), nullable=False, default="pending")  # pending, processing, completed, failed
    progress_percentage = Column(Float, default=0.0)

    # Results
    total_rows = Column(Integer, default=0)
    processed_rows = Column(Integer, default=0)
    successful_imports = Column(Integer, default=0)
    failed_imports = Column(Integer, default=0)
    duplicate_rows = Column(Integer, default=0)

    # Error tracking
    error_message = Column(Text, nullable=True)
    validation_errors = Column(Text, nullable=True)  # JSON string of validation errors

    # Timestamps
    started_at = Column(DateTime(timezone=True), nullable=True)
    completed_at = Column(DateTime(timezone=True), nullable=True)
    created_at = Column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc))
    created_by = Column(UUID(as_uuid=True), nullable=True)

    __table_args__ = (
        Index('idx_import_jobs_tenant_status', 'tenant_id', 'status', 'created_at'),
        Index('idx_import_jobs_status_date', 'status', 'created_at'),
    )

    def to_dict(self) -> Dict[str, Any]:
        """Convert model to dictionary for API responses"""
        return {
            'id': str(self.id),
            'tenant_id': str(self.tenant_id),
            'filename': self.filename,
            'file_size': self.file_size,
            'import_type': self.import_type,
            'status': self.status,
            'progress_percentage': self.progress_percentage,
            'total_rows': self.total_rows,
            'processed_rows': self.processed_rows,
            'successful_imports': self.successful_imports,
            'failed_imports': self.failed_imports,
            'duplicate_rows': self.duplicate_rows,
            'error_message': self.error_message,
            'validation_errors': self.validation_errors,
            'started_at': self.started_at.isoformat() if self.started_at else None,
            'completed_at': self.completed_at.isoformat() if self.completed_at else None,
            'created_at': self.created_at.isoformat() if self.created_at else None,
            'created_by': str(self.created_by) if self.created_by else None,
        }
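
The composite indexes lead with tenant_id because every access path is tenant-scoped. A sketch of the kind of query idx_sales_tenant_date is meant to serve (sales_in_window is a hypothetical helper; equality on tenant_id plus a range and ordering on date line up with the index column order):

# Hypothetical query sketch -- not part of this commit.
from datetime import datetime
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession

from app.models.sales import SalesData

async def sales_in_window(session: AsyncSession, tenant_id, start: datetime, end: datetime):
    """Tenant-scoped date-range scan; matches the (tenant_id, date) index column order."""
    stmt = (
        select(SalesData)
        .where(SalesData.tenant_id == tenant_id)
        .where(SalesData.date >= start, SalesData.date < end)
        .order_by(SalesData.date.desc())
    )
    result = await session.execute(stmt)
    return result.scalars().all()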
6
services/sales/app/repositories/__init__.py
Normal file
@@ -0,0 +1,6 @@
# services/sales/app/repositories/__init__.py

from .sales_repository import SalesRepository
from .product_repository import ProductRepository

__all__ = ["SalesRepository", "ProductRepository"]
193
services/sales/app/repositories/product_repository.py
Normal file
@@ -0,0 +1,193 @@
# services/sales/app/repositories/product_repository.py
"""
Product Repository using Repository Pattern
"""

from typing import List, Optional
from uuid import UUID
from sqlalchemy import select, and_, or_
from sqlalchemy.ext.asyncio import AsyncSession
import structlog

from app.models.sales import Product
from app.schemas.sales import ProductCreate, ProductUpdate
from shared.database.repository import BaseRepository

logger = structlog.get_logger()


class ProductRepository(BaseRepository[Product, ProductCreate, ProductUpdate]):
    """Repository for product operations"""

    def __init__(self, db_session: AsyncSession):
        super().__init__(Product, db_session)

    async def create_product(self, product_data: ProductCreate, tenant_id: UUID) -> Product:
        """Create a new product"""
        try:
            # Prepare data
            create_data = product_data.model_dump()
            create_data['tenant_id'] = tenant_id

            # Create product
            product = await self.create(create_data)
            logger.info(
                "Created product",
                product_id=product.id,
                name=product.name,
                tenant_id=tenant_id
            )
            return product

        except Exception as e:
            logger.error("Failed to create product", error=str(e), tenant_id=tenant_id)
            raise

    async def get_by_tenant(self, tenant_id: UUID, include_inactive: bool = False) -> List[Product]:
        """Get all products for a tenant"""
        try:
            stmt = select(Product).where(Product.tenant_id == tenant_id)

            if not include_inactive:
                stmt = stmt.where(Product.is_active == True)

            stmt = stmt.order_by(Product.category, Product.name)

            result = await self.db_session.execute(stmt)
            products = result.scalars().all()

            logger.info(
                "Retrieved products",
                count=len(products),
                tenant_id=tenant_id,
                include_inactive=include_inactive
            )
            return list(products)

        except Exception as e:
            logger.error("Failed to get products", error=str(e), tenant_id=tenant_id)
            raise

    async def get_by_category(self, tenant_id: UUID, category: str) -> List[Product]:
        """Get products by category"""
        try:
            stmt = select(Product).where(
                and_(
                    Product.tenant_id == tenant_id,
                    Product.category == category,
                    Product.is_active == True
                )
            ).order_by(Product.name)

            result = await self.db_session.execute(stmt)
            products = result.scalars().all()

            return list(products)

        except Exception as e:
            logger.error("Failed to get products by category", error=str(e), tenant_id=tenant_id, category=category)
            raise

    async def get_by_name(self, tenant_id: UUID, name: str) -> Optional[Product]:
        """Get product by name"""
        try:
            stmt = select(Product).where(
                and_(
                    Product.tenant_id == tenant_id,
                    Product.name == name
                )
            )

            result = await self.db_session.execute(stmt)
            product = result.scalar_one_or_none()

            return product

        except Exception as e:
            logger.error("Failed to get product by name", error=str(e), tenant_id=tenant_id, name=name)
            raise

    async def get_by_sku(self, tenant_id: UUID, sku: str) -> Optional[Product]:
        """Get product by SKU"""
        try:
            stmt = select(Product).where(
                and_(
                    Product.tenant_id == tenant_id,
                    Product.sku == sku
                )
            )

            result = await self.db_session.execute(stmt)
            product = result.scalar_one_or_none()

            return product

        except Exception as e:
            logger.error("Failed to get product by SKU", error=str(e), tenant_id=tenant_id, sku=sku)
            raise

    async def search_products(self, tenant_id: UUID, query: str, limit: int = 50) -> List[Product]:
        """Search products by name, SKU, or description"""
        try:
            stmt = select(Product).where(
                and_(
                    Product.tenant_id == tenant_id,
                    Product.is_active == True,
                    or_(
                        Product.name.ilike(f"%{query}%"),
                        Product.sku.ilike(f"%{query}%"),
                        Product.description.ilike(f"%{query}%")
                    )
                )
            ).order_by(Product.name).limit(limit)

            result = await self.db_session.execute(stmt)
            products = result.scalars().all()

            return list(products)

        except Exception as e:
            logger.error("Failed to search products", error=str(e), tenant_id=tenant_id, query=query)
            raise

    async def get_categories(self, tenant_id: UUID) -> List[str]:
        """Get distinct product categories for a tenant"""
        try:
            stmt = select(Product.category).where(
                and_(
                    Product.tenant_id == tenant_id,
                    Product.is_active == True,
                    Product.category.is_not(None)
                )
            ).distinct()

            result = await self.db_session.execute(stmt)
            categories = [row[0] for row in result if row[0]]

            return sorted(categories)

        except Exception as e:
            logger.error("Failed to get product categories", error=str(e), tenant_id=tenant_id)
            raise

    async def deactivate_product(self, product_id: UUID) -> Product:
        """Deactivate a product"""
        try:
            product = await self.update(product_id, {'is_active': False})
            logger.info("Deactivated product", product_id=product_id)
            return product

        except Exception as e:
            logger.error("Failed to deactivate product", error=str(e), product_id=product_id)
            raise

    async def activate_product(self, product_id: UUID) -> Product:
        """Activate a product"""
        try:
            product = await self.update(product_id, {'is_active': True})
            logger.info("Activated product", product_id=product_id)
            return product

        except Exception as e:
            logger.error("Failed to activate product", error=str(e), product_id=product_id)
            raise
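Usage sketch (illustrative, not part of the commit): exercising ProductRepository inside the async transaction helper that the import service below uses (app.core.database.get_db_transaction); the demo coroutine and the sample product are assumptions.

# hypothetical_product_repo_demo.py -- illustrative only
import asyncio
from uuid import UUID

from app.core.database import get_db_transaction
from app.repositories.product_repository import ProductRepository
from app.schemas.sales import ProductCreate


async def demo(tenant_id: UUID) -> None:
    async with get_db_transaction() as db:
        repo = ProductRepository(db)
        # Create a product, then read it back by name in the same transaction
        created = await repo.create_product(
            ProductCreate(name="Pan de Centeno", category="breads"), tenant_id
        )
        found = await repo.get_by_name(tenant_id, "Pan de Centeno")
        assert found is not None and found.id == created.id


# asyncio.run(demo(UUID("00000000-0000-0000-0000-000000000000")))  # placeholder tenant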
296
services/sales/app/repositories/sales_repository.py
Normal file
296
services/sales/app/repositories/sales_repository.py
Normal file
@@ -0,0 +1,296 @@
# services/sales/app/repositories/sales_repository.py
"""
Sales Repository using Repository Pattern
"""

from typing import List, Optional, Dict, Any
from uuid import UUID
from datetime import datetime
from sqlalchemy import select, func, and_, desc, asc
from sqlalchemy.ext.asyncio import AsyncSession
import structlog

from app.models.sales import SalesData
from app.schemas.sales import SalesDataCreate, SalesDataUpdate, SalesDataQuery
from shared.database.repository import BaseRepository

logger = structlog.get_logger()


class SalesRepository(BaseRepository[SalesData, SalesDataCreate, SalesDataUpdate]):
    """Repository for sales data operations"""

    def __init__(self, session: AsyncSession):
        super().__init__(SalesData, session)

    async def create_sales_record(self, sales_data: SalesDataCreate, tenant_id: UUID) -> SalesData:
        """Create a new sales record"""
        try:
            # Prepare data
            create_data = sales_data.model_dump()
            create_data['tenant_id'] = tenant_id

            # Calculate weekend flag if not provided
            if sales_data.date and create_data.get('is_weekend') is None:
                create_data['is_weekend'] = sales_data.date.weekday() >= 5

            # Create record
            record = await self.create(create_data)
            logger.info(
                "Created sales record",
                record_id=record.id,
                product=record.product_name,
                quantity=record.quantity_sold,
                tenant_id=tenant_id
            )
            return record

        except Exception as e:
            logger.error("Failed to create sales record", error=str(e), tenant_id=tenant_id)
            raise

    async def get_by_tenant(
        self,
        tenant_id: UUID,
        query_params: Optional[SalesDataQuery] = None
    ) -> List[SalesData]:
        """Get sales records by tenant with optional filtering"""
        try:
            # Build base query
            stmt = select(SalesData).where(SalesData.tenant_id == tenant_id)

            # Apply filters if query_params provided
            if query_params:
                if query_params.start_date:
                    stmt = stmt.where(SalesData.date >= query_params.start_date)
                if query_params.end_date:
                    stmt = stmt.where(SalesData.date <= query_params.end_date)
                if query_params.product_name:
                    stmt = stmt.where(SalesData.product_name.ilike(f"%{query_params.product_name}%"))
                if query_params.product_category:
                    stmt = stmt.where(SalesData.product_category == query_params.product_category)
                if query_params.location_id:
                    stmt = stmt.where(SalesData.location_id == query_params.location_id)
                if query_params.sales_channel:
                    stmt = stmt.where(SalesData.sales_channel == query_params.sales_channel)
                if query_params.source:
                    stmt = stmt.where(SalesData.source == query_params.source)
                if query_params.is_validated is not None:
                    stmt = stmt.where(SalesData.is_validated == query_params.is_validated)

                # Apply ordering
                if query_params.order_by and hasattr(SalesData, query_params.order_by):
                    order_col = getattr(SalesData, query_params.order_by)
                    if query_params.order_direction == 'asc':
                        stmt = stmt.order_by(asc(order_col))
                    else:
                        stmt = stmt.order_by(desc(order_col))
                else:
                    stmt = stmt.order_by(desc(SalesData.date))

                # Apply pagination
                stmt = stmt.offset(query_params.offset).limit(query_params.limit)
            else:
                # Default ordering
                stmt = stmt.order_by(desc(SalesData.date)).limit(50)

            result = await self.session.execute(stmt)
            records = result.scalars().all()

            logger.info(
                "Retrieved sales records",
                count=len(records),
                tenant_id=tenant_id
            )
            return list(records)

        except Exception as e:
            logger.error("Failed to get sales records", error=str(e), tenant_id=tenant_id)
            raise

    async def get_by_product(
        self,
        tenant_id: UUID,
        product_name: str,
        start_date: Optional[datetime] = None,
        end_date: Optional[datetime] = None
    ) -> List[SalesData]:
        """Get sales records for a specific product"""
        try:
            stmt = select(SalesData).where(
                and_(
                    SalesData.tenant_id == tenant_id,
                    SalesData.product_name == product_name
                )
            )

            if start_date:
                stmt = stmt.where(SalesData.date >= start_date)
            if end_date:
                stmt = stmt.where(SalesData.date <= end_date)

            stmt = stmt.order_by(desc(SalesData.date))

            result = await self.session.execute(stmt)
            records = result.scalars().all()

            return list(records)

        except Exception as e:
            logger.error("Failed to get product sales", error=str(e), tenant_id=tenant_id, product=product_name)
            raise

    async def get_analytics(
        self,
        tenant_id: UUID,
        start_date: Optional[datetime] = None,
        end_date: Optional[datetime] = None
    ) -> Dict[str, Any]:
        """Get sales analytics for a tenant"""
        try:
            # Total revenue, quantity, and transaction summary
            summary_query = select(
                func.sum(SalesData.revenue).label('total_revenue'),
                func.sum(SalesData.quantity_sold).label('total_quantity'),
                func.count().label('total_transactions'),
                func.avg(SalesData.revenue).label('avg_transaction_value')
            ).where(SalesData.tenant_id == tenant_id)

            if start_date:
                summary_query = summary_query.where(SalesData.date >= start_date)
            if end_date:
                summary_query = summary_query.where(SalesData.date <= end_date)

            result = await self.session.execute(summary_query)
            summary = result.first()

            # Top products
            top_products_query = select(
                SalesData.product_name,
                func.sum(SalesData.revenue).label('revenue'),
                func.sum(SalesData.quantity_sold).label('quantity')
            ).where(SalesData.tenant_id == tenant_id)

            if start_date:
                top_products_query = top_products_query.where(SalesData.date >= start_date)
            if end_date:
                top_products_query = top_products_query.where(SalesData.date <= end_date)

            top_products_query = top_products_query.group_by(
                SalesData.product_name
            ).order_by(
                desc(func.sum(SalesData.revenue))
            ).limit(10)

            top_products_result = await self.session.execute(top_products_query)
            top_products = [
                {
                    'product_name': row.product_name,
                    'revenue': float(row.revenue) if row.revenue else 0,
                    'quantity': row.quantity or 0
                }
                for row in top_products_result
            ]

            # Sales by channel
            channel_query = select(
                SalesData.sales_channel,
                func.sum(SalesData.revenue).label('revenue'),
                func.count().label('transactions')
            ).where(SalesData.tenant_id == tenant_id)

            if start_date:
                channel_query = channel_query.where(SalesData.date >= start_date)
            if end_date:
                channel_query = channel_query.where(SalesData.date <= end_date)

            channel_query = channel_query.group_by(SalesData.sales_channel)

            channel_result = await self.session.execute(channel_query)
            sales_by_channel = {
                row.sales_channel: {
                    'revenue': float(row.revenue) if row.revenue else 0,
                    'transactions': row.transactions or 0
                }
                for row in channel_result
            }

            return {
                'total_revenue': float(summary.total_revenue) if summary.total_revenue else 0,
                'total_quantity': summary.total_quantity or 0,
                'total_transactions': summary.total_transactions or 0,
                'average_transaction_value': float(summary.avg_transaction_value) if summary.avg_transaction_value else 0,
                'top_products': top_products,
                'sales_by_channel': sales_by_channel
            }

        except Exception as e:
            logger.error("Failed to get sales analytics", error=str(e), tenant_id=tenant_id)
            raise

    async def get_product_categories(self, tenant_id: UUID) -> List[str]:
        """Get distinct product categories for a tenant"""
        try:
            stmt = select(SalesData.product_category).where(
                and_(
                    SalesData.tenant_id == tenant_id,
                    SalesData.product_category.is_not(None)
                )
            ).distinct()

            result = await self.session.execute(stmt)
            categories = [row[0] for row in result if row[0]]

            return sorted(categories)

        except Exception as e:
            logger.error("Failed to get product categories", error=str(e), tenant_id=tenant_id)
            raise

    async def validate_record(self, record_id: UUID, validation_notes: Optional[str] = None) -> SalesData:
        """Mark a sales record as validated"""
        try:
            record = await self.get_by_id(record_id)
            if not record:
                raise ValueError(f"Sales record {record_id} not found")

            update_data = {
                'is_validated': True,
                'validation_notes': validation_notes
            }

            updated_record = await self.update(record_id, update_data)

            logger.info("Validated sales record", record_id=record_id)
            return updated_record

        except Exception as e:
            logger.error("Failed to validate sales record", error=str(e), record_id=record_id)
            raise

    async def get_product_statistics(self, tenant_id: str) -> List[str]:
        """Get distinct product names sold by a tenant"""
        try:
            stmt = select(SalesData.product_name).where(
                and_(
                    SalesData.tenant_id == tenant_id,
                    SalesData.product_name.is_not(None)
                )
            ).distinct()

            result = await self.session.execute(stmt)
            products = [row[0] for row in result if row[0]]

            return sorted(products)

        except Exception as e:
            logger.error("Failed to get product statistics", error=str(e), tenant_id=tenant_id)
            raise
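Usage sketch (illustrative, not part of the commit): a filtered listing plus the analytics rollup through SalesRepository; the session wiring via get_db_transaction is an assumption carried over from the import service below.

# hypothetical_sales_repo_demo.py -- illustrative only
from datetime import datetime, timedelta, timezone
from uuid import UUID

from app.core.database import get_db_transaction
from app.repositories.sales_repository import SalesRepository
from app.schemas.sales import SalesDataQuery


async def demo(tenant_id: UUID) -> None:
    async with get_db_transaction() as db:
        repo = SalesRepository(db)
        last_week = datetime.now(timezone.utc) - timedelta(days=7)
        # Filtered, paginated listing of validated records
        records = await repo.get_by_tenant(
            tenant_id,
            SalesDataQuery(start_date=last_week, is_validated=True, limit=100),
        )
        # Aggregated rollup over the same window
        analytics = await repo.get_analytics(tenant_id, start_date=last_week)
        print(len(records), analytics["total_revenue"])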
25
services/sales/app/schemas/__init__.py
Normal file
25
services/sales/app/schemas/__init__.py
Normal file
@@ -0,0 +1,25 @@
# services/sales/app/schemas/__init__.py

from .sales import (
    SalesDataCreate,
    SalesDataUpdate,
    SalesDataResponse,
    SalesDataQuery,
    ProductCreate,
    ProductUpdate,
    ProductResponse,
    SalesAnalytics,
    ProductSalesAnalytics
)

__all__ = [
    "SalesDataCreate",
    "SalesDataUpdate",
    "SalesDataResponse",
    "SalesDataQuery",
    "ProductCreate",
    "ProductUpdate",
    "ProductResponse",
    "SalesAnalytics",
    "ProductSalesAnalytics"
]
198
services/sales/app/schemas/sales.py
Normal file
198
services/sales/app/schemas/sales.py
Normal file
@@ -0,0 +1,198 @@
# services/sales/app/schemas/sales.py
"""
Sales Service Pydantic Schemas
"""

from pydantic import BaseModel, Field, validator
from typing import Optional, List
from datetime import datetime
from uuid import UUID
from decimal import Decimal


class SalesDataBase(BaseModel):
    """Base sales data schema"""
    product_name: str = Field(..., min_length=1, max_length=255, description="Product name")
    product_category: Optional[str] = Field(None, max_length=100, description="Product category")
    product_sku: Optional[str] = Field(None, max_length=100, description="Product SKU")

    quantity_sold: int = Field(..., gt=0, description="Quantity sold")
    unit_price: Optional[Decimal] = Field(None, ge=0, description="Unit price")
    revenue: Decimal = Field(..., gt=0, description="Total revenue")
    cost_of_goods: Optional[Decimal] = Field(None, ge=0, description="Cost of goods sold")
    discount_applied: Optional[Decimal] = Field(0, ge=0, le=100, description="Discount percentage")

    location_id: Optional[str] = Field(None, max_length=100, description="Location identifier")
    sales_channel: Optional[str] = Field("in_store", description="Sales channel")
    source: str = Field("manual", description="Data source")

    notes: Optional[str] = Field(None, description="Additional notes")
    weather_condition: Optional[str] = Field(None, max_length=50, description="Weather condition")
    is_holiday: bool = Field(False, description="Holiday flag")
    is_weekend: bool = Field(False, description="Weekend flag")

    @validator('sales_channel')
    def validate_sales_channel(cls, v):
        allowed_channels = ['in_store', 'online', 'delivery', 'wholesale']
        if v not in allowed_channels:
            raise ValueError(f'Sales channel must be one of: {allowed_channels}')
        return v

    @validator('source')
    def validate_source(cls, v):
        allowed_sources = ['manual', 'pos', 'online', 'import', 'api', 'csv']
        if v not in allowed_sources:
            raise ValueError(f'Source must be one of: {allowed_sources}')
        return v


class SalesDataCreate(SalesDataBase):
    """Schema for creating sales data"""
    tenant_id: Optional[UUID] = Field(None, description="Tenant ID (set automatically)")
    date: datetime = Field(..., description="Sale date and time")


class SalesDataUpdate(BaseModel):
    """Schema for updating sales data"""
    product_name: Optional[str] = Field(None, min_length=1, max_length=255)
    product_category: Optional[str] = Field(None, max_length=100)
    product_sku: Optional[str] = Field(None, max_length=100)

    quantity_sold: Optional[int] = Field(None, gt=0)
    unit_price: Optional[Decimal] = Field(None, ge=0)
    revenue: Optional[Decimal] = Field(None, gt=0)
    cost_of_goods: Optional[Decimal] = Field(None, ge=0)
    discount_applied: Optional[Decimal] = Field(None, ge=0, le=100)

    location_id: Optional[str] = Field(None, max_length=100)
    sales_channel: Optional[str] = None

    notes: Optional[str] = None
    weather_condition: Optional[str] = Field(None, max_length=50)
    is_holiday: Optional[bool] = None
    is_weekend: Optional[bool] = None

    validation_notes: Optional[str] = None
    is_validated: Optional[bool] = None


class SalesDataResponse(SalesDataBase):
    """Schema for sales data responses"""
    id: UUID
    tenant_id: UUID
    date: datetime

    is_validated: bool = False
    validation_notes: Optional[str] = None

    created_at: datetime
    updated_at: datetime
    created_by: Optional[UUID] = None

    profit_margin: Optional[float] = Field(None, description="Calculated profit margin")

    class Config:
        from_attributes = True


class SalesDataQuery(BaseModel):
    """Schema for sales data queries"""
    start_date: Optional[datetime] = None
    end_date: Optional[datetime] = None
    product_name: Optional[str] = None
    product_category: Optional[str] = None
    location_id: Optional[str] = None
    sales_channel: Optional[str] = None
    source: Optional[str] = None
    is_validated: Optional[bool] = None

    limit: int = Field(50, ge=1, le=1000, description="Number of records to return")
    offset: int = Field(0, ge=0, description="Number of records to skip")

    order_by: str = Field("date", description="Field to order by")
    order_direction: str = Field("desc", description="Order direction")

    @validator('order_direction')
    def validate_order_direction(cls, v):
        if v.lower() not in ['asc', 'desc']:
            raise ValueError('Order direction must be "asc" or "desc"')
        return v.lower()


# Product schemas
class ProductBase(BaseModel):
    """Base product schema"""
    name: str = Field(..., min_length=1, max_length=255, description="Product name")
    sku: Optional[str] = Field(None, max_length=100, description="Stock Keeping Unit")
    category: Optional[str] = Field(None, max_length=100, description="Product category")
    subcategory: Optional[str] = Field(None, max_length=100, description="Product subcategory")

    description: Optional[str] = Field(None, description="Product description")
    unit_of_measure: str = Field("unit", description="Unit of measure")
    weight: Optional[float] = Field(None, gt=0, description="Weight in grams")
    volume: Optional[float] = Field(None, gt=0, description="Volume in ml")

    base_price: Optional[Decimal] = Field(None, ge=0, description="Base selling price")
    cost_price: Optional[Decimal] = Field(None, ge=0, description="Cost price")

    is_seasonal: bool = Field(False, description="Seasonal product flag")
    seasonal_start: Optional[datetime] = Field(None, description="Season start date")
    seasonal_end: Optional[datetime] = Field(None, description="Season end date")


class ProductCreate(ProductBase):
    """Schema for creating products"""
    tenant_id: Optional[UUID] = Field(None, description="Tenant ID (set automatically)")


class ProductUpdate(BaseModel):
    """Schema for updating products"""
    name: Optional[str] = Field(None, min_length=1, max_length=255)
    sku: Optional[str] = Field(None, max_length=100)
    category: Optional[str] = Field(None, max_length=100)
    subcategory: Optional[str] = Field(None, max_length=100)
    description: Optional[str] = None
    unit_of_measure: Optional[str] = None
    weight: Optional[float] = Field(None, gt=0)
    volume: Optional[float] = Field(None, gt=0)
    base_price: Optional[Decimal] = Field(None, ge=0)
    cost_price: Optional[Decimal] = Field(None, ge=0)
    is_active: Optional[bool] = None
    is_seasonal: Optional[bool] = None
    seasonal_start: Optional[datetime] = None
    seasonal_end: Optional[datetime] = None


class ProductResponse(ProductBase):
    """Schema for product responses"""
    id: UUID
    tenant_id: UUID
    is_active: bool = True

    created_at: datetime
    updated_at: datetime

    class Config:
        from_attributes = True


# Analytics schemas
class SalesAnalytics(BaseModel):
    """Sales analytics response"""
    total_revenue: Decimal
    total_quantity: int
    total_transactions: int
    average_transaction_value: Decimal
    top_products: List[dict]
    sales_by_channel: dict
    sales_by_day: List[dict]


class ProductSalesAnalytics(BaseModel):
    """Product-specific sales analytics"""
    product_name: str
    total_revenue: Decimal
    total_quantity: int
    total_transactions: int
    average_price: Decimal
    growth_rate: Optional[float] = None
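Validation behavior sketch (illustrative, not part of the commit): the channel and source validators reject values outside their allow-lists, so malformed rows fail fast at schema construction rather than at the database.

# hypothetical_schema_demo.py -- illustrative only
from datetime import datetime, timezone

from pydantic import ValidationError

from app.schemas.sales import SalesDataCreate

# A well-formed record passes validation
ok = SalesDataCreate(
    product_name="Croissant",
    quantity_sold=12,
    revenue=18,
    sales_channel="in_store",
    source="csv",
    date=datetime.now(timezone.utc),
)

# An unknown channel raises ValidationError
try:
    SalesDataCreate(
        product_name="Croissant",
        quantity_sold=12,
        revenue=18,
        sales_channel="drive_thru",  # not in the allow-list
        source="csv",
        date=datetime.now(timezone.utc),
    )
except ValidationError as exc:
    print(exc.errors()[0]["msg"])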
8
services/sales/app/services/__init__.py
Normal file
8
services/sales/app/services/__init__.py
Normal file
@@ -0,0 +1,8 @@
# services/sales/app/services/__init__.py

from .sales_service import SalesService
from .product_service import ProductService
from .data_import_service import DataImportService
from .messaging import SalesEventPublisher, sales_publisher

__all__ = ["SalesService", "ProductService", "DataImportService", "SalesEventPublisher", "sales_publisher"]
943
services/sales/app/services/data_import_service.py
Normal file
943
services/sales/app/services/data_import_service.py
Normal file
@@ -0,0 +1,943 @@
|
||||
# services/sales/app/services/data_import_service.py
|
||||
"""
|
||||
Data Import Service
|
||||
Service for importing sales data using repository pattern and enhanced error handling
|
||||
"""
|
||||
|
||||
import csv
|
||||
import io
|
||||
import json
|
||||
import base64
|
||||
import pandas as pd
|
||||
from typing import Dict, Any, List, Optional, Union
|
||||
from datetime import datetime, timezone
|
||||
import structlog
|
||||
import re
|
||||
|
||||
from app.repositories.sales_repository import SalesRepository
|
||||
from app.models.sales import SalesData
|
||||
from app.schemas.sales import SalesDataCreate
|
||||
from app.core.database import get_db_transaction
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
|
||||
# Import result schemas (dataclass definitions)
|
||||
from dataclasses import dataclass
|
||||
from typing import List, Dict, Any
|
||||
|
||||
@dataclass
|
||||
class SalesValidationResult:
|
||||
is_valid: bool
|
||||
total_records: int
|
||||
valid_records: int
|
||||
invalid_records: int
|
||||
errors: List[Dict[str, Any]]
|
||||
warnings: List[Dict[str, Any]]
|
||||
summary: Dict[str, Any]
|
||||
|
||||
@dataclass
|
||||
class SalesImportResult:
|
||||
success: bool
|
||||
records_processed: int
|
||||
records_created: int
|
||||
records_updated: int
|
||||
records_failed: int
|
||||
errors: List[Dict[str, Any]]
|
||||
warnings: List[Dict[str, Any]]
|
||||
processing_time_seconds: float
|
||||
|
||||
|
||||
class DataImportService:
|
||||
"""Enhanced data import service using repository pattern with STRICT validation for production"""
|
||||
|
||||
# PRODUCTION VALIDATION CONFIGURATION
|
||||
STRICT_VALIDATION = True # Set to False for lenient validation, True for production quality
|
||||
MAX_QUANTITY_PER_DAY = 10000 # Maximum reasonable quantity per product per day
|
||||
MAX_REVENUE_PER_ITEM = 100000 # Maximum reasonable revenue per line item
|
||||
MAX_UNIT_PRICE = 10000 # Maximum reasonable price per unit for bakery items
|
||||
|
||||
# Common column mappings for different languages/formats
|
||||
COLUMN_MAPPINGS = {
|
||||
'date': ['date', 'fecha', 'datum', 'data', 'dia'],
|
||||
'datetime': ['datetime', 'fecha_hora', 'timestamp'],
|
||||
'product': ['product', 'producto', 'item', 'articulo', 'nombre', 'name'],
|
||||
'product_name': ['product_name', 'nombre_producto', 'item_name'],
|
||||
'quantity': ['quantity', 'cantidad', 'qty', 'units', 'unidades'],
|
||||
'quantity_sold': ['quantity_sold', 'cantidad_vendida', 'sold'],
|
||||
'revenue': ['revenue', 'ingresos', 'sales', 'ventas', 'total', 'importe'],
|
||||
'price': ['price', 'precio', 'cost', 'coste'],
|
||||
'location': ['location', 'ubicacion', 'tienda', 'store', 'punto_venta'],
|
||||
'location_id': ['location_id', 'store_id', 'tienda_id'],
|
||||
}
|
||||
|
||||
DATE_FORMATS = [
|
||||
'%Y-%m-%d', '%d/%m/%Y', '%m/%d/%Y', '%d-%m-%Y', '%m-%d-%Y',
|
||||
'%d.%m.%Y', '%Y/%m/%d', '%d/%m/%y', '%m/%d/%y',
|
||||
'%Y-%m-%d %H:%M:%S', '%d/%m/%Y %H:%M',
|
||||
]
|
||||
|
||||
def __init__(self):
|
||||
"""Initialize enhanced import service"""
|
||||
pass
|
||||
|
||||
async def validate_import_data(self, data: Dict[str, Any]) -> SalesValidationResult:
|
||||
"""Enhanced validation with better error handling and suggestions"""
|
||||
try:
|
||||
logger.info("Starting enhanced import data validation", tenant_id=data.get("tenant_id"))
|
||||
|
||||
validation_result = SalesValidationResult(
|
||||
is_valid=True,
|
||||
total_records=0,
|
||||
valid_records=0,
|
||||
invalid_records=0,
|
||||
errors=[],
|
||||
warnings=[],
|
||||
summary={}
|
||||
)
|
||||
|
||||
errors = []
|
||||
warnings = []
|
||||
|
||||
# Basic validation checks
|
||||
if not data.get("tenant_id"):
|
||||
errors.append({
|
||||
"type": "missing_field",
|
||||
"message": "tenant_id es requerido",
|
||||
"field": "tenant_id",
|
||||
"row": None,
|
||||
"code": "MISSING_TENANT_ID"
|
||||
})
|
||||
|
||||
if not data.get("data"):
|
||||
errors.append({
|
||||
"type": "missing_data",
|
||||
"message": "Datos de archivo faltantes",
|
||||
"field": "data",
|
||||
"row": None,
|
||||
"code": "NO_DATA_PROVIDED"
|
||||
})
|
||||
|
||||
validation_result.is_valid = False
|
||||
validation_result.errors = errors
|
||||
validation_result.summary = {
|
||||
"status": "failed",
|
||||
"reason": "no_data_provided",
|
||||
"file_format": data.get("data_format", "unknown"),
|
||||
"suggestions": ["Selecciona un archivo válido para importar"]
|
||||
}
|
||||
return validation_result
|
||||
|
||||
# Validate file format
|
||||
format_type = data.get("data_format", "").lower()
|
||||
supported_formats = ["csv", "excel", "xlsx", "xls", "json", "pos"]
|
||||
|
||||
if format_type not in supported_formats:
|
||||
errors.append({
|
||||
"type": "unsupported_format",
|
||||
"message": f"Formato no soportado: {format_type}",
|
||||
"field": "data_format",
|
||||
"row": None,
|
||||
"code": "UNSUPPORTED_FORMAT"
|
||||
})
|
||||
|
||||
# Validate data size
|
||||
data_content = data.get("data", "")
|
||||
data_size = len(data_content)
|
||||
|
||||
if data_size == 0:
|
||||
errors.append({
|
||||
"type": "empty_file",
|
||||
"message": "El archivo está vacío",
|
||||
"field": "data",
|
||||
"row": None,
|
||||
"code": "EMPTY_FILE"
|
||||
})
|
||||
elif data_size > 10 * 1024 * 1024: # 10MB limit
|
||||
errors.append({
|
||||
"type": "file_too_large",
|
||||
"message": "Archivo demasiado grande (máximo 10MB)",
|
||||
"field": "data",
|
||||
"row": None,
|
||||
"code": "FILE_TOO_LARGE"
|
||||
})
|
||||
elif data_size > 1024 * 1024: # 1MB warning
|
||||
warnings.append({
|
||||
"type": "large_file",
|
||||
"message": "Archivo grande detectado. El procesamiento puede tomar más tiempo.",
|
||||
"field": "data",
|
||||
"row": None,
|
||||
"code": "LARGE_FILE_WARNING"
|
||||
})
|
||||
|
||||
# Analyze CSV content if format is CSV
|
||||
if format_type == "csv" and data_content and not errors:
|
||||
try:
|
||||
reader = csv.DictReader(io.StringIO(data_content))
|
||||
rows = list(reader)
|
||||
|
||||
validation_result.total_records = len(rows)
|
||||
|
||||
if not rows:
|
||||
errors.append({
|
||||
"type": "empty_content",
|
||||
"message": "El archivo CSV no contiene datos",
|
||||
"field": "data",
|
||||
"row": None,
|
||||
"code": "NO_CONTENT"
|
||||
})
|
||||
else:
|
||||
# Enhanced column analysis
|
||||
headers = list(rows[0].keys()) if rows else []
|
||||
column_mapping = self._detect_columns(headers)
|
||||
|
||||
# Check for required columns
|
||||
if not column_mapping.get('date'):
|
||||
errors.append({
|
||||
"type": "missing_column",
|
||||
"message": "Columna de fecha no encontrada",
|
||||
"field": "date",
|
||||
"row": None,
|
||||
"code": "MISSING_DATE_COLUMN"
|
||||
})
|
||||
|
||||
if not column_mapping.get('product'):
|
||||
errors.append({
|
||||
"type": "missing_column",
|
||||
"message": "Columna de producto no encontrada",
|
||||
"field": "product",
|
||||
"row": None,
|
||||
"code": "MISSING_PRODUCT_COLUMN"
|
||||
})
|
||||
|
||||
if not column_mapping.get('quantity'):
|
||||
warnings.append({
|
||||
"type": "missing_column",
|
||||
"message": "Columna de cantidad no encontrada, se usará 1 por defecto",
|
||||
"field": "quantity",
|
||||
"row": None,
|
||||
"code": "MISSING_QUANTITY_COLUMN"
|
||||
})
|
||||
|
||||
# Enhanced data quality estimation
|
||||
if not errors:
|
||||
sample_size = min(10, len(rows))
|
||||
sample_rows = rows[:sample_size]
|
||||
quality_issues = 0
|
||||
|
||||
for i, row in enumerate(sample_rows):
|
||||
parsed_data = await self._parse_row_data(row, column_mapping, i + 1)
|
||||
if parsed_data.get("skip") or parsed_data.get("errors"):
|
||||
quality_issues += 1
|
||||
|
||||
estimated_error_rate = (quality_issues / sample_size) * 100 if sample_size > 0 else 0
|
||||
estimated_invalid = int(validation_result.total_records * estimated_error_rate / 100)
|
||||
|
||||
validation_result.valid_records = validation_result.total_records - estimated_invalid
|
||||
validation_result.invalid_records = estimated_invalid
|
||||
|
||||
# STRICT: Any data quality issues should fail validation for production
|
||||
if estimated_error_rate > 0:
|
||||
errors.append({
|
||||
"type": "data_quality_error",
|
||||
"message": f"Falló la validación de calidad: {estimated_error_rate:.0f}% de los datos tienen errores críticos",
|
||||
"field": "data",
|
||||
"row": None,
|
||||
"code": "DATA_QUALITY_FAILED"
|
||||
})
|
||||
|
||||
# Add specific error details
|
||||
if estimated_error_rate > 50:
|
||||
errors.append({
|
||||
"type": "data_quality_critical",
|
||||
"message": f"Calidad de datos crítica: más del 50% de los registros tienen errores",
|
||||
"field": "data",
|
||||
"row": None,
|
||||
"code": "DATA_QUALITY_CRITICAL"
|
||||
})
|
||||
elif estimated_error_rate > 20:
|
||||
errors.append({
|
||||
"type": "data_quality_high",
|
||||
"message": f"Alta tasa de errores detectada: {estimated_error_rate:.0f}% de los datos requieren corrección",
|
||||
"field": "data",
|
||||
"row": None,
|
||||
"code": "DATA_QUALITY_HIGH_ERROR_RATE"
|
||||
})
|
||||
else:
|
||||
# Even small error rates are now treated as validation failures
|
||||
errors.append({
|
||||
"type": "data_quality_detected",
|
||||
"message": f"Se detectaron errores de validación en {estimated_error_rate:.0f}% de los datos",
|
||||
"field": "data",
|
||||
"row": None,
|
||||
"code": "DATA_QUALITY_ERRORS_FOUND"
|
||||
})
|
||||
else:
|
||||
validation_result.valid_records = 0
|
||||
validation_result.invalid_records = validation_result.total_records
|
||||
|
||||
except Exception as csv_error:
|
||||
logger.warning("Enhanced CSV analysis failed", error=str(csv_error))
|
||||
warnings.append({
|
||||
"type": "analysis_warning",
|
||||
"message": f"No se pudo analizar completamente el CSV: {str(csv_error)}",
|
||||
"field": "data",
|
||||
"row": None,
|
||||
"code": "CSV_ANALYSIS_WARNING"
|
||||
})
|
||||
|
||||
# Set validation result
|
||||
validation_result.is_valid = len(errors) == 0
|
||||
validation_result.errors = errors
|
||||
validation_result.warnings = warnings
|
||||
|
||||
# Enhanced summary generation
|
||||
validation_result.summary = {
|
||||
"status": "valid" if validation_result.is_valid else "invalid",
|
||||
"file_format": format_type,
|
||||
"file_size_bytes": data_size,
|
||||
"file_size_mb": round(data_size / (1024 * 1024), 2),
|
||||
"estimated_processing_time_seconds": max(1, validation_result.total_records // 100),
|
||||
"validation_timestamp": datetime.utcnow().isoformat(),
|
||||
"detected_columns": list(self._detect_columns(list(csv.DictReader(io.StringIO(data_content)).fieldnames or [])).keys()) if format_type == "csv" and data_content else [],
|
||||
"suggestions": self._generate_suggestions(validation_result, format_type, len(warnings))
|
||||
}
|
||||
|
||||
logger.info("Enhanced import validation completed",
|
||||
is_valid=validation_result.is_valid,
|
||||
total_records=validation_result.total_records,
|
||||
error_count=len(errors),
|
||||
warning_count=len(warnings))
|
||||
|
||||
return validation_result
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Enhanced validation process failed", error=str(e))
|
||||
|
||||
return SalesValidationResult(
|
||||
is_valid=False,
|
||||
total_records=0,
|
||||
valid_records=0,
|
||||
invalid_records=0,
|
||||
errors=[{
|
||||
"type": "system_error",
|
||||
"message": f"Error en el proceso de validación: {str(e)}",
|
||||
"field": None,
|
||||
"row": None,
|
||||
"code": "SYSTEM_ERROR"
|
||||
}],
|
||||
warnings=[],
|
||||
summary={
|
||||
"status": "error",
|
||||
"file_format": data.get("data_format", "unknown"),
|
||||
"error_type": "system_error",
|
||||
"suggestions": [
|
||||
"Intenta de nuevo con un archivo diferente",
|
||||
"Contacta soporte si el problema persiste"
|
||||
]
|
||||
}
|
||||
)
|
||||
|
||||
async def process_import(
|
||||
self,
|
||||
tenant_id: str,
|
||||
content: str,
|
||||
file_format: str,
|
||||
filename: Optional[str] = None
|
||||
) -> SalesImportResult:
|
||||
"""Enhanced data import processing with better error handling"""
|
||||
start_time = datetime.utcnow()
|
||||
|
||||
try:
|
||||
logger.info("Starting enhanced data import",
|
||||
filename=filename,
|
||||
format=file_format,
|
||||
tenant_id=tenant_id)
|
||||
|
||||
async with get_db_transaction() as db:
|
||||
repository = SalesRepository(db)
|
||||
|
||||
# Process data based on format
|
||||
if file_format.lower() == 'csv':
|
||||
result = await self._process_csv_data(tenant_id, content, repository, filename)
|
||||
elif file_format.lower() == 'json':
|
||||
result = await self._process_json_data(tenant_id, content, repository, filename)
|
||||
elif file_format.lower() in ['excel', 'xlsx']:
|
||||
result = await self._process_excel_data(tenant_id, content, repository, filename)
|
||||
else:
|
||||
raise ValueError(f"Unsupported format: {file_format}")
|
||||
|
||||
# Calculate processing time
|
||||
end_time = datetime.utcnow()
|
||||
processing_time = (end_time - start_time).total_seconds()
|
||||
|
||||
# Build enhanced final result
|
||||
final_result = SalesImportResult(
|
||||
success=result.get("success", False),
|
||||
records_processed=result.get("total_rows", 0),
|
||||
records_created=result.get("records_created", 0),
|
||||
records_updated=0, # We don't update, only create
|
||||
records_failed=result.get("total_rows", 0) - result.get("records_created", 0),
|
||||
errors=self._structure_messages(result.get("errors", [])),
|
||||
warnings=self._structure_messages(result.get("warnings", [])),
|
||||
processing_time_seconds=processing_time
|
||||
)
|
||||
|
||||
logger.info("Enhanced data import completed successfully",
|
||||
records_created=final_result.records_created,
|
||||
processing_time=processing_time)
|
||||
|
||||
return final_result
|
||||
|
||||
except Exception as e:
|
||||
end_time = datetime.utcnow()
|
||||
processing_time = (end_time - start_time).total_seconds()
|
||||
|
||||
logger.error("Enhanced data import failed", error=str(e), tenant_id=tenant_id)
|
||||
|
||||
return SalesImportResult(
|
||||
success=False,
|
||||
records_processed=0,
|
||||
records_created=0,
|
||||
records_updated=0,
|
||||
records_failed=0,
|
||||
errors=[{
|
||||
"type": "import_error",
|
||||
"message": f"Import failed: {str(e)}",
|
||||
"field": None,
|
||||
"row": None,
|
||||
"code": "IMPORT_FAILURE"
|
||||
}],
|
||||
warnings=[],
|
||||
processing_time_seconds=processing_time
|
||||
)
|
||||
|
||||
async def _process_csv_data(
|
||||
self,
|
||||
tenant_id: str,
|
||||
csv_content: str,
|
||||
repository: SalesRepository,
|
||||
filename: Optional[str] = None
|
||||
) -> Dict[str, Any]:
|
||||
"""Enhanced CSV processing with better data handling"""
|
||||
try:
|
||||
reader = csv.DictReader(io.StringIO(csv_content))
|
||||
rows = list(reader)
|
||||
|
||||
if not rows:
|
||||
return {
|
||||
"success": False,
|
||||
"total_rows": 0,
|
||||
"records_created": 0,
|
||||
"errors": ["CSV file is empty"],
|
||||
"warnings": []
|
||||
}
|
||||
|
||||
# Enhanced column mapping
|
||||
column_mapping = self._detect_columns(list(rows[0].keys()))
|
||||
|
||||
records_created = 0
|
||||
errors = []
|
||||
warnings = []
|
||||
|
||||
logger.info(f"Processing {len(rows)} records from CSV with enhanced mapping")
|
||||
|
||||
for index, row in enumerate(rows):
|
||||
try:
|
||||
# Enhanced data parsing and validation
|
||||
parsed_data = await self._parse_row_data(row, column_mapping, index + 1)
|
||||
if parsed_data.get("skip"):
|
||||
errors.extend(parsed_data.get("errors", []))
|
||||
warnings.extend(parsed_data.get("warnings", []))
|
||||
continue
|
||||
|
||||
# Create sales record with enhanced data
|
||||
sales_data = SalesDataCreate(
|
||||
tenant_id=tenant_id,
|
||||
date=parsed_data["date"],
|
||||
product_name=parsed_data["product_name"],
|
||||
product_category=parsed_data.get("product_category"),
|
||||
quantity_sold=parsed_data["quantity_sold"],
|
||||
unit_price=parsed_data.get("unit_price"),
|
||||
revenue=parsed_data.get("revenue"),
|
||||
location_id=parsed_data.get("location_id"),
|
||||
source="csv"
|
||||
)
|
||||
|
||||
created_record = await repository.create_sales_record(sales_data, tenant_id)
|
||||
records_created += 1
|
||||
|
||||
# Enhanced progress logging
|
||||
if records_created % 100 == 0:
|
||||
logger.info(f"Enhanced processing: {records_created}/{len(rows)} records completed...")
|
||||
|
||||
except Exception as e:
|
||||
error_msg = f"Row {index + 1}: {str(e)}"
|
||||
errors.append(error_msg)
|
||||
logger.warning("Enhanced record processing failed", error=error_msg)
|
||||
|
||||
success_rate = (records_created / len(rows)) * 100 if rows else 0
|
||||
|
||||
return {
|
||||
"success": records_created > 0,
|
||||
"total_rows": len(rows),
|
||||
"records_created": records_created,
|
||||
"success_rate": success_rate,
|
||||
"errors": errors,
|
||||
"warnings": warnings
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Enhanced CSV processing failed", error=str(e))
|
||||
raise
|
||||
|
||||
async def _process_json_data(
|
||||
self,
|
||||
tenant_id: str,
|
||||
json_content: str,
|
||||
repository: SalesRepository,
|
||||
filename: Optional[str] = None
|
||||
) -> Dict[str, Any]:
|
||||
"""Enhanced JSON processing with pandas integration"""
|
||||
try:
|
||||
# Parse JSON with base64 support
|
||||
if json_content.startswith('data:'):
|
||||
json_content = base64.b64decode(json_content.split(',')[1]).decode('utf-8')
|
||||
|
||||
data = json.loads(json_content)
|
||||
|
||||
# Handle different JSON structures
|
||||
if isinstance(data, dict):
|
||||
if 'data' in data:
|
||||
records = data['data']
|
||||
elif 'records' in data:
|
||||
records = data['records']
|
||||
elif 'sales' in data:
|
||||
records = data['sales']
|
||||
else:
|
||||
records = [data] # Single record
|
||||
elif isinstance(data, list):
|
||||
records = data
|
||||
else:
|
||||
raise ValueError("Invalid JSON format")
|
||||
|
||||
# Convert to DataFrame for enhanced processing
|
||||
if records:
|
||||
df = pd.DataFrame(records)
|
||||
df.columns = df.columns.str.strip().str.lower()
|
||||
|
||||
return await self._process_dataframe(tenant_id, df, repository, "json", filename)
|
||||
else:
|
||||
return {
|
||||
"success": False,
|
||||
"total_rows": 0,
|
||||
"records_created": 0,
|
||||
"errors": ["No records found in JSON"],
|
||||
"warnings": []
|
||||
}
|
||||
|
||||
except json.JSONDecodeError as e:
|
||||
raise ValueError(f"Invalid JSON: {str(e)}")
|
||||
except Exception as e:
|
||||
logger.error("Enhanced JSON processing failed", error=str(e))
|
||||
raise
|
||||
|
||||
async def _process_excel_data(
|
||||
self,
|
||||
tenant_id: str,
|
||||
excel_content: str,
|
||||
repository: SalesRepository,
|
||||
filename: Optional[str] = None
|
||||
) -> Dict[str, Any]:
|
||||
"""Enhanced Excel processing with base64 support"""
|
||||
try:
|
||||
# Decode base64 content
|
||||
if excel_content.startswith('data:'):
|
||||
excel_bytes = base64.b64decode(excel_content.split(',')[1])
|
||||
else:
|
||||
excel_bytes = base64.b64decode(excel_content)
|
||||
|
||||
# Read Excel file with pandas
|
||||
df = pd.read_excel(io.BytesIO(excel_bytes), sheet_name=0)
|
||||
|
||||
# Enhanced column cleaning
|
||||
df.columns = df.columns.str.strip().str.lower()
|
||||
|
||||
# Remove empty rows
|
||||
df = df.dropna(how='all')
|
||||
|
||||
return await self._process_dataframe(tenant_id, df, repository, "excel", filename)
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Enhanced Excel processing failed", error=str(e))
|
||||
raise
|
||||
|
||||
async def _process_dataframe(
|
||||
self,
|
||||
tenant_id: str,
|
||||
df: pd.DataFrame,
|
||||
repository: SalesRepository,
|
||||
source: str,
|
||||
filename: Optional[str] = None
|
||||
) -> Dict[str, Any]:
|
||||
"""Enhanced DataFrame processing with better error handling"""
|
||||
try:
|
||||
# Enhanced column mapping
|
||||
column_mapping = self._detect_columns(df.columns.tolist())
|
||||
|
||||
if not column_mapping.get('date') or not column_mapping.get('product'):
|
||||
required_missing = []
|
||||
if not column_mapping.get('date'):
|
||||
required_missing.append("date")
|
||||
if not column_mapping.get('product'):
|
||||
required_missing.append("product")
|
||||
|
||||
raise ValueError(f"Required columns missing: {', '.join(required_missing)}")
|
||||
|
||||
records_created = 0
|
||||
errors = []
|
||||
warnings = []
|
||||
|
||||
logger.info(f"Enhanced processing of {len(df)} records from {source}")
|
||||
|
||||
for index, row in df.iterrows():
|
||||
try:
|
||||
# Convert pandas row to dict
|
||||
row_dict = {}
|
||||
for col in df.columns:
|
||||
val = row[col]
|
||||
# Handle pandas NaN values
|
||||
if pd.isna(val):
|
||||
row_dict[col] = None
|
||||
else:
|
||||
row_dict[col] = val
|
||||
|
||||
# Enhanced data parsing
|
||||
parsed_data = await self._parse_row_data(row_dict, column_mapping, index + 1)
|
||||
if parsed_data.get("skip"):
|
||||
errors.extend(parsed_data.get("errors", []))
|
||||
warnings.extend(parsed_data.get("warnings", []))
|
||||
continue
|
||||
|
||||
# Create enhanced sales record
|
||||
sales_data = SalesDataCreate(
|
||||
tenant_id=tenant_id,
|
||||
date=parsed_data["date"],
|
||||
product_name=parsed_data["product_name"],
|
||||
product_category=parsed_data.get("product_category"),
|
||||
quantity_sold=parsed_data["quantity_sold"],
|
||||
unit_price=parsed_data.get("unit_price"),
|
||||
revenue=parsed_data.get("revenue"),
|
||||
location_id=parsed_data.get("location_id"),
|
||||
source=source
|
||||
)
|
||||
|
||||
created_record = await repository.create_sales_record(sales_data, tenant_id)
|
||||
records_created += 1
|
||||
|
||||
# Progress logging for large datasets
|
||||
if records_created % 100 == 0:
|
||||
logger.info(f"Enhanced DataFrame processing: {records_created}/{len(df)} records completed...")
|
||||
|
||||
except Exception as e:
|
||||
error_msg = f"Row {index + 1}: {str(e)}"
|
||||
errors.append(error_msg)
|
||||
logger.warning("Enhanced record processing failed", error=error_msg)
|
||||
|
||||
success_rate = (records_created / len(df)) * 100 if len(df) > 0 else 0
|
||||
|
||||
return {
|
||||
"success": records_created > 0,
|
||||
"total_rows": len(df),
|
||||
"records_created": records_created,
|
||||
"success_rate": success_rate,
|
||||
"errors": errors[:10], # Limit errors for performance
|
||||
"warnings": warnings[:10] # Limit warnings
|
||||
}
|
||||
|
||||
except ValueError:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error("Enhanced DataFrame processing failed", error=str(e))
|
||||
raise
|
||||
|
||||
async def _parse_row_data(
|
||||
self,
|
||||
row: Dict[str, Any],
|
||||
column_mapping: Dict[str, str],
|
||||
row_number: int
|
||||
) -> Dict[str, Any]:
|
||||
"""Enhanced row data parsing with better validation"""
|
||||
errors = []
|
||||
warnings = []
|
||||
|
||||
try:
|
||||
# Enhanced date extraction and validation
|
||||
date_str = str(row.get(column_mapping.get('date', ''), '')).strip()
|
||||
if not date_str or date_str.lower() in ['nan', 'null', 'none', '']:
|
||||
errors.append(f"Row {row_number}: Missing date")
|
||||
return {"skip": True, "errors": errors, "warnings": warnings}
|
||||
|
||||
parsed_date = self._parse_date(date_str)
|
||||
if not parsed_date:
|
||||
errors.append(f"Row {row_number}: Invalid date format: {date_str}")
|
||||
return {"skip": True, "errors": errors, "warnings": warnings}
|
||||
|
||||
# Enhanced product name extraction and cleaning
|
||||
product_name = str(row.get(column_mapping.get('product', ''), '')).strip()
|
||||
if not product_name or product_name.lower() in ['nan', 'null', 'none', '']:
|
||||
errors.append(f"Row {row_number}: Missing product name")
|
||||
return {"skip": True, "errors": errors, "warnings": warnings}
|
||||
|
||||
product_name = self._clean_product_name(product_name)
|
||||
|
||||
# STRICT quantity validation for production data quality
|
||||
quantity_raw = row.get(column_mapping.get('quantity', 'quantity'), 1)
|
||||
try:
|
||||
if pd.isna(quantity_raw):
|
||||
# Allow default quantity of 1 for missing values
|
||||
quantity = 1
|
||||
else:
|
||||
quantity = int(float(str(quantity_raw).replace(',', '.')))
|
||||
if quantity <= 0:
|
||||
# STRICT: Treat invalid quantities as ERRORS, not warnings
|
||||
errors.append(f"Row {row_number}: Invalid quantity ({quantity}) - quantities must be positive")
|
||||
return {"skip": True, "errors": errors, "warnings": warnings}
|
||||
elif self.STRICT_VALIDATION and quantity > self.MAX_QUANTITY_PER_DAY:
|
||||
# STRICT: Check for unrealistic quantities
|
||||
errors.append(f"Row {row_number}: Unrealistic quantity ({quantity}) - exceeds maximum expected daily sales ({self.MAX_QUANTITY_PER_DAY})")
|
||||
return {"skip": True, "errors": errors, "warnings": warnings}
|
||||
except (ValueError, TypeError):
|
||||
# STRICT: Treat non-numeric quantities as ERRORS
|
||||
errors.append(f"Row {row_number}: Invalid quantity format ({quantity_raw}) - must be a positive number")
|
||||
return {"skip": True, "errors": errors, "warnings": warnings}
|
||||
|
||||
# Enhanced revenue extraction
|
||||
revenue = None
|
||||
unit_price = None
|
||||
if 'revenue' in column_mapping and column_mapping['revenue'] in row:
|
||||
revenue_raw = row.get(column_mapping['revenue'])
|
||||
if revenue_raw and not pd.isna(revenue_raw) and str(revenue_raw).lower() not in ['nan', 'null', 'none', '']:
|
||||
try:
|
||||
revenue = float(str(revenue_raw).replace(',', '.').replace('€', '').replace('$', '').strip())
|
||||
if revenue < 0:
|
||||
# STRICT: Treat negative revenue as ERROR, not warning
|
||||
errors.append(f"Row {row_number}: Negative revenue ({revenue}) - revenue must be positive or zero")
|
||||
return {"skip": True, "errors": errors, "warnings": warnings}
|
||||
else:
|
||||
# STRICT: Check for unrealistic revenue values
|
||||
if self.STRICT_VALIDATION and revenue > self.MAX_REVENUE_PER_ITEM:
|
||||
errors.append(f"Row {row_number}: Unrealistic revenue ({revenue}) - exceeds maximum expected value ({self.MAX_REVENUE_PER_ITEM})")
|
||||
return {"skip": True, "errors": errors, "warnings": warnings}
|
||||
|
||||
# Calculate unit price if we have both revenue and quantity
|
||||
unit_price = revenue / quantity if quantity > 0 else None
|
||||
|
||||
# STRICT: Validate unit price reasonableness
|
||||
if unit_price and unit_price > 10000: # More than €10,000 per unit seems unrealistic for bakery
|
||||
errors.append(f"Row {row_number}: Unrealistic unit price ({unit_price:.2f}) - check quantity and revenue values")
|
||||
return {"skip": True, "errors": errors, "warnings": warnings}
|
||||
except (ValueError, TypeError):
|
||||
# STRICT: Treat invalid revenue format as ERROR
|
||||
errors.append(f"Row {row_number}: Invalid revenue format ({revenue_raw}) - must be a valid number")
|
||||
return {"skip": True, "errors": errors, "warnings": warnings}
|
||||
|
||||
# Enhanced location extraction
|
||||
location_id = None
|
||||
if 'location' in column_mapping and column_mapping['location'] in row:
|
||||
location_raw = row.get(column_mapping['location'])
|
||||
if location_raw and not pd.isna(location_raw) and str(location_raw).lower() not in ['nan', 'null', 'none', '']:
|
||||
location_id = str(location_raw).strip()
|
||||
|
||||
# Enhanced product category extraction
|
||||
product_category = None
|
||||
if 'category' in column_mapping and column_mapping['category'] in row:
|
||||
category_raw = row.get(column_mapping['category'])
|
||||
if category_raw and not pd.isna(category_raw) and str(category_raw).lower() not in ['nan', 'null', 'none', '']:
|
||||
product_category = str(category_raw).strip()
|
||||
|
||||
return {
|
||||
"skip": False,
|
||||
"date": parsed_date,
|
||||
"product_name": product_name,
|
||||
"product_category": product_category,
|
||||
"quantity_sold": quantity,
|
||||
"unit_price": unit_price,
|
||||
"revenue": revenue,
|
||||
"location_id": location_id,
|
||||
"errors": errors,
|
||||
"warnings": warnings
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
errors.append(f"Row {row_number}: Enhanced parsing error: {str(e)}")
|
||||
return {"skip": True, "errors": errors, "warnings": warnings}
|
||||
|
||||
def _detect_columns(self, columns: List[str]) -> Dict[str, str]:
|
||||
"""Enhanced column detection with fuzzy matching"""
|
||||
mapping = {}
|
||||
columns_lower = [col.lower().strip() for col in columns]
|
||||
|
||||
for standard_name, possible_names in self.COLUMN_MAPPINGS.items():
|
||||
best_match = None
|
||||
best_score = 0
|
||||
|
||||
for col_idx, col in enumerate(columns_lower):
|
||||
for possible in possible_names:
|
||||
# Exact match (highest priority)
|
||||
if possible == col:
|
||||
best_match = columns[col_idx]
|
||||
best_score = 100
|
||||
break
|
||||
# Contains match
|
||||
elif possible in col or col in possible:
|
||||
score = len(possible) / len(col) * 90
|
||||
if score > best_score:
|
||||
best_match = columns[col_idx]
|
||||
best_score = score
|
||||
|
||||
if best_score == 100: # Found exact match
|
||||
break
|
||||
|
||||
if best_match and best_score > 70: # Threshold for matches
|
||||
mapping[standard_name] = best_match
|
||||
|
||||
# Enhanced alias mapping
|
||||
if 'product' not in mapping and 'product_name' in mapping:
|
||||
mapping['product'] = mapping['product_name']
|
||||
if 'quantity' not in mapping and 'quantity_sold' in mapping:
|
||||
mapping['quantity'] = mapping['quantity_sold']
|
||||
        if 'location' not in mapping and 'location_id' in mapping:
            mapping['location'] = mapping['location_id']

        return mapping

    def _parse_date(self, date_str: str) -> Optional[datetime]:
        """Enhanced date parsing with pandas and multiple format support"""
        if not date_str or str(date_str).lower() in ['nan', 'null', 'none']:
            return None

        date_str = str(date_str).strip()

        # Try pandas first (most robust)
        try:
            parsed_dt = pd.to_datetime(date_str, dayfirst=True)
            if hasattr(parsed_dt, 'to_pydatetime'):
                parsed_dt = parsed_dt.to_pydatetime()

            if parsed_dt.tzinfo is None:
                parsed_dt = parsed_dt.replace(tzinfo=timezone.utc)

            return parsed_dt
        except Exception:
            pass

        # Try specific formats as fallback
        for fmt in self.DATE_FORMATS:
            try:
                parsed_dt = datetime.strptime(date_str, fmt)
                if parsed_dt.tzinfo is None:
                    parsed_dt = parsed_dt.replace(tzinfo=timezone.utc)
                return parsed_dt
            except ValueError:
                continue

        logger.warning(f"Could not parse date: {date_str}")
        return None

    def _clean_product_name(self, product_name: Optional[str]) -> str:
        """Enhanced product name cleaning and standardization"""
        if not product_name:
            return "Producto sin nombre"

        # Remove extra whitespace
        cleaned = re.sub(r'\s+', ' ', str(product_name).strip())

        # Remove special characters but keep Spanish characters
        cleaned = re.sub(r'[^\w\s\-áéíóúñçüÁÉÍÓÚÑÇÜ]', '', cleaned)

        # Capitalize first letter of each word
        cleaned = cleaned.title()

        # Enhanced corrections for Spanish bakeries
        replacements = {
            'Pan De': 'Pan de',
            'Café Con': 'Café con',
            'Te ': 'Té ',
            'Bocadillo De': 'Bocadillo de',
            'Dulce De': 'Dulce de',
            'Tarta De': 'Tarta de',
        }

        for old, new in replacements.items():
            cleaned = cleaned.replace(old, new)

        return cleaned if cleaned else "Producto sin nombre"

    def _structure_messages(self, messages: List[Union[str, Dict]]) -> List[Dict[str, Any]]:
        """Convert string messages to structured format"""
        structured = []
        for msg in messages:
            if isinstance(msg, str):
                structured.append({
                    "type": "general_message",
                    "message": msg,
                    "field": None,
                    "row": None,
                    "code": "GENERAL_MESSAGE"
                })
            else:
                structured.append(msg)
        return structured

    def _generate_suggestions(
        self,
        validation_result: SalesValidationResult,
        format_type: str,
        warning_count: int
    ) -> List[str]:
        """Generate enhanced contextual suggestions"""
        suggestions = []

        if validation_result.is_valid:
            suggestions.append("El archivo está listo para procesamiento")
            suggestions.append(f"Se procesarán aproximadamente {validation_result.total_records} registros")

            if validation_result.total_records > 1000:
                suggestions.append("Archivo grande: el procesamiento puede tomar varios minutos")
                suggestions.append("Considera dividir archivos muy grandes para mejor rendimiento")

            if warning_count > 0:
                suggestions.append("Revisa las advertencias antes de continuar")
                suggestions.append("Los datos con advertencias se procesarán con valores por defecto")

            # Format-specific suggestions
            if format_type == "csv":
                suggestions.append("Asegúrate de que las fechas estén en formato DD/MM/YYYY")
                suggestions.append("Verifica que los números usen punto decimal (no coma)")
            elif format_type in ["excel", "xlsx"]:
                suggestions.append("Solo se procesará la primera hoja del archivo Excel")
                suggestions.append("Evita celdas combinadas y fórmulas complejas")
        else:
            suggestions.append("Corrige los errores antes de continuar")
            suggestions.append("Verifica que el archivo tenga el formato correcto")

        if format_type not in ["csv", "excel", "xlsx", "json"]:
            suggestions.append("Usa formato CSV o Excel para mejores resultados")
            suggestions.append("El formato JSON es para usuarios avanzados")

        if validation_result.total_records == 0:
            suggestions.append("Asegúrate de que el archivo contenga datos")
            suggestions.append("Verifica que el archivo no esté corrupto")

        # Missing column suggestions
        error_codes = [error.get("code", "") for error in validation_result.errors if isinstance(error, dict)]
        if "MISSING_DATE_COLUMN" in error_codes:
            suggestions.append("Incluye una columna de fecha (fecha, date, dia)")
        if "MISSING_PRODUCT_COLUMN" in error_codes:
            suggestions.append("Incluye una columna de producto (producto, product, item)")

        return suggestions


# Main DataImportService class with enhanced functionality
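For reference, the fallback chain that _parse_date implements can be exercised standalone; a minimal sketch, where DATE_FORMATS is an assumed subset of the class constant:

# Standalone sketch of the pandas-first, strptime-fallback parsing above.
from datetime import datetime, timezone
import pandas as pd

DATE_FORMATS = ["%Y-%m-%d", "%d/%m/%Y", "%d-%m-%Y"]  # assumed subset of self.DATE_FORMATS

def parse_date(date_str):
    # First attempt: pandas, which accepts many layouts (dayfirst for ES dates)
    try:
        dt = pd.to_datetime(date_str, dayfirst=True).to_pydatetime()
        return dt if dt.tzinfo else dt.replace(tzinfo=timezone.utc)
    except Exception:
        pass
    # Second attempt: explicit formats, normalized to UTC
    for fmt in DATE_FORMATS:
        try:
            return datetime.strptime(date_str, fmt).replace(tzinfo=timezone.utc)
        except ValueError:
            continue
    return None

print(parse_date("15/01/2024"))  # 2024-01-15 00:00:00+00:00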
232
services/sales/app/services/messaging.py
Normal file
@@ -0,0 +1,232 @@
# services/sales/app/services/messaging.py
"""
Sales Service Messaging - Event Publishing using shared messaging infrastructure
"""

import structlog
from typing import Dict, Any, Optional
from uuid import UUID
from datetime import datetime, timezone

from shared.messaging.rabbitmq import RabbitMQClient
from shared.messaging.events import BaseEvent, DataImportedEvent
from app.core.config import settings

logger = structlog.get_logger()


class SalesEventPublisher:
    """Sales service event publisher using RabbitMQ"""

    def __init__(self):
        self.enabled = True
        self._rabbitmq_client = None

    async def _get_rabbitmq_client(self):
        """Get or create the RabbitMQ client, connecting only on first use"""
        if not self._rabbitmq_client:
            self._rabbitmq_client = RabbitMQClient(
                connection_url=settings.RABBITMQ_URL,
                service_name="sales-service"
            )
            await self._rabbitmq_client.connect()
        return self._rabbitmq_client

    async def publish_sales_created(self, sales_data: Dict[str, Any], correlation_id: Optional[str] = None) -> bool:
        """Publish sales created event"""
        try:
            if not self.enabled:
                return True

            # Create event (timestamps are UTC for cross-service consistency)
            event = BaseEvent(
                service_name="sales-service",
                data={
                    "record_id": str(sales_data.get("id")),
                    "tenant_id": str(sales_data.get("tenant_id")),
                    "product_name": sales_data.get("product_name"),
                    "revenue": float(sales_data.get("revenue", 0)),
                    "quantity_sold": sales_data.get("quantity_sold", 0),
                    "timestamp": datetime.now(timezone.utc).isoformat()
                },
                event_type="sales.created",
                correlation_id=correlation_id
            )

            # Publish via RabbitMQ
            client = await self._get_rabbitmq_client()
            success = await client.publish_event(
                exchange_name="sales.events",
                routing_key="sales.created",
                event_data=event.to_dict()
            )

            if success:
                logger.info("Sales record created event published",
                            record_id=sales_data.get("id"),
                            tenant_id=sales_data.get("tenant_id"),
                            product=sales_data.get("product_name"))

            return success

        except Exception as e:
            logger.warning("Failed to publish sales created event", error=str(e))
            return False

    async def publish_sales_updated(self, sales_data: Dict[str, Any], correlation_id: Optional[str] = None) -> bool:
        """Publish sales updated event"""
        try:
            if not self.enabled:
                return True

            event = BaseEvent(
                service_name="sales-service",
                data={
                    "record_id": str(sales_data.get("id")),
                    "tenant_id": str(sales_data.get("tenant_id")),
                    "product_name": sales_data.get("product_name"),
                    "timestamp": datetime.now(timezone.utc).isoformat()
                },
                event_type="sales.updated",
                correlation_id=correlation_id
            )

            client = await self._get_rabbitmq_client()
            success = await client.publish_event(
                exchange_name="sales.events",
                routing_key="sales.updated",
                event_data=event.to_dict()
            )

            if success:
                logger.info("Sales record updated event published",
                            record_id=sales_data.get("id"),
                            tenant_id=sales_data.get("tenant_id"))

            return success

        except Exception as e:
            logger.warning("Failed to publish sales updated event", error=str(e))
            return False

    async def publish_sales_deleted(self, record_id: UUID, tenant_id: UUID, correlation_id: Optional[str] = None) -> bool:
        """Publish sales deleted event"""
        try:
            if not self.enabled:
                return True

            event = BaseEvent(
                service_name="sales-service",
                data={
                    "record_id": str(record_id),
                    "tenant_id": str(tenant_id),
                    "timestamp": datetime.now(timezone.utc).isoformat()
                },
                event_type="sales.deleted",
                correlation_id=correlation_id
            )

            client = await self._get_rabbitmq_client()
            success = await client.publish_event(
                exchange_name="sales.events",
                routing_key="sales.deleted",
                event_data=event.to_dict()
            )

            if success:
                logger.info("Sales record deleted event published",
                            record_id=record_id,
                            tenant_id=tenant_id)

            return success

        except Exception as e:
            logger.warning("Failed to publish sales deleted event", error=str(e))
            return False

    async def publish_data_imported(self, import_result: Dict[str, Any], correlation_id: Optional[str] = None) -> bool:
        """Publish data imported event"""
        try:
            if not self.enabled:
                return True

            event = DataImportedEvent(
                service_name="sales-service",
                data={
                    "records_created": import_result.get("records_created", 0),
                    "records_updated": import_result.get("records_updated", 0),
                    "records_failed": import_result.get("records_failed", 0),
                    "tenant_id": str(import_result.get("tenant_id")),
                    "success": import_result.get("success", False),
                    "file_name": import_result.get("file_name"),
                    "timestamp": datetime.now(timezone.utc).isoformat()
                },
                correlation_id=correlation_id
            )

            client = await self._get_rabbitmq_client()
            success = await client.publish_event(
                exchange_name="data.events",
                routing_key="data.imported",
                event_data=event.to_dict()
            )

            if success:
                logger.info("Sales data imported event published",
                            records_created=import_result.get("records_created"),
                            tenant_id=import_result.get("tenant_id"),
                            success=import_result.get("success"))

            return success

        except Exception as e:
            logger.warning("Failed to publish data imported event", error=str(e))
            return False

    async def publish_analytics_generated(self, analytics_data: Dict[str, Any], correlation_id: Optional[str] = None) -> bool:
        """Publish analytics generated event"""
        try:
            if not self.enabled:
                return True

            event = BaseEvent(
                service_name="sales-service",
                data={
                    "tenant_id": str(analytics_data.get("tenant_id")),
                    "total_revenue": float(analytics_data.get("total_revenue", 0)),
                    "total_quantity": analytics_data.get("total_quantity", 0),
                    "total_transactions": analytics_data.get("total_transactions", 0),
                    "period_start": analytics_data.get("period_start"),
                    "period_end": analytics_data.get("period_end"),
                    "timestamp": datetime.now(timezone.utc).isoformat()
                },
                event_type="analytics.generated",
                correlation_id=correlation_id
            )

            client = await self._get_rabbitmq_client()
            success = await client.publish_event(
                exchange_name="analytics.events",
                routing_key="analytics.generated",
                event_data=event.to_dict()
            )

            if success:
                logger.info("Sales analytics generated event published",
                            tenant_id=analytics_data.get("tenant_id"),
                            total_revenue=analytics_data.get("total_revenue"))

            return success

        except Exception as e:
            logger.warning("Failed to publish analytics generated event", error=str(e))
            return False

    async def cleanup(self):
        """Cleanup RabbitMQ connections"""
        if self._rabbitmq_client:
            await self._rabbitmq_client.disconnect()


# Global instance
sales_publisher = SalesEventPublisher()
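A hedged usage sketch for the publisher above; it assumes a broker reachable at settings.RABBITMQ_URL, and the payload keys mirror what publish_sales_created reads (the values are illustrative):

# Illustrative call site; requires a running RabbitMQ, so treat as a sketch.
import asyncio
from uuid import uuid4

async def demo():
    record = {
        "id": uuid4(),
        "tenant_id": uuid4(),
        "product_name": "Pan Integral",
        "revenue": 12.50,
        "quantity_sold": 5,
    }
    ok = await sales_publisher.publish_sales_created(record, correlation_id="req-123")
    print("published:", ok)
    await sales_publisher.cleanup()

asyncio.run(demo())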
171
services/sales/app/services/product_service.py
Normal file
@@ -0,0 +1,171 @@
# services/sales/app/services/product_service.py
"""
Product Service - Business Logic Layer
"""

from typing import List, Optional, Dict, Any
from uuid import UUID
from datetime import datetime
import structlog

from app.models.sales import Product
from app.repositories.product_repository import ProductRepository
from app.schemas.sales import ProductCreate, ProductUpdate
from app.core.database import get_db_transaction

logger = structlog.get_logger()


class ProductService:
    """Service layer for product operations"""

    def __init__(self):
        pass

    async def create_product(
        self,
        product_data: ProductCreate,
        tenant_id: UUID,
        user_id: Optional[UUID] = None
    ) -> Product:
        """Create a new product with business validation"""
        try:
            # Business validation
            await self._validate_product_data(product_data, tenant_id)

            async with get_db_transaction() as db:
                repository = ProductRepository(db)
                product = await repository.create_product(product_data, tenant_id)

                logger.info("Created product", product_id=product.id, tenant_id=tenant_id)
                return product

        except Exception as e:
            logger.error("Failed to create product", error=str(e), tenant_id=tenant_id)
            raise

    async def update_product(
        self,
        product_id: UUID,
        update_data: ProductUpdate,
        tenant_id: UUID
    ) -> Product:
        """Update a product"""
        try:
            async with get_db_transaction() as db:
                repository = ProductRepository(db)

                # Verify product belongs to tenant
                existing_product = await repository.get_by_id(product_id)
                if not existing_product or existing_product.tenant_id != tenant_id:
                    raise ValueError(f"Product {product_id} not found for tenant {tenant_id}")

                # Update the product
                updated_product = await repository.update(product_id, update_data.model_dump(exclude_unset=True))

                logger.info("Updated product", product_id=product_id, tenant_id=tenant_id)
                return updated_product

        except Exception as e:
            logger.error("Failed to update product", error=str(e), product_id=product_id, tenant_id=tenant_id)
            raise

    async def get_products(self, tenant_id: UUID) -> List[Product]:
        """Get all products for a tenant"""
        try:
            async with get_db_transaction() as db:
                repository = ProductRepository(db)
                products = await repository.get_by_tenant(tenant_id)

                logger.info("Retrieved products", count=len(products), tenant_id=tenant_id)
                return products

        except Exception as e:
            logger.error("Failed to get products", error=str(e), tenant_id=tenant_id)
            raise

    async def get_product(self, product_id: UUID, tenant_id: UUID) -> Optional[Product]:
        """Get a specific product"""
        try:
            async with get_db_transaction() as db:
                repository = ProductRepository(db)
                product = await repository.get_by_id(product_id)

                # Verify product belongs to tenant
                if product and product.tenant_id != tenant_id:
                    return None

                return product

        except Exception as e:
            logger.error("Failed to get product", error=str(e), product_id=product_id, tenant_id=tenant_id)
            raise

    async def delete_product(self, product_id: UUID, tenant_id: UUID) -> bool:
        """Delete a product"""
        try:
            async with get_db_transaction() as db:
                repository = ProductRepository(db)

                # Verify product belongs to tenant
                existing_product = await repository.get_by_id(product_id)
                if not existing_product or existing_product.tenant_id != tenant_id:
                    raise ValueError(f"Product {product_id} not found for tenant {tenant_id}")

                success = await repository.delete(product_id)

                if success:
                    logger.info("Deleted product", product_id=product_id, tenant_id=tenant_id)

                return success

        except Exception as e:
            logger.error("Failed to delete product", error=str(e), product_id=product_id, tenant_id=tenant_id)
            raise

    async def get_products_by_category(self, tenant_id: UUID, category: str) -> List[Product]:
        """Get products by category"""
        try:
            async with get_db_transaction() as db:
                repository = ProductRepository(db)
                products = await repository.get_by_category(tenant_id, category)

                logger.info("Retrieved products by category", count=len(products), category=category, tenant_id=tenant_id)
                return products

        except Exception as e:
            logger.error("Failed to get products by category", error=str(e), category=category, tenant_id=tenant_id)
            raise

    async def search_products(self, tenant_id: UUID, search_term: str) -> List[Product]:
        """Search products by name or SKU"""
        try:
            async with get_db_transaction() as db:
                repository = ProductRepository(db)
                products = await repository.search_products(tenant_id, search_term)

                logger.info("Searched products", count=len(products), search_term=search_term, tenant_id=tenant_id)
                return products

        except Exception as e:
            logger.error("Failed to search products", error=str(e), search_term=search_term, tenant_id=tenant_id)
            raise

    async def _validate_product_data(self, product_data: ProductCreate, tenant_id: UUID):
        """Validate product data according to business rules"""
        # Check if a product with the same SKU already exists
        if product_data.sku:
            async with get_db_transaction() as db:
                repository = ProductRepository(db)
                existing_product = await repository.get_by_sku(tenant_id, product_data.sku)
                if existing_product:
                    raise ValueError(f"Product with SKU {product_data.sku} already exists for tenant {tenant_id}")

        # Validate seasonal dates
        if product_data.is_seasonal:
            if not product_data.seasonal_start or not product_data.seasonal_end:
                raise ValueError("Seasonal products must have start and end dates")
            if product_data.seasonal_start >= product_data.seasonal_end:
                raise ValueError("Seasonal start date must be before end date")

        logger.info("Product data validation passed", tenant_id=tenant_id)
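As a worked example of the seasonal-date rule in _validate_product_data above (field names follow ProductCreate; everything else here is an assumed, self-contained sketch):

# Minimal illustration of the seasonal-date business rule enforced above.
from datetime import date
from typing import Optional

def check_seasonal(is_seasonal: bool, start: Optional[date], end: Optional[date]) -> None:
    # Seasonal products must carry both dates, in chronological order
    if is_seasonal:
        if not start or not end:
            raise ValueError("Seasonal products must have start and end dates")
        if start >= end:
            raise ValueError("Seasonal start date must be before end date")

check_seasonal(True, date(2024, 12, 1), date(2025, 1, 6))   # ok: Christmas season
# check_seasonal(True, date(2025, 1, 6), date(2024, 12, 1)) # would raise ValueError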
282
services/sales/app/services/sales_service.py
Normal file
@@ -0,0 +1,282 @@
# services/sales/app/services/sales_service.py
"""
Sales Service - Business Logic Layer
"""

from typing import List, Optional, Dict, Any
from uuid import UUID
from datetime import datetime, timezone
import structlog

from app.models.sales import SalesData
from app.repositories.sales_repository import SalesRepository
from app.schemas.sales import SalesDataCreate, SalesDataUpdate, SalesDataQuery, SalesAnalytics
from app.core.database import get_db_transaction
from shared.database.exceptions import DatabaseError

logger = structlog.get_logger()


class SalesService:
    """Service layer for sales operations"""

    def __init__(self):
        pass

    async def create_sales_record(
        self,
        sales_data: SalesDataCreate,
        tenant_id: UUID,
        user_id: Optional[UUID] = None
    ) -> SalesData:
        """Create a new sales record with business validation"""
        try:
            # Business validation
            await self._validate_sales_data(sales_data, tenant_id)

            # Set user who created the record
            if user_id:
                sales_data_dict = sales_data.model_dump()
                sales_data_dict['created_by'] = user_id
                sales_data = SalesDataCreate(**sales_data_dict)

            async with get_db_transaction() as db:
                repository = SalesRepository(db)
                record = await repository.create_sales_record(sales_data, tenant_id)

                # Additional business logic (e.g., notifications, analytics updates)
                await self._post_create_actions(record)

                return record

        except Exception as e:
            logger.error("Failed to create sales record in service", error=str(e), tenant_id=tenant_id)
            raise

    async def update_sales_record(
        self,
        record_id: UUID,
        update_data: SalesDataUpdate,
        tenant_id: UUID
    ) -> SalesData:
        """Update a sales record"""
        try:
            async with get_db_transaction() as db:
                repository = SalesRepository(db)

                # Verify record belongs to tenant
                existing_record = await repository.get_by_id(record_id)
                if not existing_record or existing_record.tenant_id != tenant_id:
                    raise ValueError(f"Sales record {record_id} not found for tenant {tenant_id}")

                # Update the record
                updated_record = await repository.update(record_id, update_data.model_dump(exclude_unset=True))

                logger.info("Updated sales record", record_id=record_id, tenant_id=tenant_id)
                return updated_record

        except Exception as e:
            logger.error("Failed to update sales record", error=str(e), record_id=record_id, tenant_id=tenant_id)
            raise

    async def get_sales_records(
        self,
        tenant_id: UUID,
        query_params: Optional[SalesDataQuery] = None
    ) -> List[SalesData]:
        """Get sales records for a tenant"""
        try:
            async with get_db_transaction() as db:
                repository = SalesRepository(db)
                records = await repository.get_by_tenant(tenant_id, query_params)

                logger.info("Retrieved sales records", count=len(records), tenant_id=tenant_id)
                return records

        except Exception as e:
            logger.error("Failed to get sales records", error=str(e), tenant_id=tenant_id)
            raise

    async def get_sales_record(self, record_id: UUID, tenant_id: UUID) -> Optional[SalesData]:
        """Get a specific sales record"""
        try:
            async with get_db_transaction() as db:
                repository = SalesRepository(db)
                record = await repository.get_by_id(record_id)

                # Verify record belongs to tenant
                if record and record.tenant_id != tenant_id:
                    return None

                return record

        except Exception as e:
            logger.error("Failed to get sales record", error=str(e), record_id=record_id, tenant_id=tenant_id)
            raise

    async def delete_sales_record(self, record_id: UUID, tenant_id: UUID) -> bool:
        """Delete a sales record"""
        try:
            async with get_db_transaction() as db:
                repository = SalesRepository(db)

                # Verify record belongs to tenant
                existing_record = await repository.get_by_id(record_id)
                if not existing_record or existing_record.tenant_id != tenant_id:
                    raise ValueError(f"Sales record {record_id} not found for tenant {tenant_id}")

                success = await repository.delete(record_id)

                if success:
                    logger.info("Deleted sales record", record_id=record_id, tenant_id=tenant_id)

                return success

        except Exception as e:
            logger.error("Failed to delete sales record", error=str(e), record_id=record_id, tenant_id=tenant_id)
            raise

    async def get_product_sales(
        self,
        tenant_id: UUID,
        product_name: str,
        start_date: Optional[datetime] = None,
        end_date: Optional[datetime] = None
    ) -> List[SalesData]:
        """Get sales records for a specific product"""
        try:
            async with get_db_transaction() as db:
                repository = SalesRepository(db)
                records = await repository.get_by_product(tenant_id, product_name, start_date, end_date)

                logger.info(
                    "Retrieved product sales",
                    count=len(records),
                    product=product_name,
                    tenant_id=tenant_id
                )
                return records

        except Exception as e:
            logger.error("Failed to get product sales", error=str(e), tenant_id=tenant_id, product=product_name)
            raise

    async def get_sales_analytics(
        self,
        tenant_id: UUID,
        start_date: Optional[datetime] = None,
        end_date: Optional[datetime] = None
    ) -> Dict[str, Any]:
        """Get sales analytics for a tenant"""
        try:
            async with get_db_transaction() as db:
                repository = SalesRepository(db)
                analytics = await repository.get_analytics(tenant_id, start_date, end_date)

                logger.info("Retrieved sales analytics", tenant_id=tenant_id)
                return analytics

        except Exception as e:
            logger.error("Failed to get sales analytics", error=str(e), tenant_id=tenant_id)
            raise

    async def get_product_categories(self, tenant_id: UUID) -> List[str]:
        """Get distinct product categories"""
        try:
            async with get_db_transaction() as db:
                repository = SalesRepository(db)
                categories = await repository.get_product_categories(tenant_id)

                return categories

        except Exception as e:
            logger.error("Failed to get product categories", error=str(e), tenant_id=tenant_id)
            raise

    async def validate_sales_record(
        self,
        record_id: UUID,
        tenant_id: UUID,
        validation_notes: Optional[str] = None
    ) -> SalesData:
        """Validate a sales record"""
        try:
            async with get_db_transaction() as db:
                repository = SalesRepository(db)

                # Verify record belongs to tenant
                existing_record = await repository.get_by_id(record_id)
                if not existing_record or existing_record.tenant_id != tenant_id:
                    raise ValueError(f"Sales record {record_id} not found for tenant {tenant_id}")

                validated_record = await repository.validate_record(record_id, validation_notes)

                logger.info("Validated sales record", record_id=record_id, tenant_id=tenant_id)
                return validated_record

        except Exception as e:
            logger.error("Failed to validate sales record", error=str(e), record_id=record_id, tenant_id=tenant_id)
            raise

    async def _validate_sales_data(self, sales_data: SalesDataCreate, tenant_id: UUID):
        """Validate sales data according to business rules"""
        # Example business validations

        # Check if revenue matches quantity * unit_price (if unit_price provided)
        if sales_data.unit_price and sales_data.quantity_sold:
            expected_revenue = sales_data.unit_price * sales_data.quantity_sold
            # Apply discount if any
            if sales_data.discount_applied:
                expected_revenue *= (1 - sales_data.discount_applied / 100)

            # Allow for small rounding differences
            if abs(float(sales_data.revenue) - float(expected_revenue)) > 0.01:
                logger.warning(
                    "Revenue mismatch detected",
                    expected=float(expected_revenue),
                    actual=float(sales_data.revenue),
                    tenant_id=tenant_id
                )

        # Check date validity (not in the future); use an aware datetime so the
        # comparison works with the timezone-aware dates the API sends
        if sales_data.date > datetime.now(timezone.utc):
            raise ValueError("Sales date cannot be in the future")

        # Additional business rules can be added here
        logger.info("Sales data validation passed", tenant_id=tenant_id)

    async def _post_create_actions(self, record: SalesData):
        """Actions to perform after creating a sales record"""
        try:
            # Here you could:
            # - Send notifications
            # - Update analytics caches
            # - Trigger ML model updates
            # - Update inventory levels (future integration)

            logger.info("Post-create actions completed", record_id=record.id)

        except Exception as e:
            # Don't fail the main operation for auxiliary actions
            logger.warning("Failed to execute post-create actions", error=str(e), record_id=record.id)

    async def get_products_list(self, tenant_id: str) -> List[Dict[str, Any]]:
        """Get list of all products with sales data for tenant using repository pattern"""
        try:
            async with get_db_transaction() as db:
                repository = SalesRepository(db)

                # Use repository method for product statistics
                products = await repository.get_product_statistics(tenant_id)

                logger.debug("Products list retrieved successfully",
                             tenant_id=tenant_id,
                             product_count=len(products))

                return products

        except Exception as e:
            logger.error("Failed to get products list",
                         error=str(e),
                         tenant_id=tenant_id)
            raise DatabaseError(f"Failed to get products list: {str(e)}")
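The revenue check in _validate_sales_data above amounts to this arithmetic; a minimal sketch, using Decimal to mirror the schema's numeric types (the values are made up):

from decimal import Decimal

unit_price = Decimal("2.50")
quantity_sold = 5
discount_applied = Decimal("10")  # percent

# expected = price * quantity, reduced by the percentage discount
expected = unit_price * quantity_sold * (1 - discount_applied / 100)
print(expected)  # 11.250 -> a reported revenue outside ±0.01 of this logs a warning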
19
services/sales/pytest.ini
Normal file
@@ -0,0 +1,19 @@
[tool:pytest]
testpaths = tests
asyncio_mode = auto
python_files = test_*.py
python_classes = Test*
python_functions = test_*
addopts =
    -v
    --tb=short
    --strict-markers
    --disable-warnings
    --cov=app
    --cov-report=term-missing
    --cov-report=html:htmlcov
markers =
    unit: Unit tests
    integration: Integration tests
    slow: Slow running tests
    external: Tests requiring external services
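A short sketch of how the markers declared above are applied and selected (test names and bodies are illustrative):

# Hypothetical tests showing marker usage; --strict-markers rejects typos.
import pytest

@pytest.mark.unit
def test_fast_path():
    assert 1 + 1 == 2

@pytest.mark.slow
@pytest.mark.external
def test_against_live_rabbitmq():
    ...  # deselect with: pytest -m "not slow and not external"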
39
services/sales/requirements.txt
Normal file
@@ -0,0 +1,39 @@
# services/sales/requirements.txt
# FastAPI and web framework
fastapi==0.104.1
uvicorn[standard]==0.24.0

# Database
sqlalchemy==2.0.23
psycopg2-binary==2.9.9
asyncpg==0.29.0
aiosqlite==0.19.0
alembic==1.12.1

# Data processing
pandas==2.1.3
numpy==1.25.2

# HTTP clients
httpx==0.25.2
aiofiles==23.2.0

# Validation and serialization
pydantic==2.5.0
pydantic-settings==2.0.3

# Authentication and security
python-jose[cryptography]==3.3.0
passlib[bcrypt]==1.7.4

# Logging and monitoring
structlog==23.2.0
prometheus-client==0.19.0

# Message queues
aio-pika==9.3.1

# Note: pytest and testing dependencies are in tests/requirements.txt

# Development
python-multipart==0.0.6
1
services/sales/shared/shared
Symbolic link
@@ -0,0 +1 @@
/Users/urtzialfaro/Documents/bakery-ia/shared
239
services/sales/tests/conftest.py
Normal file
@@ -0,0 +1,239 @@
# services/sales/tests/conftest.py
"""
Pytest configuration and fixtures for Sales Service tests
"""

import pytest
import asyncio
from datetime import datetime, timezone
from decimal import Decimal
from typing import AsyncGenerator
from uuid import uuid4, UUID

from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession, async_sessionmaker
from sqlalchemy.pool import StaticPool
from fastapi.testclient import TestClient

from app.main import app
from app.core.config import settings
from app.core.database import Base, get_db
from app.models.sales import SalesData
from app.schemas.sales import SalesDataCreate


# Test database configuration
TEST_DATABASE_URL = "sqlite+aiosqlite:///:memory:"


@pytest.fixture(scope="session")
def event_loop():
    """Create event loop for the test session"""
    loop = asyncio.new_event_loop()
    yield loop
    loop.close()


@pytest.fixture
async def test_engine():
    """Create test database engine"""
    engine = create_async_engine(
        TEST_DATABASE_URL,
        poolclass=StaticPool,
        connect_args={"check_same_thread": False}
    )

    # Create tables
    async with engine.begin() as conn:
        await conn.run_sync(Base.metadata.create_all)

    yield engine

    await engine.dispose()


@pytest.fixture
async def test_db_session(test_engine) -> AsyncGenerator[AsyncSession, None]:
    """Create test database session"""
    async_session = async_sessionmaker(
        test_engine, class_=AsyncSession, expire_on_commit=False
    )

    async with async_session() as session:
        yield session


@pytest.fixture
def test_client():
    """Create test client"""
    return TestClient(app)


@pytest.fixture
async def override_get_db(test_db_session):
    """Override get_db dependency for testing"""
    async def _override_get_db():
        yield test_db_session

    app.dependency_overrides[get_db] = _override_get_db
    yield
    app.dependency_overrides.clear()


# Test data fixtures
@pytest.fixture
def sample_tenant_id() -> UUID:
    """Sample tenant ID for testing"""
    return uuid4()


@pytest.fixture
def sample_sales_data(sample_tenant_id: UUID) -> SalesDataCreate:
    """Sample sales data for testing"""
    return SalesDataCreate(
        date=datetime.now(timezone.utc),
        product_name="Pan Integral",
        product_category="Panadería",
        product_sku="PAN001",
        quantity_sold=5,
        unit_price=Decimal("2.50"),
        revenue=Decimal("12.50"),
        cost_of_goods=Decimal("6.25"),
        discount_applied=Decimal("0"),
        location_id="STORE_001",
        sales_channel="in_store",
        source="manual",
        notes="Test sale",
        weather_condition="sunny",
        is_holiday=False,
        is_weekend=False
    )


@pytest.fixture
def sample_sales_records(sample_tenant_id: UUID) -> list[dict]:
    """Multiple sample sales records"""
    base_date = datetime.now(timezone.utc)
    return [
        {
            "tenant_id": sample_tenant_id,
            "date": base_date,
            "product_name": "Croissant",
            "quantity_sold": 3,
            "revenue": Decimal("7.50"),
            "location_id": "STORE_001",
            "source": "manual"
        },
        {
            "tenant_id": sample_tenant_id,
            "date": base_date,
            "product_name": "Café Americano",
            "quantity_sold": 2,
            "revenue": Decimal("5.00"),
            "location_id": "STORE_001",
            "source": "pos"
        },
        {
            "tenant_id": sample_tenant_id,
            "date": base_date,
            "product_name": "Bocadillo Jamón",
            "quantity_sold": 1,
            "revenue": Decimal("4.50"),
            "location_id": "STORE_002",
            "source": "manual"
        }
    ]


@pytest.fixture
def sample_csv_data() -> str:
    """Sample CSV data for import testing"""
    return """date,product,quantity,revenue,location
2024-01-15,Pan Integral,5,12.50,STORE_001
2024-01-15,Croissant,3,7.50,STORE_001
2024-01-15,Café Americano,2,5.00,STORE_002
2024-01-16,Pan de Molde,8,16.00,STORE_001
2024-01-16,Magdalenas,6,9.00,STORE_002"""


@pytest.fixture
def sample_json_data() -> str:
    """Sample JSON data for import testing"""
    return """[
    {
        "date": "2024-01-15",
        "product": "Pan Integral",
        "quantity": 5,
        "revenue": 12.50,
        "location": "STORE_001"
    },
    {
        "date": "2024-01-15",
        "product": "Croissant",
        "quantity": 3,
        "revenue": 7.50,
        "location": "STORE_001"
    }
]"""


@pytest.fixture
async def populated_db(test_db_session: AsyncSession, sample_sales_records: list[dict]):
    """Database populated with test data"""
    for record_data in sample_sales_records:
        sales_record = SalesData(**record_data)
        test_db_session.add(sales_record)

    await test_db_session.commit()
    yield test_db_session


# Mock fixtures for external dependencies
@pytest.fixture
def mock_messaging():
    """Mock messaging service"""
    class MockMessaging:
        def __init__(self):
            self.published_events = []

        async def publish_sales_created(self, data):
            self.published_events.append(("sales_created", data))
            return True

        async def publish_data_imported(self, data):
            self.published_events.append(("data_imported", data))
            return True

    return MockMessaging()


# Performance testing fixtures
@pytest.fixture
def large_csv_data() -> str:
    """Large CSV data for performance testing"""
    headers = "date,product,quantity,revenue,location\n"
    rows = []

    for i in range(1000):  # 1000 records
        rows.append(f"2024-01-{(i % 30) + 1:02d},Producto_{i % 10},1,2.50,STORE_{i % 3 + 1:03d}")

    return headers + "\n".join(rows)


@pytest.fixture
def performance_test_data(sample_tenant_id: UUID) -> list[dict]:
    """Large dataset for performance testing"""
    records = []
    base_date = datetime.now(timezone.utc)

    for i in range(500):  # 500 records
        records.append({
            "tenant_id": sample_tenant_id,
            "date": base_date,
            "product_name": f"Test Product {i % 20}",
            "quantity_sold": (i % 10) + 1,
            "revenue": Decimal(str(((i % 10) + 1) * 2.5)),
            "location_id": f"STORE_{(i % 5) + 1:03d}",
            "source": "test"
        })

    return records
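A sketch of how these fixtures compose in a test; it assumes SalesData exposes a tenant_id column as the model imports above suggest:

# Hypothetical test showing the fixture wiring; populated_db commits
# sample_sales_records before the query runs.
import pytest
from sqlalchemy import select
from app.models.sales import SalesData

@pytest.mark.asyncio
async def test_populated_db_has_records(populated_db, sample_tenant_id):
    result = await populated_db.execute(
        select(SalesData).where(SalesData.tenant_id == sample_tenant_id)
    )
    records = result.scalars().all()
    assert len(records) == 3  # matches sample_sales_records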
417
services/sales/tests/integration/test_api_endpoints.py
Normal file
@@ -0,0 +1,417 @@
# services/sales/tests/integration/test_api_endpoints.py
"""
Integration tests for Sales API endpoints
"""

import pytest
import json
from decimal import Decimal
from datetime import datetime, timezone
from uuid import uuid4


@pytest.mark.asyncio
class TestSalesAPIEndpoints:
    """Test Sales API endpoints integration"""

    async def test_create_sales_record_success(self, test_client, override_get_db, sample_tenant_id):
        """Test creating a sales record via API"""
        sales_data = {
            "date": datetime.now(timezone.utc).isoformat(),
            "product_name": "Pan Integral",
            "product_category": "Panadería",
            "quantity_sold": 5,
            "unit_price": 2.50,
            "revenue": 12.50,
            "location_id": "STORE_001",
            "sales_channel": "in_store",
            "source": "manual"
        }

        response = test_client.post(
            f"/api/v1/sales?tenant_id={sample_tenant_id}",
            json=sales_data
        )

        assert response.status_code == 201
        data = response.json()
        assert data["product_name"] == "Pan Integral"
        assert data["quantity_sold"] == 5
        assert "id" in data

    async def test_create_sales_record_validation_error(self, test_client, override_get_db, sample_tenant_id):
        """Test creating a sales record with validation errors"""
        invalid_data = {
            "date": "invalid-date",
            "product_name": "",  # Empty product name
            "quantity_sold": -1,  # Invalid quantity
            "revenue": -5.00  # Invalid revenue
        }

        response = test_client.post(
            f"/api/v1/sales?tenant_id={sample_tenant_id}",
            json=invalid_data
        )

        assert response.status_code == 422  # Validation error

    async def test_get_sales_records(self, test_client, override_get_db, sample_tenant_id, populated_db):
        """Test getting sales records for tenant"""
        response = test_client.get(f"/api/v1/sales?tenant_id={sample_tenant_id}")

        assert response.status_code == 200
        data = response.json()
        assert isinstance(data, list)
        assert len(data) >= 0

    async def test_get_sales_records_with_filters(self, test_client, override_get_db, sample_tenant_id):
        """Test getting sales records with filters"""
        # First create a record
        sales_data = {
            "date": datetime.now(timezone.utc).isoformat(),
            "product_name": "Croissant",
            "quantity_sold": 3,
            "revenue": 7.50,
            "source": "manual"
        }

        create_response = test_client.post(
            f"/api/v1/sales?tenant_id={sample_tenant_id}",
            json=sales_data
        )
        assert create_response.status_code == 201

        # Get with product filter
        response = test_client.get(
            f"/api/v1/sales?tenant_id={sample_tenant_id}&product_name=Croissant"
        )

        assert response.status_code == 200
        data = response.json()
        assert len(data) >= 1
        assert all(record["product_name"] == "Croissant" for record in data)

    async def test_get_sales_record_by_id(self, test_client, override_get_db, sample_tenant_id):
        """Test getting a specific sales record"""
        # First create a record
        sales_data = {
            "date": datetime.now(timezone.utc).isoformat(),
            "product_name": "Test Product",
            "quantity_sold": 1,
            "revenue": 5.00,
            "source": "manual"
        }

        create_response = test_client.post(
            f"/api/v1/sales?tenant_id={sample_tenant_id}",
            json=sales_data
        )
        assert create_response.status_code == 201
        created_record = create_response.json()

        # Get the specific record
        response = test_client.get(
            f"/api/v1/sales/{created_record['id']}?tenant_id={sample_tenant_id}"
        )

        assert response.status_code == 200
        data = response.json()
        assert data["id"] == created_record["id"]
        assert data["product_name"] == "Test Product"

    async def test_get_sales_record_not_found(self, test_client, override_get_db, sample_tenant_id):
        """Test getting a non-existent sales record"""
        fake_id = str(uuid4())

        response = test_client.get(
            f"/api/v1/sales/{fake_id}?tenant_id={sample_tenant_id}"
        )

        assert response.status_code == 404

    async def test_update_sales_record(self, test_client, override_get_db, sample_tenant_id):
        """Test updating a sales record"""
        # First create a record
        sales_data = {
            "date": datetime.now(timezone.utc).isoformat(),
            "product_name": "Original Product",
            "quantity_sold": 1,
            "revenue": 5.00,
            "source": "manual"
        }

        create_response = test_client.post(
            f"/api/v1/sales?tenant_id={sample_tenant_id}",
            json=sales_data
        )
        assert create_response.status_code == 201
        created_record = create_response.json()

        # Update the record
        update_data = {
            "product_name": "Updated Product",
            "quantity_sold": 2,
            "revenue": 10.00
        }

        response = test_client.put(
            f"/api/v1/sales/{created_record['id']}?tenant_id={sample_tenant_id}",
            json=update_data
        )

        assert response.status_code == 200
        data = response.json()
        assert data["product_name"] == "Updated Product"
        assert data["quantity_sold"] == 2

    async def test_delete_sales_record(self, test_client, override_get_db, sample_tenant_id):
        """Test deleting a sales record"""
        # First create a record
        sales_data = {
            "date": datetime.now(timezone.utc).isoformat(),
            "product_name": "To Delete",
            "quantity_sold": 1,
            "revenue": 5.00,
            "source": "manual"
        }

        create_response = test_client.post(
            f"/api/v1/sales?tenant_id={sample_tenant_id}",
            json=sales_data
        )
        assert create_response.status_code == 201
        created_record = create_response.json()

        # Delete the record
        response = test_client.delete(
            f"/api/v1/sales/{created_record['id']}?tenant_id={sample_tenant_id}"
        )

        assert response.status_code == 200

        # Verify it's deleted
        get_response = test_client.get(
            f"/api/v1/sales/{created_record['id']}?tenant_id={sample_tenant_id}"
        )
        assert get_response.status_code == 404

    async def test_get_sales_analytics(self, test_client, override_get_db, sample_tenant_id):
        """Test getting sales analytics"""
        # First create some records
        for i in range(3):
            sales_data = {
                "date": datetime.now(timezone.utc).isoformat(),
                "product_name": f"Product {i}",
                "quantity_sold": i + 1,
                "revenue": (i + 1) * 5.0,
                "source": "manual"
            }

            response = test_client.post(
                f"/api/v1/sales?tenant_id={sample_tenant_id}",
                json=sales_data
            )
            assert response.status_code == 201

        # Get analytics
        response = test_client.get(
            f"/api/v1/sales/analytics?tenant_id={sample_tenant_id}"
        )

        assert response.status_code == 200
        data = response.json()
        assert "total_revenue" in data
        assert "total_quantity" in data
        assert "total_transactions" in data

    async def test_validate_sales_record(self, test_client, override_get_db, sample_tenant_id):
        """Test validating a sales record"""
        # First create a record
        sales_data = {
            "date": datetime.now(timezone.utc).isoformat(),
            "product_name": "To Validate",
            "quantity_sold": 1,
            "revenue": 5.00,
            "source": "manual"
        }

        create_response = test_client.post(
            f"/api/v1/sales?tenant_id={sample_tenant_id}",
            json=sales_data
        )
        assert create_response.status_code == 201
        created_record = create_response.json()

        # Validate the record
        validation_data = {
            "validation_notes": "Validated by manager"
        }

        response = test_client.post(
            f"/api/v1/sales/{created_record['id']}/validate?tenant_id={sample_tenant_id}",
            json=validation_data
        )

        assert response.status_code == 200
        data = response.json()
        assert data["is_validated"] is True
        assert data["validation_notes"] == "Validated by manager"

    async def test_get_product_sales(self, test_client, override_get_db, sample_tenant_id):
        """Test getting sales for specific product"""
        # First create records for different products
        products = ["Product A", "Product B", "Product A"]
        for product in products:
            sales_data = {
                "date": datetime.now(timezone.utc).isoformat(),
                "product_name": product,
                "quantity_sold": 1,
                "revenue": 5.00,
                "source": "manual"
            }

            response = test_client.post(
                f"/api/v1/sales?tenant_id={sample_tenant_id}",
                json=sales_data
            )
            assert response.status_code == 201

        # Get sales for Product A
        response = test_client.get(
            f"/api/v1/sales/products/Product A?tenant_id={sample_tenant_id}"
        )

        assert response.status_code == 200
        data = response.json()
        assert len(data) == 2  # Two Product A records
        assert all(record["product_name"] == "Product A" for record in data)

    async def test_get_product_categories(self, test_client, override_get_db, sample_tenant_id):
        """Test getting product categories"""
        # First create records with categories
        for category in ["Panadería", "Cafetería", "Panadería"]:
            sales_data = {
                "date": datetime.now(timezone.utc).isoformat(),
                "product_name": "Test Product",
                "product_category": category,
                "quantity_sold": 1,
                "revenue": 5.00,
                "source": "manual"
            }

            response = test_client.post(
                f"/api/v1/sales?tenant_id={sample_tenant_id}",
                json=sales_data
            )
            assert response.status_code == 201

        # Get categories
        response = test_client.get(
            f"/api/v1/sales/categories?tenant_id={sample_tenant_id}"
        )

        assert response.status_code == 200
        data = response.json()
        assert isinstance(data, list)

    async def test_export_sales_data_csv(self, test_client, override_get_db, sample_tenant_id):
        """Test exporting sales data as CSV"""
        # First create some records
        for i in range(3):
            sales_data = {
                "date": datetime.now(timezone.utc).isoformat(),
                "product_name": f"Export Product {i}",
                "quantity_sold": i + 1,
                "revenue": (i + 1) * 5.0,
                "source": "manual"
            }

            response = test_client.post(
                f"/api/v1/sales?tenant_id={sample_tenant_id}",
                json=sales_data
            )
            assert response.status_code == 201

        # Export as CSV
        response = test_client.get(
            f"/api/v1/sales/export?tenant_id={sample_tenant_id}&format=csv"
        )

        assert response.status_code == 200
        assert response.headers["content-type"].startswith("text/csv")
        assert "Export Product" in response.text

    async def test_bulk_create_sales_records(self, test_client, override_get_db, sample_tenant_id):
        """Test bulk creating sales records"""
        bulk_data = [
            {
                "date": datetime.now(timezone.utc).isoformat(),
                "product_name": f"Bulk Product {i}",
                "quantity_sold": i + 1,
                "revenue": (i + 1) * 3.0,
                "source": "bulk"
            }
            for i in range(5)
        ]

        response = test_client.post(
            f"/api/v1/sales/bulk?tenant_id={sample_tenant_id}",
            json=bulk_data
        )

        assert response.status_code == 201
        data = response.json()
        assert data["created_count"] == 5
        assert data["success"] is True

    async def test_tenant_isolation(self, test_client, override_get_db):
        """Test that tenants can only access their own data"""
        tenant_1 = uuid4()
        tenant_2 = uuid4()

        # Create record for tenant 1
        sales_data = {
            "date": datetime.now(timezone.utc).isoformat(),
            "product_name": "Tenant 1 Product",
            "quantity_sold": 1,
            "revenue": 5.00,
            "source": "manual"
        }

        create_response = test_client.post(
            f"/api/v1/sales?tenant_id={tenant_1}",
            json=sales_data
        )
        assert create_response.status_code == 201
        created_record = create_response.json()

        # Try to access with tenant 2
        response = test_client.get(
            f"/api/v1/sales/{created_record['id']}?tenant_id={tenant_2}"
        )

        assert response.status_code == 404  # Should not be found

        # Tenant 1 should still be able to access
        response = test_client.get(
            f"/api/v1/sales/{created_record['id']}?tenant_id={tenant_1}"
        )

        assert response.status_code == 200

    async def test_api_error_handling(self, test_client, override_get_db, sample_tenant_id):
        """Test API error handling"""
        # Test missing tenant_id
        sales_data = {
            "date": datetime.now(timezone.utc).isoformat(),
            "product_name": "Test Product",
            "quantity_sold": 1,
            "revenue": 5.00
        }

        response = test_client.post("/api/v1/sales", json=sales_data)
        assert response.status_code == 422  # Missing required parameter

        # Test invalid UUID (note the f-string so the tenant_id is interpolated)
        response = test_client.get(f"/api/v1/sales/invalid-uuid?tenant_id={sample_tenant_id}")
        assert response.status_code == 422  # Invalid UUID format
11
services/sales/tests/requirements.txt
Normal file
@@ -0,0 +1,11 @@
# Testing dependencies for Sales Service
pytest==7.4.3
pytest-asyncio==0.21.1
pytest-mock==3.12.0
pytest-cov==4.1.0
httpx==0.25.2
fastapi[all]==0.104.1
sqlalchemy[asyncio]==2.0.23
aiosqlite==0.19.0
pandas==2.1.4
coverage==7.3.2
383
services/sales/tests/unit/test_data_import.py
Normal file
@@ -0,0 +1,383 @@
# services/sales/tests/unit/test_data_import.py
|
||||
"""
|
||||
Unit tests for Data Import Service
|
||||
"""
|
||||
|
||||
import pytest
|
||||
import json
|
||||
import base64
|
||||
from decimal import Decimal
|
||||
from datetime import datetime, timezone
|
||||
from unittest.mock import AsyncMock, patch
|
||||
|
||||
from app.services.data_import_service import DataImportService, SalesValidationResult, SalesImportResult
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
class TestDataImportService:
|
||||
"""Test Data Import Service functionality"""
|
||||
|
||||
@pytest.fixture
|
||||
def import_service(self):
|
||||
"""Create data import service instance"""
|
||||
return DataImportService()
|
||||
|
||||
async def test_validate_csv_import_data_valid(self, import_service, sample_tenant_id, sample_csv_data):
|
||||
"""Test validation of valid CSV import data"""
|
||||
data = {
|
||||
"tenant_id": str(sample_tenant_id),
|
||||
"data": sample_csv_data,
|
||||
"data_format": "csv"
|
||||
}
|
||||
|
||||
result = await import_service.validate_import_data(data)
|
||||
|
||||
assert result.is_valid is True
|
||||
assert result.total_records == 5
|
||||
assert len(result.errors) == 0
|
||||
assert result.summary["status"] == "valid"
|
||||
|
||||
async def test_validate_csv_import_data_missing_tenant(self, import_service, sample_csv_data):
|
||||
"""Test validation with missing tenant_id"""
|
||||
data = {
|
||||
"data": sample_csv_data,
|
||||
"data_format": "csv"
|
||||
}
|
||||
|
||||
result = await import_service.validate_import_data(data)
|
||||
|
||||
assert result.is_valid is False
|
||||
assert any(error["code"] == "MISSING_TENANT_ID" for error in result.errors)
|
||||
|
||||
async def test_validate_csv_import_data_empty_file(self, import_service, sample_tenant_id):
|
||||
"""Test validation with empty file"""
|
||||
data = {
|
||||
"tenant_id": str(sample_tenant_id),
|
||||
"data": "",
|
||||
"data_format": "csv"
|
||||
}
|
||||
|
||||
result = await import_service.validate_import_data(data)
|
||||
|
||||
assert result.is_valid is False
|
||||
assert any(error["code"] == "EMPTY_FILE" for error in result.errors)
|
||||
|
||||
async def test_validate_csv_import_data_unsupported_format(self, import_service, sample_tenant_id):
|
||||
"""Test validation with unsupported format"""
|
||||
data = {
|
||||
"tenant_id": str(sample_tenant_id),
|
||||
"data": "some data",
|
||||
"data_format": "unsupported"
|
||||
}
|
||||
|
||||
result = await import_service.validate_import_data(data)
|
||||
|
||||
assert result.is_valid is False
|
||||
assert any(error["code"] == "UNSUPPORTED_FORMAT" for error in result.errors)
|
||||
|
||||
async def test_validate_csv_missing_required_columns(self, import_service, sample_tenant_id):
|
||||
"""Test validation with missing required columns"""
|
||||
        invalid_csv = "invalid_column,another_invalid\nvalue1,value2"
        data = {
            "tenant_id": str(sample_tenant_id),
            "data": invalid_csv,
            "data_format": "csv"
        }

        result = await import_service.validate_import_data(data)

        assert result.is_valid is False
        assert any(error["code"] == "MISSING_DATE_COLUMN" for error in result.errors)
        assert any(error["code"] == "MISSING_PRODUCT_COLUMN" for error in result.errors)

    async def test_process_csv_import_success(self, import_service, sample_tenant_id, sample_csv_data):
        """Test successful CSV import processing"""
        with patch('app.services.data_import_service.get_db_transaction') as mock_get_db:
            mock_db = AsyncMock()
            mock_get_db.return_value.__aenter__.return_value = mock_db

            mock_repository = AsyncMock()
            mock_repository.create_sales_record.return_value = AsyncMock()

            with patch('app.services.data_import_service.SalesRepository', return_value=mock_repository):
                result = await import_service.process_import(
                    sample_tenant_id,
                    sample_csv_data,
                    "csv",
                    "test.csv"
                )

                assert result.success is True
                assert result.records_processed == 5
                assert result.records_created == 5
                assert result.records_failed == 0

    async def test_process_json_import_success(self, import_service, sample_tenant_id, sample_json_data):
        """Test successful JSON import processing"""
        with patch('app.services.data_import_service.get_db_transaction') as mock_get_db:
            mock_db = AsyncMock()
            mock_get_db.return_value.__aenter__.return_value = mock_db

            mock_repository = AsyncMock()
            mock_repository.create_sales_record.return_value = AsyncMock()

            with patch('app.services.data_import_service.SalesRepository', return_value=mock_repository):
                result = await import_service.process_import(
                    sample_tenant_id,
                    sample_json_data,
                    "json",
                    "test.json"
                )

                assert result.success is True
                assert result.records_processed == 2
                assert result.records_created == 2

    async def test_process_excel_import_base64(self, import_service, sample_tenant_id):
        """Test Excel import with base64 encoded data"""
        # Create a simple Excel-like data structure
        excel_data = json.dumps([{
            "date": "2024-01-15",
            "product": "Pan Integral",
            "quantity": 5,
            "revenue": 12.50
        }])

        # Encode as base64
        encoded_data = "data:application/vnd.openxmlformats-officedocument.spreadsheetml.sheet;base64," + \
            base64.b64encode(excel_data.encode()).decode()

        with patch('app.services.data_import_service.get_db_transaction') as mock_get_db:
            mock_db = AsyncMock()
            mock_get_db.return_value.__aenter__.return_value = mock_db

            mock_repository = AsyncMock()
            mock_repository.create_sales_record.return_value = AsyncMock()

            # Mock pandas.read_excel to avoid dependency issues
            with patch('pandas.read_excel') as mock_read_excel:
                import pandas as pd
                mock_df = pd.DataFrame([{
                    "date": "2024-01-15",
                    "product": "Pan Integral",
                    "quantity": 5,
                    "revenue": 12.50
                }])
                mock_read_excel.return_value = mock_df

                with patch('app.services.data_import_service.SalesRepository', return_value=mock_repository):
                    result = await import_service.process_import(
                        sample_tenant_id,
                        encoded_data,
                        "excel",
                        "test.xlsx"
                    )

                    assert result.success is True
                    assert result.records_created == 1
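    # Hedged sketch: one plausible way the service could turn the data-URL
    # payload above into bytes for pandas.read_excel. The helper name and the
    # partition-on-"base64," convention are assumptions for illustration, not
    # the service's actual code.
    @staticmethod
    def _decode_data_url_sketch(payload: str) -> bytes:
        import base64
        # "data:<mime>;base64,<body>" -> raw workbook bytes
        _, _, body = payload.partition("base64,")
        return base64.b64decode(body)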
    async def test_detect_columns_mapping(self, import_service):
        """Test column detection and mapping"""
        columns = ["fecha", "producto", "cantidad", "ingresos", "tienda"]

        mapping = import_service._detect_columns(columns)

        assert mapping["date"] == "fecha"
        assert mapping["product"] == "producto"
        assert mapping["quantity"] == "cantidad"
        assert mapping["revenue"] == "ingresos"
        assert mapping["location"] == "tienda"
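    # Hedged sketch: one plausible way _detect_columns could map Spanish
    # headers to canonical fields. The synonym table is an assumption; the
    # real implementation may match differently.
    @staticmethod
    def _detect_columns_sketch(columns):
        synonyms = {
            "date": {"fecha", "date", "dia"},
            "product": {"producto", "product"},
            "quantity": {"cantidad", "quantity", "qty"},
            "revenue": {"ingresos", "revenue", "importe"},
            "location": {"tienda", "location", "store"},
        }
        mapping = {}
        for field, names in synonyms.items():
            for col in columns:
                if col.strip().lower() in names:
                    mapping[field] = col
                    break
        return mapping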
    async def test_parse_date_multiple_formats(self, import_service):
        """Test date parsing with different formats"""
        # Test various date formats
        dates_to_test = [
            "2024-01-15",
            "15/01/2024",
            "01/15/2024",
            "15-01-2024",
            "2024/01/15",
            "2024-01-15 10:30:00"
        ]

        for date_str in dates_to_test:
            result = import_service._parse_date(date_str)
            assert result is not None
            assert isinstance(result, datetime)

    async def test_parse_date_invalid_formats(self, import_service):
        """Test date parsing with invalid formats"""
        invalid_dates = ["invalid", "not-a-date", "", None, "32/13/2024"]

        for date_str in invalid_dates:
            result = import_service._parse_date(date_str)
            assert result is None
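    # Hedged sketch: _parse_date plausibly tries a list of known formats and
    # returns None when none match. The format list is inferred from the test
    # data above; note that with this ordering an ambiguous date like
    # "01/02/2024" would resolve day-first.
    @staticmethod
    def _parse_date_sketch(value):
        from datetime import datetime
        if not value:
            return None
        formats = [
            "%Y-%m-%d", "%d/%m/%Y", "%m/%d/%Y",
            "%d-%m-%Y", "%Y/%m/%d", "%Y-%m-%d %H:%M:%S",
        ]
        for fmt in formats:
            try:
                return datetime.strptime(str(value), fmt)
            except ValueError:
                continue
        return None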
    async def test_clean_product_name(self, import_service):
        """Test product name cleaning"""
        test_cases = [
            ("  pan de molde  ", "Pan De Molde"),
            ("café con leche!!!", "Café Con Leche"),
            ("té verde orgánico", "Té Verde Orgánico"),
            ("bocadillo de jamón", "Bocadillo De Jamón"),
            ("", "Producto sin nombre"),
            (None, "Producto sin nombre")
        ]

        for input_name, expected in test_cases:
            result = import_service._clean_product_name(input_name)
            assert result == expected
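    # Hedged sketch: a cleaning routine consistent with the cases above --
    # strip punctuation, collapse whitespace, title-case, and fall back to
    # "Producto sin nombre". The regex and fallback are assumptions mirroring
    # the test data, not the service's actual implementation.
    @staticmethod
    def _clean_product_name_sketch(name):
        import re
        if not name or not str(name).strip():
            return "Producto sin nombre"
        cleaned = re.sub(r"[^\w\s]", "", str(name))  # \w is Unicode-aware in Python 3
        return " ".join(cleaned.split()).title()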
    async def test_parse_row_data_valid(self, import_service):
        """Test parsing valid row data"""
        row = {
            "fecha": "2024-01-15",
            "producto": "Pan Integral",
            "cantidad": "5",
            "ingresos": "12.50",
            "tienda": "STORE_001"
        }

        column_mapping = {
            "date": "fecha",
            "product": "producto",
            "quantity": "cantidad",
            "revenue": "ingresos",
            "location": "tienda"
        }

        result = await import_service._parse_row_data(row, column_mapping, 1)

        assert result["skip"] is False
        assert result["product_name"] == "Pan Integral"
        assert result["quantity_sold"] == 5
        assert result["revenue"] == 12.5
        assert result["location_id"] == "STORE_001"

    async def test_parse_row_data_missing_required(self, import_service):
        """Test parsing row data with missing required fields"""
        row = {
            "producto": "Pan Integral",
            "cantidad": "5"
            # Missing date
        }

        column_mapping = {
            "date": "fecha",
            "product": "producto",
            "quantity": "cantidad"
        }

        result = await import_service._parse_row_data(row, column_mapping, 1)

        assert result["skip"] is True
        assert len(result["errors"]) > 0
        assert "Missing date" in result["errors"][0]

    async def test_parse_row_data_invalid_quantity(self, import_service):
        """Test parsing row data with invalid quantity"""
        row = {
            "fecha": "2024-01-15",
            "producto": "Pan Integral",
            "cantidad": "invalid_quantity"
        }

        column_mapping = {
            "date": "fecha",
            "product": "producto",
            "quantity": "cantidad"
        }

        result = await import_service._parse_row_data(row, column_mapping, 1)

        assert result["skip"] is False  # Should not skip, just use default
        assert result["quantity_sold"] == 1  # Default quantity
        assert len(result["warnings"]) > 0
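    # Hedged sketch: the default-to-1 behavior exercised above could come
    # from a coercion helper like this (the helper name and warning text are
    # assumptions for illustration).
    @staticmethod
    def _coerce_quantity_sketch(raw, warnings):
        try:
            return int(float(raw))
        except (TypeError, ValueError):
            warnings.append(f"Invalid quantity '{raw}', defaulting to 1")
            return 1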
    async def test_structure_messages(self, import_service):
        """Test message structuring"""
        messages = [
            "Simple string message",
            {
                "type": "existing_dict",
                "message": "Already structured",
                "code": "TEST_CODE"
            }
        ]

        result = import_service._structure_messages(messages)

        assert len(result) == 2
        assert result[0]["type"] == "general_message"
        assert result[0]["message"] == "Simple string message"
        assert result[1]["type"] == "existing_dict"
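    # Hedged sketch: consistent with the asserts above, _structure_messages
    # appears to normalize plain strings into dicts while passing structured
    # dicts through untouched. A minimal version might look like this.
    @staticmethod
    def _structure_messages_sketch(messages):
        structured = []
        for msg in messages:
            if isinstance(msg, dict):
                structured.append(msg)
            else:
                structured.append({"type": "general_message", "message": str(msg)})
        return structured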
    async def test_generate_suggestions_valid_file(self, import_service):
        """Test suggestion generation for valid files"""
        validation_result = SalesValidationResult(
            is_valid=True,
            total_records=50,
            valid_records=50,
            invalid_records=0,
            errors=[],
            warnings=[],
            summary={}
        )

        suggestions = import_service._generate_suggestions(validation_result, "csv", 0)

        assert "El archivo está listo para procesamiento" in suggestions
        assert "Se procesarán aproximadamente 50 registros" in suggestions

    async def test_generate_suggestions_large_file(self, import_service):
        """Test suggestion generation for large files"""
        validation_result = SalesValidationResult(
            is_valid=True,
            total_records=2000,
            valid_records=2000,
            invalid_records=0,
            errors=[],
            warnings=[],
            summary={}
        )

        suggestions = import_service._generate_suggestions(validation_result, "csv", 0)

        assert "Archivo grande: el procesamiento puede tomar varios minutos" in suggestions
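    # Hedged sketch: suggestion generation consistent with both tests above,
    # with an assumed 1000-record threshold for the "large file" warning (the
    # actual threshold in the service is not shown here).
    @staticmethod
    def _generate_suggestions_sketch(result):
        suggestions = []
        if result.is_valid:
            suggestions.append("El archivo está listo para procesamiento")
            suggestions.append(f"Se procesarán aproximadamente {result.valid_records} registros")
        if result.total_records > 1000:
            suggestions.append("Archivo grande: el procesamiento puede tomar varios minutos")
        return suggestions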
    async def test_import_error_handling(self, import_service, sample_tenant_id):
        """Test import error handling"""
        # Test with unsupported format
        with pytest.raises(ValueError, match="Unsupported format"):
            await import_service.process_import(
                sample_tenant_id,
                "some data",
                "unsupported_format"
            )

    async def test_performance_large_import(self, import_service, sample_tenant_id, large_csv_data):
        """Test performance with large CSV import"""
        with patch('app.services.data_import_service.get_db_transaction') as mock_get_db:
            mock_db = AsyncMock()
            mock_get_db.return_value.__aenter__.return_value = mock_db

            mock_repository = AsyncMock()
            mock_repository.create_sales_record.return_value = AsyncMock()

            with patch('app.services.data_import_service.SalesRepository', return_value=mock_repository):
                import time
                start_time = time.time()

                result = await import_service.process_import(
                    sample_tenant_id,
                    large_csv_data,
                    "csv",
                    "large_test.csv"
                )

                end_time = time.time()
                execution_time = end_time - start_time

                assert result.success is True
                assert result.records_processed == 1000
                assert execution_time < 10.0  # Should complete in under 10 seconds
211
services/sales/tests/unit/test_repositories.py
Normal file
@@ -0,0 +1,211 @@
# services/sales/tests/unit/test_repositories.py
"""
Unit tests for Sales Repository
"""

import pytest
from datetime import datetime, timezone
from decimal import Decimal
from uuid import UUID

from app.repositories.sales_repository import SalesRepository
from app.models.sales import SalesData
from app.schemas.sales import SalesDataCreate, SalesDataUpdate, SalesDataQuery


@pytest.mark.asyncio
class TestSalesRepository:
    """Test Sales Repository operations"""

    async def test_create_sales_record(self, test_db_session, sample_tenant_id, sample_sales_data):
        """Test creating a sales record"""
        repository = SalesRepository(test_db_session)

        record = await repository.create_sales_record(sample_sales_data, sample_tenant_id)

        assert record is not None
        assert record.id is not None
        assert record.tenant_id == sample_tenant_id
        assert record.product_name == sample_sales_data.product_name
        assert record.quantity_sold == sample_sales_data.quantity_sold
        assert record.revenue == sample_sales_data.revenue

    async def test_get_by_id(self, test_db_session, sample_tenant_id, sample_sales_data):
        """Test getting a sales record by ID"""
        repository = SalesRepository(test_db_session)

        # Create record first
        created_record = await repository.create_sales_record(sample_sales_data, sample_tenant_id)

        # Get by ID
        retrieved_record = await repository.get_by_id(created_record.id)

        assert retrieved_record is not None
        assert retrieved_record.id == created_record.id
        assert retrieved_record.product_name == created_record.product_name

    async def test_get_by_tenant(self, populated_db, sample_tenant_id):
        """Test getting records by tenant"""
        repository = SalesRepository(populated_db)

        records = await repository.get_by_tenant(sample_tenant_id)

        assert len(records) == 3  # From the populated_db fixture
        assert all(record.tenant_id == sample_tenant_id for record in records)

    async def test_get_by_product(self, populated_db, sample_tenant_id):
        """Test getting records by product"""
        repository = SalesRepository(populated_db)

        records = await repository.get_by_product(sample_tenant_id, "Croissant")

        assert len(records) == 1
        assert records[0].product_name == "Croissant"

    async def test_update_record(self, test_db_session, sample_tenant_id, sample_sales_data):
        """Test updating a sales record"""
        repository = SalesRepository(test_db_session)

        # Create record first
        created_record = await repository.create_sales_record(sample_sales_data, sample_tenant_id)

        # Update record
        update_data = SalesDataUpdate(
            product_name="Updated Product",
            quantity_sold=10,
            revenue=Decimal("25.00")
        )

        updated_record = await repository.update(created_record.id, update_data.model_dump(exclude_unset=True))

        assert updated_record.product_name == "Updated Product"
        assert updated_record.quantity_sold == 10
        assert updated_record.revenue == Decimal("25.00")

    async def test_delete_record(self, test_db_session, sample_tenant_id, sample_sales_data):
        """Test deleting a sales record"""
        repository = SalesRepository(test_db_session)

        # Create record first
        created_record = await repository.create_sales_record(sample_sales_data, sample_tenant_id)

        # Delete record
        success = await repository.delete(created_record.id)

        assert success is True

        # Verify the record is deleted
        deleted_record = await repository.get_by_id(created_record.id)
        assert deleted_record is None

    async def test_get_analytics(self, populated_db, sample_tenant_id):
        """Test getting analytics for a tenant"""
        repository = SalesRepository(populated_db)

        analytics = await repository.get_analytics(sample_tenant_id)

        assert "total_revenue" in analytics
        assert "total_quantity" in analytics
        assert "total_transactions" in analytics
        assert "average_transaction_value" in analytics
        assert analytics["total_transactions"] == 3

    async def test_get_product_categories(self, populated_db, sample_tenant_id):
        """Test getting distinct product categories"""
        repository = SalesRepository(populated_db)

        categories = await repository.get_product_categories(sample_tenant_id)

        assert isinstance(categories, list)
        # Expected to be empty since the populated_db fixture doesn't set categories

    async def test_validate_record(self, test_db_session, sample_tenant_id, sample_sales_data):
        """Test validating a sales record"""
        repository = SalesRepository(test_db_session)

        # Create record first
        created_record = await repository.create_sales_record(sample_sales_data, sample_tenant_id)

        # Validate record
        validated_record = await repository.validate_record(created_record.id, "Test validation")

        assert validated_record.is_validated is True
        assert validated_record.validation_notes == "Test validation"

    async def test_query_with_filters(self, populated_db, sample_tenant_id):
        """Test querying with filters"""
        repository = SalesRepository(populated_db)

        query = SalesDataQuery(
            product_name="Croissant",
            limit=10,
            offset=0
        )

        records = await repository.get_by_tenant(sample_tenant_id, query)

        assert len(records) == 1
        assert records[0].product_name == "Croissant"

    async def test_bulk_create(self, test_db_session, sample_tenant_id):
        """Test bulk creating records"""
        repository = SalesRepository(test_db_session)

        # Create payloads for multiple records
        bulk_data = [
            {
                "date": datetime.now(timezone.utc),
                "product_name": f"Product {i}",
                "quantity_sold": i + 1,
                "revenue": Decimal(str((i + 1) * 2.5)),
                "source": "bulk_test"
            }
            for i in range(5)
        ]

        created_count = await repository.bulk_create_sales_data(bulk_data, sample_tenant_id)

        assert created_count == 5

        # Verify the records were created
        all_records = await repository.get_by_tenant(sample_tenant_id)
        assert len(all_records) == 5

    async def test_repository_error_handling(self, test_db_session, sample_tenant_id):
        """Test repository error handling"""
        repository = SalesRepository(test_db_session)

        # Getting a non-existent record returns None
        non_existent = await repository.get_by_id("non-existent-id")
        assert non_existent is None

        # Deleting a non-existent record returns False
        delete_success = await repository.delete("non-existent-id")
        assert delete_success is False

    async def test_performance_bulk_operations(self, test_db_session, sample_tenant_id, performance_test_data):
        """Test performance of bulk operations"""
        repository = SalesRepository(test_db_session)

        # Test bulk create performance
        import time
        start_time = time.time()

        created_count = await repository.bulk_create_sales_data(performance_test_data, sample_tenant_id)

        end_time = time.time()
        execution_time = end_time - start_time

        assert created_count == len(performance_test_data)
        assert execution_time < 5.0  # Should complete in under 5 seconds

        # Test bulk retrieval performance
        start_time = time.time()

        all_records = await repository.get_by_tenant(sample_tenant_id)

        end_time = time.time()
        execution_time = end_time - start_time

        assert len(all_records) == len(performance_test_data)
        assert execution_time < 2.0  # Should complete in under 2 seconds
287
services/sales/tests/unit/test_services.py
Normal file
@@ -0,0 +1,287 @@
# services/sales/tests/unit/test_services.py
"""
Unit tests for Sales Service
"""

import pytest
from datetime import datetime, timezone
from decimal import Decimal
from unittest.mock import AsyncMock, patch
from uuid import uuid4

from app.services.sales_service import SalesService
from app.schemas.sales import SalesDataCreate, SalesDataUpdate, SalesDataQuery


@pytest.mark.asyncio
class TestSalesService:
    """Test Sales Service business logic"""

    @pytest.fixture
    def sales_service(self):
        """Create sales service instance"""
        return SalesService()

    async def test_create_sales_record_success(self, sales_service, sample_tenant_id, sample_sales_data):
        """Test successful sales record creation"""
        with patch('app.services.sales_service.get_db_transaction') as mock_get_db:
            mock_db = AsyncMock()
            mock_get_db.return_value.__aenter__.return_value = mock_db

            mock_repository = AsyncMock()
            mock_record = AsyncMock()
            mock_record.id = uuid4()
            mock_record.product_name = sample_sales_data.product_name
            mock_repository.create_sales_record.return_value = mock_record

            with patch('app.services.sales_service.SalesRepository', return_value=mock_repository):
                result = await sales_service.create_sales_record(
                    sample_sales_data,
                    sample_tenant_id
                )

                assert result is not None
                assert result.id is not None
                mock_repository.create_sales_record.assert_called_once_with(sample_sales_data, sample_tenant_id)

    async def test_create_sales_record_validation_error(self, sales_service, sample_tenant_id):
        """Test sales record creation with validation error"""
        # Create invalid sales data (future date)
        invalid_data = SalesDataCreate(
            date=datetime(2030, 1, 1, tzinfo=timezone.utc),  # Future date
            product_name="Test Product",
            quantity_sold=1,
            revenue=Decimal("5.00")
        )

        with pytest.raises(ValueError, match="Sales date cannot be in the future"):
            await sales_service.create_sales_record(invalid_data, sample_tenant_id)

    async def test_update_sales_record(self, sales_service, sample_tenant_id):
        """Test updating a sales record"""
        record_id = uuid4()
        update_data = SalesDataUpdate(
            product_name="Updated Product",
            quantity_sold=10
        )

        with patch('app.services.sales_service.get_db_transaction') as mock_get_db:
            mock_db = AsyncMock()
            mock_get_db.return_value.__aenter__.return_value = mock_db

            mock_repository = AsyncMock()

            # Mock existing record
            mock_existing = AsyncMock()
            mock_existing.tenant_id = sample_tenant_id
            mock_repository.get_by_id.return_value = mock_existing

            # Mock updated record
            mock_updated = AsyncMock()
            mock_updated.product_name = "Updated Product"
            mock_repository.update.return_value = mock_updated

            with patch('app.services.sales_service.SalesRepository', return_value=mock_repository):
                result = await sales_service.update_sales_record(
                    record_id,
                    update_data,
                    sample_tenant_id
                )

                assert result.product_name == "Updated Product"
                mock_repository.update.assert_called_once()

    async def test_update_nonexistent_record(self, sales_service, sample_tenant_id):
        """Test updating a non-existent record"""
        record_id = uuid4()
        update_data = SalesDataUpdate(product_name="Updated Product")

        with patch('app.services.sales_service.get_db_transaction') as mock_get_db:
            mock_db = AsyncMock()
            mock_get_db.return_value.__aenter__.return_value = mock_db

            mock_repository = AsyncMock()
            mock_repository.get_by_id.return_value = None  # Record not found

            with patch('app.services.sales_service.SalesRepository', return_value=mock_repository):
                with pytest.raises(ValueError, match="not found for tenant"):
                    await sales_service.update_sales_record(
                        record_id,
                        update_data,
                        sample_tenant_id
                    )

    async def test_get_sales_records(self, sales_service, sample_tenant_id):
        """Test getting sales records for tenant"""
        query_params = SalesDataQuery(limit=10)

        with patch('app.services.sales_service.get_db_transaction') as mock_get_db:
            mock_db = AsyncMock()
            mock_get_db.return_value.__aenter__.return_value = mock_db

            mock_repository = AsyncMock()
            mock_records = [AsyncMock(), AsyncMock()]
            mock_repository.get_by_tenant.return_value = mock_records

            with patch('app.services.sales_service.SalesRepository', return_value=mock_repository):
                result = await sales_service.get_sales_records(
                    sample_tenant_id,
                    query_params
                )

                assert len(result) == 2
                mock_repository.get_by_tenant.assert_called_once_with(sample_tenant_id, query_params)

    async def test_get_sales_record_success(self, sales_service, sample_tenant_id):
        """Test getting a specific sales record"""
        record_id = uuid4()

        with patch('app.services.sales_service.get_db_transaction') as mock_get_db:
            mock_db = AsyncMock()
            mock_get_db.return_value.__aenter__.return_value = mock_db

            mock_repository = AsyncMock()
            mock_record = AsyncMock()
            mock_record.tenant_id = sample_tenant_id
            mock_repository.get_by_id.return_value = mock_record

            with patch('app.services.sales_service.SalesRepository', return_value=mock_repository):
                result = await sales_service.get_sales_record(record_id, sample_tenant_id)

                assert result is not None
                assert result.tenant_id == sample_tenant_id
    async def test_get_sales_record_wrong_tenant(self, sales_service, sample_tenant_id):
        """Test getting a record that belongs to a different tenant"""
        record_id = uuid4()
        wrong_tenant_id = uuid4()

        with patch('app.services.sales_service.get_db_transaction') as mock_get_db:
            mock_db = AsyncMock()
            mock_get_db.return_value.__aenter__.return_value = mock_db

            mock_repository = AsyncMock()
            mock_record = AsyncMock()
            mock_record.tenant_id = sample_tenant_id  # Record belongs to the sample tenant, not wrong_tenant_id
            mock_repository.get_by_id.return_value = mock_record

            with patch('app.services.sales_service.SalesRepository', return_value=mock_repository):
                result = await sales_service.get_sales_record(record_id, wrong_tenant_id)

                assert result is None
    async def test_delete_sales_record(self, sales_service, sample_tenant_id):
        """Test deleting a sales record"""
        record_id = uuid4()

        with patch('app.services.sales_service.get_db_transaction') as mock_get_db:
            mock_db = AsyncMock()
            mock_get_db.return_value.__aenter__.return_value = mock_db

            mock_repository = AsyncMock()

            # Mock existing record
            mock_existing = AsyncMock()
            mock_existing.tenant_id = sample_tenant_id
            mock_repository.get_by_id.return_value = mock_existing

            mock_repository.delete.return_value = True

            with patch('app.services.sales_service.SalesRepository', return_value=mock_repository):
                result = await sales_service.delete_sales_record(record_id, sample_tenant_id)

                assert result is True
                mock_repository.delete.assert_called_once_with(record_id)

    async def test_get_product_sales(self, sales_service, sample_tenant_id):
        """Test getting sales for a specific product"""
        product_name = "Test Product"

        with patch('app.services.sales_service.get_db_transaction') as mock_get_db:
            mock_db = AsyncMock()
            mock_get_db.return_value.__aenter__.return_value = mock_db

            mock_repository = AsyncMock()
            mock_records = [AsyncMock(), AsyncMock()]
            mock_repository.get_by_product.return_value = mock_records

            with patch('app.services.sales_service.SalesRepository', return_value=mock_repository):
                result = await sales_service.get_product_sales(sample_tenant_id, product_name)

                assert len(result) == 2
                mock_repository.get_by_product.assert_called_once()

    async def test_get_sales_analytics(self, sales_service, sample_tenant_id):
        """Test getting sales analytics"""
        with patch('app.services.sales_service.get_db_transaction') as mock_get_db:
            mock_db = AsyncMock()
            mock_get_db.return_value.__aenter__.return_value = mock_db

            mock_repository = AsyncMock()
            mock_analytics = {
                "total_revenue": Decimal("100.00"),
                "total_quantity": 50,
                "total_transactions": 10
            }
            mock_repository.get_analytics.return_value = mock_analytics

            with patch('app.services.sales_service.SalesRepository', return_value=mock_repository):
                result = await sales_service.get_sales_analytics(sample_tenant_id)

                assert result["total_revenue"] == Decimal("100.00")
                assert result["total_quantity"] == 50
                assert result["total_transactions"] == 10

    async def test_validate_sales_record(self, sales_service, sample_tenant_id):
        """Test validating a sales record"""
        record_id = uuid4()
        validation_notes = "Validated by manager"

        with patch('app.services.sales_service.get_db_transaction') as mock_get_db:
            mock_db = AsyncMock()
            mock_get_db.return_value.__aenter__.return_value = mock_db

            mock_repository = AsyncMock()

            # Mock existing record
            mock_existing = AsyncMock()
            mock_existing.tenant_id = sample_tenant_id
            mock_repository.get_by_id.return_value = mock_existing

            # Mock validated record
            mock_validated = AsyncMock()
            mock_validated.is_validated = True
            mock_repository.validate_record.return_value = mock_validated

            with patch('app.services.sales_service.SalesRepository', return_value=mock_repository):
                result = await sales_service.validate_sales_record(
                    record_id,
                    sample_tenant_id,
                    validation_notes
                )

                assert result.is_validated is True
                mock_repository.validate_record.assert_called_once_with(record_id, validation_notes)

    async def test_validate_sales_data_business_rules(self, sales_service, sample_tenant_id):
        """Test business validation rules"""
        # Test revenue mismatch detection
        sales_data = SalesDataCreate(
            date=datetime.now(timezone.utc),
            product_name="Test Product",
            quantity_sold=5,
            unit_price=Decimal("2.00"),
            revenue=Decimal("15.00"),  # Should be 10.00 (5 * 2.00)
            discount_applied=Decimal("0")
        )

        # This should not raise an error, just log a warning
        await sales_service._validate_sales_data(sales_data, sample_tenant_id)
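    # Hedged sketch: the warning-only rule exercised above plausibly compares
    # quantity * unit_price (minus discount) against revenue. The helper name,
    # tolerance value, and return-bool shape are assumptions; the service may
    # instead log directly.
    @staticmethod
    def _check_revenue_consistency_sketch(data) -> bool:
        if data.unit_price is None:
            return True  # Nothing to cross-check without a unit price
        expected = data.unit_price * data.quantity_sold - (data.discount_applied or 0)
        return abs(data.revenue - expected) < Decimal("0.01")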
    async def test_post_create_actions(self, sales_service):
        """Test post-create actions"""
        mock_record = AsyncMock()
        mock_record.id = uuid4()

        # Should not raise any exceptions
        await sales_service._post_create_actions(mock_record)