Fix Data API
@@ -1,13 +1,16 @@
 # ================================================================
-# services/data/app/api/sales.py
+# services/data/app/api/sales.py - FIXED VERSION
 # ================================================================
-"""Sales data API endpoints"""
+"""Sales data API endpoints with improved error handling"""

-from fastapi import APIRouter, Depends, HTTPException, UploadFile, File, Form, Query
+from fastapi import APIRouter, Depends, HTTPException, UploadFile, File, Form, Query, Response
+from fastapi.responses import StreamingResponse
 from sqlalchemy.ext.asyncio import AsyncSession
-from typing import List
+from typing import List, Optional
 import uuid
 from datetime import datetime
+import base64
 import structlog

 from app.core.database import get_db
 from app.core.auth import get_current_user, AuthInfo
@@ -18,10 +21,14 @@ from app.schemas.sales import (
     SalesDataCreate,
     SalesDataResponse,
     SalesDataQuery,
-    SalesDataImport
+    SalesDataImport,
+    SalesImportResult,
+    SalesValidationResult,
+    SalesExportRequest
 )

 router = APIRouter()
 logger = structlog.get_logger()

 @router.post("/", response_model=SalesDataResponse)
 async def create_sales_record(
@@ -31,19 +38,32 @@ async def create_sales_record(
 ):
     """Create a new sales record"""
     try:
+        logger.debug("API: Creating sales record", product=sales_data.product_name, quantity=sales_data.quantity_sold)
+
         record = await SalesService.create_sales_record(sales_data, db)

-        # Publish event
-        await data_publisher.publish_data_imported({
-            "tenant_id": str(sales_data.tenant_id),
-            "type": "sales_record",
-            "source": sales_data.source,
-            "timestamp": datetime.utcnow().isoformat()
-        })
+        # Publish event (with error handling)
+        try:
+            await data_publisher.publish_sales_created({
+                "tenant_id": str(sales_data.tenant_id),
+                "product_name": sales_data.product_name,
+                "quantity_sold": sales_data.quantity_sold,
+                "revenue": sales_data.revenue,
+                "source": sales_data.source,
+                "timestamp": datetime.utcnow().isoformat()
+            })
+        except Exception as pub_error:
+            logger.warning("Failed to publish sales created event", error=str(pub_error))
+            # Continue processing - event publishing failure shouldn't break the API

+        logger.debug("Successfully created sales record", record_id=record.id)
         return record

     except Exception as e:
-        raise HTTPException(status_code=500, detail=str(e))
+        logger.error("Failed to create sales record", error=str(e))
+        import traceback
+        logger.error("Sales creation traceback", traceback=traceback.format_exc())
+        raise HTTPException(status_code=500, detail=f"Failed to create sales record: {str(e)}")

 @router.post("/query", response_model=List[SalesDataResponse])
 async def get_sales_data(
@@ -53,12 +73,18 @@ async def get_sales_data(
 ):
     """Get sales data by query parameters"""
     try:
+        logger.debug("API: Querying sales data", tenant_id=query.tenant_id)
+
         records = await SalesService.get_sales_data(query, db)

+        logger.debug("Successfully retrieved sales data", count=len(records))
         return records

     except Exception as e:
-        raise HTTPException(status_code=500, detail=str(e))
+        logger.error("Failed to query sales data", error=str(e))
+        raise HTTPException(status_code=500, detail=f"Failed to query sales data: {str(e)}")

-@router.post("/import")
+@router.post("/import", response_model=SalesImportResult)
 async def import_sales_data(
     tenant_id: str = Form(...),
     file_format: str = Form(...),
@@ -68,6 +94,8 @@ async def import_sales_data(
 ):
     """Import sales data from file"""
     try:
+        logger.debug("API: Importing sales data", tenant_id=tenant_id, format=file_format, filename=file.filename)
+
         # Read file content
         content = await file.read()
         file_content = content.decode('utf-8')
@@ -78,21 +106,30 @@
         )

         if result["success"]:
-            # Publish event
-            await data_publisher.publish_data_imported({
-                "tenant_id": tenant_id,
-                "type": "bulk_import",
-                "format": file_format,
-                "records_created": result["records_created"],
-                "timestamp": datetime.utcnow().isoformat()
-            })
+            # Publish event (with error handling)
+            try:
+                await data_publisher.publish_data_imported({
+                    "tenant_id": tenant_id,
+                    "type": "bulk_import",
+                    "format": file_format,
+                    "filename": file.filename,
+                    "records_created": result["records_created"],
+                    "timestamp": datetime.utcnow().isoformat()
+                })
+            except Exception as pub_error:
+                logger.warning("Failed to publish data imported event", error=str(pub_error))
+                # Continue processing

+        logger.debug("Import completed", success=result["success"], records_created=result.get("records_created", 0))
         return result

     except Exception as e:
-        raise HTTPException(status_code=500, detail=str(e))
+        logger.error("Failed to import sales data", error=str(e))
+        import traceback
+        logger.error("Sales import traceback", traceback=traceback.format_exc())
+        raise HTTPException(status_code=500, detail=f"Failed to import sales data: {str(e)}")

-@router.post("/import/json")
+@router.post("/import/json", response_model=SalesImportResult)
 async def import_sales_json(
     import_data: SalesDataImport,
     db: AsyncSession = Depends(get_db),
@@ -100,6 +137,8 @@ async def import_sales_json(
 ):
     """Import sales data from JSON"""
     try:
+        logger.debug("API: Importing JSON sales data", tenant_id=import_data.tenant_id)
+
         result = await DataImportService.process_upload(
             str(import_data.tenant_id),
             import_data.data,
@@ -108,32 +147,46 @@
         )

         if result["success"]:
-            await data_publisher.publish_data_imported({
-                "tenant_id": str(import_data.tenant_id),
-                "type": "json_import",
-                "records_created": result["records_created"],
-                "timestamp": datetime.utcnow().isoformat()
-            })
+            # Publish event (with error handling)
+            try:
+                await data_publisher.publish_data_imported({
+                    "tenant_id": str(import_data.tenant_id),
+                    "type": "json_import",
+                    "records_created": result["records_created"],
+                    "timestamp": datetime.utcnow().isoformat()
+                })
+            except Exception as pub_error:
+                logger.warning("Failed to publish JSON import event", error=str(pub_error))
+                # Continue processing

+        logger.debug("JSON import completed", success=result["success"], records_created=result.get("records_created", 0))
         return result

     except Exception as e:
-        raise HTTPException(status_code=500, detail=str(e))
+        logger.error("Failed to import JSON sales data", error=str(e))
+        import traceback
+        logger.error("JSON import traceback", traceback=traceback.format_exc())
+        raise HTTPException(status_code=500, detail=f"Failed to import JSON sales data: {str(e)}")

-@router.post("/import/validate")
+@router.post("/import/validate", response_model=SalesValidationResult)
 async def validate_import_data(
     import_data: SalesDataImport,
     current_user: AuthInfo = Depends(get_current_user)
 ):
     """Validate import data before processing"""
     try:
+        logger.debug("API: Validating import data", tenant_id=import_data.tenant_id)
+
         validation = await DataImportService.validate_import_data(
             import_data.model_dump()
         )

+        logger.debug("Validation completed", is_valid=validation.get("is_valid", False))
         return validation

     except Exception as e:
-        raise HTTPException(status_code=500, detail=str(e))
+        logger.error("Failed to validate import data", error=str(e))
+        raise HTTPException(status_code=500, detail=f"Failed to validate import data: {str(e)}")

 @router.get("/import/template/{format_type}")
 async def get_import_template(
@@ -142,11 +195,16 @@ async def get_import_template(
 ):
     """Get import template for specified format"""
     try:
+        logger.debug("API: Getting import template", format=format_type)
+
         template = await DataImportService.get_import_template(format_type)

         if "error" in template:
+            logger.warning("Template generation error", error=template["error"])
             raise HTTPException(status_code=400, detail=template["error"])

+        logger.debug("Template generated successfully", format=format_type)
+
         if format_type.lower() == "csv":
             return Response(
                 content=template["template"],
@@ -168,362 +226,92 @@ async def get_import_template
         else:
             return template

     except HTTPException:
         raise
     except Exception as e:
-        raise HTTPException(status_code=500, detail=str(e))
+        logger.error("Failed to generate import template", error=str(e))
+        import traceback
+        logger.error("Template generation traceback", traceback=traceback.format_exc())
+        raise HTTPException(status_code=500, detail=f"Failed to generate template: {str(e)}")

-@router.post("/import/advanced")
-async def import_sales_data_advanced(
-    tenant_id: str = Form(...),
-    file_format: str = Form(...),
-    file: UploadFile = File(...),
-    validate_only: bool = Form(False),
+@router.get("/analytics/{tenant_id}")
+async def get_sales_analytics(
+    tenant_id: str,
+    start_date: Optional[datetime] = Query(None, description="Start date"),
+    end_date: Optional[datetime] = Query(None, description="End date"),
     db: AsyncSession = Depends(get_db),
     current_user: AuthInfo = Depends(get_current_user)
 ):
-    """Advanced import with validation and preview options"""
+    """Get sales analytics for tenant"""
     try:
-        # Read file content
-        content = await file.read()
+        logger.debug("API: Getting sales analytics", tenant_id=tenant_id)

-        # Determine encoding
-        try:
-            file_content = content.decode('utf-8')
-        except UnicodeDecodeError:
-            try:
-                file_content = content.decode('latin-1')
-            except UnicodeDecodeError:
-                file_content = content.decode('cp1252')
-
-        # Validate first if requested
-        if validate_only:
-            validation = await DataImportService.validate_import_data({
-                "tenant_id": tenant_id,
-                "data": file_content,
-                "data_format": file_format
-            })
-
-            # Add file preview for validation
-            if validation["valid"]:
-                # Get first few lines for preview
-                lines = file_content.split('\n')[:5]
-                validation["preview"] = lines
-                validation["total_lines"] = len(file_content.split('\n'))
-
-            return validation
-
-        # Process import
-        result = await DataImportService.process_upload(
-            tenant_id, file_content, file_format, db, file.filename
+        analytics = await SalesService.get_sales_analytics(
+            tenant_id, start_date, end_date, db
         )

-        if result["success"]:
-            # Publish event
-            await data_publisher.publish_data_imported({
-                "tenant_id": tenant_id,
-                "type": "advanced_import",
-                "format": file_format,
-                "filename": file.filename,
-                "records_created": result["records_created"],
-                "success_rate": result.get("success_rate", 0),
-                "timestamp": datetime.utcnow().isoformat()
-            })
-
-        return result
+        logger.debug("Analytics generated successfully", tenant_id=tenant_id)
+        return analytics

     except Exception as e:
-        raise HTTPException(status_code=500, detail=str(e))
-
-@router.get("/import/history/{tenant_id}")
-async def get_import_history(
-    tenant_id: str,
-    limit: int = Query(10, ge=1, le=100),
-    offset: int = Query(0, ge=0),
-    db: AsyncSession = Depends(get_db),
-    current_user: AuthInfo = Depends(get_current_user)
-):
-    """Get import history for tenant"""
-    try:
-        # Get recent imports by source and creation date
-        stmt = select(SalesData).where(
-            and_(
-                SalesData.tenant_id == tenant_id,
-                SalesData.source.in_(['csv', 'excel', 'json', 'pos', 'migrated'])
-            )
-        ).order_by(SalesData.created_at.desc()).offset(offset).limit(limit)
-
-        result = await db.execute(stmt)
-        records = result.scalars().all()
-
-        # Group by import session (same created_at)
-        import_sessions = {}
-        for record in records:
-            session_key = f"{record.source}_{record.created_at.date()}"
-            if session_key not in import_sessions:
-                import_sessions[session_key] = {
-                    "date": record.created_at,
-                    "source": record.source,
-                    "records": [],
-                    "total_records": 0,
-                    "total_revenue": 0.0
-                }
-
-            import_sessions[session_key]["records"].append({
-                "id": str(record.id),
-                "product_name": record.product_name,
-                "quantity_sold": record.quantity_sold,
-                "revenue": record.revenue or 0.0
-            })
-            import_sessions[session_key]["total_records"] += 1
-            import_sessions[session_key]["total_revenue"] += record.revenue or 0.0
-
-        return {
-            "import_sessions": list(import_sessions.values()),
-            "total_sessions": len(import_sessions),
-            "offset": offset,
-            "limit": limit
-        }
-
-    except Exception as e:
-        raise HTTPException(status_code=500, detail=str(e))
-
-@router.delete("/import/{tenant_id}/{import_date}")
-async def delete_import_batch(
-    tenant_id: str,
-    import_date: str, # Format: YYYY-MM-DD
-    source: str = Query(..., description="Import source (csv, excel, json, pos)"),
-    db: AsyncSession = Depends(get_db),
-    current_user: AuthInfo = Depends(get_current_user)
-):
-    """Delete an entire import batch"""
-    try:
-        # Parse date
-        import_date_obj = datetime.strptime(import_date, "%Y-%m-%d").date()
-        start_datetime = datetime.combine(import_date_obj, datetime.min.time())
-        end_datetime = datetime.combine(import_date_obj, datetime.max.time())
-
-        # Find records to delete
-        stmt = select(SalesData).where(
-            and_(
-                SalesData.tenant_id == tenant_id,
-                SalesData.source == source,
-                SalesData.created_at >= start_datetime,
-                SalesData.created_at <= end_datetime
-            )
-        )
-
-        result = await db.execute(stmt)
-        records_to_delete = result.scalars().all()
-
-        if not records_to_delete:
-            raise HTTPException(
-                status_code=404,
-                detail="No import batch found for the specified date and source"
-            )
-
-        # Delete records
-        for record in records_to_delete:
-            await db.delete(record)
-
-        await db.commit()
-
-        # Publish event
-        await data_publisher.publish_data_imported({
-            "tenant_id": tenant_id,
-            "type": "import_deleted",
-            "source": source,
-            "import_date": import_date,
-            "records_deleted": len(records_to_delete),
-            "timestamp": datetime.utcnow().isoformat()
-        })
-
-        return {
-            "success": True,
-            "records_deleted": len(records_to_delete),
-            "import_date": import_date,
-            "source": source
-        }
-
-    except ValueError:
-        raise HTTPException(status_code=400, detail="Invalid date format. Use YYYY-MM-DD")
-    except Exception as e:
-        raise HTTPException(status_code=500, detail=str(e))
-
-@router.get("/stats/{tenant_id}")
-async def get_sales_statistics(
-    tenant_id: str,
-    start_date: datetime = Query(None, description="Start date for statistics"),
-    end_date: datetime = Query(None, description="End date for statistics"),
-    db: AsyncSession = Depends(get_db),
-    current_user: AuthInfo = Depends(get_current_user)
-):
-    """Get sales statistics for tenant"""
-    try:
-        # Default to last 30 days if no dates provided
-        if not start_date:
-            start_date = datetime.now() - timedelta(days=30)
-        if not end_date:
-            end_date = datetime.now()
-
-        # Get sales data
-        stmt = select(SalesData).where(
-            and_(
-                SalesData.tenant_id == tenant_id,
-                SalesData.date >= start_date,
-                SalesData.date <= end_date
-            )
-        )
-
-        result = await db.execute(stmt)
-        records = result.scalars().all()
-
-        if not records:
-            return {
-                "total_records": 0,
-                "total_revenue": 0.0,
-                "total_quantity": 0,
-                "top_products": [],
-                "daily_sales": [],
-                "data_sources": {}
-            }
-
-        # Calculate statistics
-        total_revenue = sum(record.revenue or 0 for record in records)
-        total_quantity = sum(record.quantity_sold for record in records)
-
-        # Top products
-        product_stats = {}
-        for record in records:
-            if record.product_name not in product_stats:
-                product_stats[record.product_name] = {
-                    "quantity": 0,
-                    "revenue": 0.0,
-                    "occurrences": 0
-                }
-            product_stats[record.product_name]["quantity"] += record.quantity_sold
-            product_stats[record.product_name]["revenue"] += record.revenue or 0
-            product_stats[record.product_name]["occurrences"] += 1
-
-        top_products = sorted(
-            [{"product": k, **v} for k, v in product_stats.items()],
-            key=lambda x: x["quantity"],
-            reverse=True
-        )[:10]
-
-        # Daily sales
-        daily_stats = {}
-        for record in records:
-            date_key = record.date.date().isoformat()
-            if date_key not in daily_stats:
-                daily_stats[date_key] = {"quantity": 0, "revenue": 0.0, "products": 0}
-            daily_stats[date_key]["quantity"] += record.quantity_sold
-            daily_stats[date_key]["revenue"] += record.revenue or 0
-            daily_stats[date_key]["products"] += 1
-
-        daily_sales = [{"date": k, **v} for k, v in sorted(daily_stats.items())]
-
-        # Data sources
-        source_stats = {}
-        for record in records:
-            source = record.source
-            if source not in source_stats:
-                source_stats[source] = 0
-            source_stats[source] += 1
-
-        return {
-            "total_records": len(records),
-            "total_revenue": round(total_revenue, 2),
-            "total_quantity": total_quantity,
-            "average_revenue_per_sale": round(total_revenue / len(records), 2) if records else 0,
-            "date_range": {
-                "start": start_date.isoformat(),
-                "end": end_date.isoformat()
-            },
-            "top_products": top_products,
-            "daily_sales": daily_sales,
-            "data_sources": source_stats
-        }
-
-    except Exception as e:
-        raise HTTPException(status_code=500, detail=str(e))
+        logger.error("Failed to generate sales analytics", error=str(e))
+        raise HTTPException(status_code=500, detail=f"Failed to generate analytics: {str(e)}")

 @router.post("/export/{tenant_id}")
 async def export_sales_data(
     tenant_id: str,
     export_format: str = Query("csv", description="Export format: csv, excel, json"),
-    start_date: datetime = Query(None, description="Start date"),
-    end_date: datetime = Query(None, description="End date"),
-    products: List[str] = Query(None, description="Filter by products"),
+    start_date: Optional[datetime] = Query(None, description="Start date"),
+    end_date: Optional[datetime] = Query(None, description="End date"),
+    products: Optional[List[str]] = Query(None, description="Filter by products"),
     db: AsyncSession = Depends(get_db),
     current_user: AuthInfo = Depends(get_current_user)
 ):
     """Export sales data in specified format"""
     try:
-        # Build query
-        query_conditions = [SalesData.tenant_id == tenant_id]
+        logger.debug("API: Exporting sales data", tenant_id=tenant_id, format=export_format)

-        if start_date:
-            query_conditions.append(SalesData.date >= start_date)
-        if end_date:
-            query_conditions.append(SalesData.date <= end_date)
-        if products:
-            query_conditions.append(SalesData.product_name.in_(products))
+        export_result = await SalesService.export_sales_data(
+            tenant_id, export_format, start_date, end_date, products, db
+        )

-        stmt = select(SalesData).where(and_(*query_conditions)).order_by(SalesData.date.desc())
-
-        result = await db.execute(stmt)
-        records = result.scalars().all()
-
-        if not records:
+        if not export_result:
             raise HTTPException(status_code=404, detail="No data found for export")

-        # Convert to export format
-        export_data = []
-        for record in records:
-            export_data.append({
-                "fecha": record.date.strftime("%d/%m/%Y"),
-                "producto": record.product_name,
-                "cantidad": record.quantity_sold,
-                "ingresos": record.revenue or 0.0,
-                "ubicacion": record.location_id or "",
-                "fuente": record.source
-            })
+        logger.debug("Export completed successfully", tenant_id=tenant_id, format=export_format)

-        if export_format.lower() == "csv":
-            # Generate CSV
-            output = io.StringIO()
-            df = pd.DataFrame(export_data)
-            df.to_csv(output, index=False)
-
-            return Response(
-                content=output.getvalue(),
-                media_type="text/csv",
-                headers={"Content-Disposition": "attachment; filename=ventas_export.csv"}
-            )
-
-        elif export_format.lower() == "excel":
-            # Generate Excel
-            output = io.BytesIO()
-            df = pd.DataFrame(export_data)
-            df.to_excel(output, index=False, sheet_name="Ventas")
-
-            return Response(
-                content=output.getvalue(),
-                media_type="application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
-                headers={"Content-Disposition": "attachment; filename=ventas_export.xlsx"}
-            )
-
-        elif export_format.lower() == "json":
-            return {
-                "data": export_data,
-                "total_records": len(export_data),
-                "export_date": datetime.now().isoformat()
-            }
-
-        else:
-            raise HTTPException(
-                status_code=400,
-                detail="Formato no soportado. Use: csv, excel, json"
-            )
+        return StreamingResponse(
+            iter([export_result["content"]]),
+            media_type=export_result["media_type"],
+            headers={"Content-Disposition": f"attachment; filename={export_result['filename']}"}
+        )

     except HTTPException:
         raise
     except Exception as e:
-        raise HTTPException(status_code=500, detail=str(e))
+        logger.error("Failed to export sales data", error=str(e))
+        raise HTTPException(status_code=500, detail=f"Failed to export sales data: {str(e)}")

 @router.delete("/{record_id}")
 async def delete_sales_record(
     record_id: str,
     db: AsyncSession = Depends(get_db),
     current_user: AuthInfo = Depends(get_current_user)
 ):
     """Delete a sales record"""
     try:
         logger.debug("API: Deleting sales record", record_id=record_id)

         success = await SalesService.delete_sales_record(record_id, db)

         if not success:
             raise HTTPException(status_code=404, detail="Sales record not found")

         logger.debug("Sales record deleted successfully", record_id=record_id)
         return {"status": "success", "message": "Sales record deleted successfully"}

     except HTTPException:
         raise
     except Exception as e:
         logger.error("Failed to delete sales record", error=str(e))
         raise HTTPException(status_code=500, detail=f"Failed to delete sales record: {str(e)}")
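
The pattern this commit applies throughout the file is to isolate event publishing in its own try/except, so a failing message bus degrades to a warning instead of a 500. Below is a minimal standalone sketch of that pattern, assuming structlog is installed; the publish_safely helper is illustrative and not part of the commit, while the logger names and the "shouldn't break the API" rationale come from the diff itself.

import asyncio

import structlog

logger = structlog.get_logger()


async def publish_safely(publish, event: dict, event_name: str) -> bool:
    """Await a publish coroutine, logging instead of raising on failure,
    mirroring the try/except blocks this commit adds around data_publisher calls."""
    try:
        await publish(event)
        return True
    except Exception as pub_error:
        # Event publishing failure shouldn't break the API (same rationale as the diff)
        logger.warning("Failed to publish event", event_name=event_name, error=str(pub_error))
        return False


async def broken_publish(event: dict):
    # Stand-in for data_publisher.publish_sales_created when the broker is down
    raise ConnectionError("broker unreachable")


# Logs a warning and returns False instead of propagating the error
print(asyncio.run(publish_safely(broken_publish, {"type": "sales_record"}, "sales_created")))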
Reference in New Issue
Block a user
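From a client's point of view, the visible changes are the more specific error payloads (FastAPI renders each HTTPException as {"detail": ...}, so failures now read "Failed to import sales data: reason" rather than a bare exception string) and the export endpoint, which now streams an attachment assembled by SalesService.export_sales_data. Here is a hedged sketch of exercising both with httpx; the base URL, tenant id, and auth header are placeholders, and the router is assumed to be mounted at the service root.

import httpx

BASE_URL = "http://localhost:8000"           # placeholder: wherever the data service runs
HEADERS = {"Authorization": "Bearer TOKEN"}  # placeholder: whatever get_current_user expects

with httpx.Client(base_url=BASE_URL, headers=HEADERS) as client:
    # Query endpoint: a 500 now carries a contextual message in "detail"
    resp = client.post("/query", json={"tenant_id": "11111111-1111-1111-1111-111111111111"})
    if resp.status_code == 500:
        print("server error:", resp.json()["detail"])

    # Export endpoint: the new code returns a StreamingResponse whose
    # content, media type, and filename come from SalesService.export_sales_data
    export = client.post("/export/11111111-1111-1111-1111-111111111111",
                         params={"export_format": "csv"})
    if export.status_code == 200:
        with open("ventas_export.csv", "wb") as f:
            f.write(export.content)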