Fix issues
services/data/app/api/sales.py (new file, 529 lines)
@@ -0,0 +1,529 @@
# ================================================================
# services/data/app/api/sales.py
# ================================================================
"""Sales data API endpoints"""

import base64
import io
from datetime import datetime, timedelta
from typing import List

import pandas as pd
from fastapi import APIRouter, Depends, HTTPException, UploadFile, File, Form, Query, Response
from sqlalchemy import select, and_
from sqlalchemy.ext.asyncio import AsyncSession

from app.core.database import get_db
from app.core.auth import verify_token
from app.models.sales import SalesData  # model used by the raw queries below (import path assumed)
from app.services.sales_service import SalesService
from app.services.data_import_service import DataImportService
from app.services.messaging import data_publisher
from app.schemas.sales import (
    SalesDataCreate,
    SalesDataResponse,
    SalesDataQuery,
    SalesDataImport
)

router = APIRouter()

@router.post("/", response_model=SalesDataResponse)
|
||||
async def create_sales_record(
|
||||
sales_data: SalesDataCreate,
|
||||
db: AsyncSession = Depends(get_db),
|
||||
current_user: dict = Depends(verify_token)
|
||||
):
|
||||
"""Create a new sales record"""
|
||||
try:
|
||||
record = await SalesService.create_sales_record(sales_data, db)
|
||||
|
||||
# Publish event
|
||||
await data_publisher.publish_data_imported({
|
||||
"tenant_id": str(sales_data.tenant_id),
|
||||
"type": "sales_record",
|
||||
"source": sales_data.source,
|
||||
"timestamp": datetime.utcnow().isoformat()
|
||||
})
|
||||
|
||||
return record
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
@router.post("/query", response_model=List[SalesDataResponse])
|
||||
async def get_sales_data(
|
||||
query: SalesDataQuery,
|
||||
db: AsyncSession = Depends(get_db),
|
||||
current_user: dict = Depends(verify_token)
|
||||
):
|
||||
"""Get sales data by query parameters"""
|
||||
try:
|
||||
records = await SalesService.get_sales_data(query, db)
|
||||
return records
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
@router.post("/import")
|
||||
async def import_sales_data(
|
||||
tenant_id: str = Form(...),
|
||||
file_format: str = Form(...),
|
||||
file: UploadFile = File(...),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
current_user: dict = Depends(verify_token)
|
||||
):
|
||||
"""Import sales data from file"""
|
||||
try:
|
||||
# Read file content
|
||||
content = await file.read()
|
||||
file_content = content.decode('utf-8')
|
||||
|
||||
# Process import
|
||||
result = await DataImportService.process_upload(
|
||||
tenant_id, file_content, file_format, db
|
||||
)
|
||||
|
||||
if result["success"]:
|
||||
# Publish event
|
||||
await data_publisher.publish_data_imported({
|
||||
"tenant_id": tenant_id,
|
||||
"type": "bulk_import",
|
||||
"format": file_format,
|
||||
"records_created": result["records_created"],
|
||||
"timestamp": datetime.utcnow().isoformat()
|
||||
})
|
||||
|
||||
return result
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
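# Usage sketch (illustrative only, not part of this module): uploading a CSV to
# the endpoint above with httpx. The base URL, /sales mount prefix, tenant id,
# and token are placeholders, and httpx is not necessarily a dependency here.
#
#   import httpx
#
#   with open("ventas.csv", "rb") as f:
#       resp = httpx.post(
#           "http://localhost:8000/sales/import",
#           data={"tenant_id": "<tenant-uuid>", "file_format": "csv"},
#           files={"file": ("ventas.csv", f, "text/csv")},
#           headers={"Authorization": "Bearer <token>"},
#       )
#   resp.raise_for_status()
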
@router.post("/import/json")
|
||||
async def import_sales_json(
|
||||
import_data: SalesDataImport,
|
||||
db: AsyncSession = Depends(get_db),
|
||||
current_user: dict = Depends(verify_token)
|
||||
):
|
||||
"""Import sales data from JSON"""
|
||||
try:
|
||||
result = await DataImportService.process_upload(
|
||||
str(import_data.tenant_id),
|
||||
import_data.data,
|
||||
import_data.data_format,
|
||||
db
|
||||
)
|
||||
|
||||
if result["success"]:
|
||||
await data_publisher.publish_data_imported({
|
||||
"tenant_id": str(import_data.tenant_id),
|
||||
"type": "json_import",
|
||||
"records_created": result["records_created"],
|
||||
"timestamp": datetime.utcnow().isoformat()
|
||||
})
|
||||
|
||||
return result
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
@router.post("/import/validate")
|
||||
async def validate_import_data(
|
||||
import_data: SalesDataImport,
|
||||
current_user: dict = Depends(verify_token)
|
||||
):
|
||||
"""Validate import data before processing"""
|
||||
try:
|
||||
validation = await DataImportService.validate_import_data(
|
||||
import_data.model_dump()
|
||||
)
|
||||
return validation
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
@router.get("/import/template/{format_type}")
|
||||
async def get_import_template(
|
||||
format_type: str,
|
||||
current_user: dict = Depends(verify_token)
|
||||
):
|
||||
"""Get import template for specified format"""
|
||||
try:
|
||||
template = await DataImportService.get_import_template(format_type)
|
||||
|
||||
if "error" in template:
|
||||
raise HTTPException(status_code=400, detail=template["error"])
|
||||
|
||||
if format_type.lower() == "csv":
|
||||
return Response(
|
||||
content=template["template"],
|
||||
media_type="text/csv",
|
||||
headers={"Content-Disposition": f"attachment; filename={template['filename']}"}
|
||||
)
|
||||
elif format_type.lower() == "json":
|
||||
return Response(
|
||||
content=template["template"],
|
||||
media_type="application/json",
|
||||
headers={"Content-Disposition": f"attachment; filename={template['filename']}"}
|
||||
)
|
||||
elif format_type.lower() in ["excel", "xlsx"]:
|
||||
return Response(
|
||||
content=base64.b64decode(template["template"]),
|
||||
media_type=template["content_type"],
|
||||
headers={"Content-Disposition": f"attachment; filename={template['filename']}"}
|
||||
)
|
||||
else:
|
||||
return template
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
@router.post("/import/advanced")
|
||||
async def import_sales_data_advanced(
|
||||
tenant_id: str = Form(...),
|
||||
file_format: str = Form(...),
|
||||
file: UploadFile = File(...),
|
||||
validate_only: bool = Form(False),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
current_user: dict = Depends(verify_token)
|
||||
):
|
||||
"""Advanced import with validation and preview options"""
|
||||
try:
|
||||
# Read file content
|
||||
content = await file.read()
|
||||
|
||||
# Determine encoding
|
||||
try:
|
||||
file_content = content.decode('utf-8')
|
||||
except UnicodeDecodeError:
|
||||
try:
|
||||
file_content = content.decode('latin-1')
|
||||
except UnicodeDecodeError:
|
||||
file_content = content.decode('cp1252')
|
||||
|
||||
# Validate first if requested
|
||||
if validate_only:
|
||||
validation = await DataImportService.validate_import_data({
|
||||
"tenant_id": tenant_id,
|
||||
"data": file_content,
|
||||
"data_format": file_format
|
||||
})
|
||||
|
||||
# Add file preview for validation
|
||||
if validation["valid"]:
|
||||
# Get first few lines for preview
|
||||
lines = file_content.split('\n')[:5]
|
||||
validation["preview"] = lines
|
||||
validation["total_lines"] = len(file_content.split('\n'))
|
||||
|
||||
return validation
|
||||
|
||||
# Process import
|
||||
result = await DataImportService.process_upload(
|
||||
tenant_id, file_content, file_format, db, file.filename
|
||||
)
|
||||
|
||||
if result["success"]:
|
||||
# Publish event
|
||||
await data_publisher.publish_data_imported({
|
||||
"tenant_id": tenant_id,
|
||||
"type": "advanced_import",
|
||||
"format": file_format,
|
||||
"filename": file.filename,
|
||||
"records_created": result["records_created"],
|
||||
"success_rate": result.get("success_rate", 0),
|
||||
"timestamp": datetime.utcnow().isoformat()
|
||||
})
|
||||
|
||||
return result
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
@router.get("/import/history/{tenant_id}")
|
||||
async def get_import_history(
|
||||
tenant_id: str,
|
||||
limit: int = Query(10, ge=1, le=100),
|
||||
offset: int = Query(0, ge=0),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
current_user: dict = Depends(verify_token)
|
||||
):
|
||||
"""Get import history for tenant"""
|
||||
try:
|
||||
# Get recent imports by source and creation date
|
||||
stmt = select(SalesData).where(
|
||||
and_(
|
||||
SalesData.tenant_id == tenant_id,
|
||||
SalesData.source.in_(['csv', 'excel', 'json', 'pos', 'migrated'])
|
||||
)
|
||||
).order_by(SalesData.created_at.desc()).offset(offset).limit(limit)
|
||||
|
||||
result = await db.execute(stmt)
|
||||
records = result.scalars().all()
|
||||
|
||||
# Group by import session (same created_at)
|
||||
import_sessions = {}
|
||||
for record in records:
|
||||
session_key = f"{record.source}_{record.created_at.date()}"
|
||||
if session_key not in import_sessions:
|
||||
import_sessions[session_key] = {
|
||||
"date": record.created_at,
|
||||
"source": record.source,
|
||||
"records": [],
|
||||
"total_records": 0,
|
||||
"total_revenue": 0.0
|
||||
}
|
||||
|
||||
import_sessions[session_key]["records"].append({
|
||||
"id": str(record.id),
|
||||
"product_name": record.product_name,
|
||||
"quantity_sold": record.quantity_sold,
|
||||
"revenue": record.revenue or 0.0
|
||||
})
|
||||
import_sessions[session_key]["total_records"] += 1
|
||||
import_sessions[session_key]["total_revenue"] += record.revenue or 0.0
|
||||
|
||||
return {
|
||||
"import_sessions": list(import_sessions.values()),
|
||||
"total_sessions": len(import_sessions),
|
||||
"offset": offset,
|
||||
"limit": limit
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
@router.delete("/import/{tenant_id}/{import_date}")
|
||||
async def delete_import_batch(
|
||||
tenant_id: str,
|
||||
import_date: str, # Format: YYYY-MM-DD
|
||||
source: str = Query(..., description="Import source (csv, excel, json, pos)"),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
current_user: dict = Depends(verify_token)
|
||||
):
|
||||
"""Delete an entire import batch"""
|
||||
try:
|
||||
# Parse date
|
||||
import_date_obj = datetime.strptime(import_date, "%Y-%m-%d").date()
|
||||
start_datetime = datetime.combine(import_date_obj, datetime.min.time())
|
||||
end_datetime = datetime.combine(import_date_obj, datetime.max.time())
|
||||
|
||||
# Find records to delete
|
||||
stmt = select(SalesData).where(
|
||||
and_(
|
||||
SalesData.tenant_id == tenant_id,
|
||||
SalesData.source == source,
|
||||
SalesData.created_at >= start_datetime,
|
||||
SalesData.created_at <= end_datetime
|
||||
)
|
||||
)
|
||||
|
||||
result = await db.execute(stmt)
|
||||
records_to_delete = result.scalars().all()
|
||||
|
||||
if not records_to_delete:
|
||||
raise HTTPException(
|
||||
status_code=404,
|
||||
detail="No import batch found for the specified date and source"
|
||||
)
|
||||
|
||||
# Delete records
|
||||
for record in records_to_delete:
|
||||
await db.delete(record)
|
||||
|
||||
await db.commit()
|
||||
|
||||
# Publish event
|
||||
await data_publisher.publish_data_imported({
|
||||
"tenant_id": tenant_id,
|
||||
"type": "import_deleted",
|
||||
"source": source,
|
||||
"import_date": import_date,
|
||||
"records_deleted": len(records_to_delete),
|
||||
"timestamp": datetime.utcnow().isoformat()
|
||||
})
|
||||
|
||||
return {
|
||||
"success": True,
|
||||
"records_deleted": len(records_to_delete),
|
||||
"import_date": import_date,
|
||||
"source": source
|
||||
}
|
||||
|
||||
except ValueError:
|
||||
raise HTTPException(status_code=400, detail="Invalid date format. Use YYYY-MM-DD")
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
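# Usage sketch (illustrative only): removing one day's CSV batch via the
# endpoint above. URL prefix, tenant id, date, and token are placeholders.
#
#   import httpx
#
#   resp = httpx.delete(
#       "http://localhost:8000/sales/import/<tenant-uuid>/2024-01-15",
#       params={"source": "csv"},
#       headers={"Authorization": "Bearer <token>"},
#   )
#   resp.raise_for_status()
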
@router.get("/stats/{tenant_id}")
|
||||
async def get_sales_statistics(
|
||||
tenant_id: str,
|
||||
start_date: datetime = Query(None, description="Start date for statistics"),
|
||||
end_date: datetime = Query(None, description="End date for statistics"),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
current_user: dict = Depends(verify_token)
|
||||
):
|
||||
"""Get sales statistics for tenant"""
|
||||
try:
|
||||
# Default to last 30 days if no dates provided
|
||||
if not start_date:
|
||||
start_date = datetime.now() - timedelta(days=30)
|
||||
if not end_date:
|
||||
end_date = datetime.now()
|
||||
|
||||
# Get sales data
|
||||
stmt = select(SalesData).where(
|
||||
and_(
|
||||
SalesData.tenant_id == tenant_id,
|
||||
SalesData.date >= start_date,
|
||||
SalesData.date <= end_date
|
||||
)
|
||||
)
|
||||
|
||||
result = await db.execute(stmt)
|
||||
records = result.scalars().all()
|
||||
|
||||
if not records:
|
||||
return {
|
||||
"total_records": 0,
|
||||
"total_revenue": 0.0,
|
||||
"total_quantity": 0,
|
||||
"top_products": [],
|
||||
"daily_sales": [],
|
||||
"data_sources": {}
|
||||
}
|
||||
|
||||
# Calculate statistics
|
||||
total_revenue = sum(record.revenue or 0 for record in records)
|
||||
total_quantity = sum(record.quantity_sold for record in records)
|
||||
|
||||
# Top products
|
||||
product_stats = {}
|
||||
for record in records:
|
||||
if record.product_name not in product_stats:
|
||||
product_stats[record.product_name] = {
|
||||
"quantity": 0,
|
||||
"revenue": 0.0,
|
||||
"occurrences": 0
|
||||
}
|
||||
product_stats[record.product_name]["quantity"] += record.quantity_sold
|
||||
product_stats[record.product_name]["revenue"] += record.revenue or 0
|
||||
product_stats[record.product_name]["occurrences"] += 1
|
||||
|
||||
top_products = sorted(
|
||||
[{"product": k, **v} for k, v in product_stats.items()],
|
||||
key=lambda x: x["quantity"],
|
||||
reverse=True
|
||||
)[:10]
|
||||
|
||||
# Daily sales
|
||||
daily_stats = {}
|
||||
for record in records:
|
||||
date_key = record.date.date().isoformat()
|
||||
if date_key not in daily_stats:
|
||||
daily_stats[date_key] = {"quantity": 0, "revenue": 0.0, "products": 0}
|
||||
daily_stats[date_key]["quantity"] += record.quantity_sold
|
||||
daily_stats[date_key]["revenue"] += record.revenue or 0
|
||||
daily_stats[date_key]["products"] += 1
|
||||
|
||||
daily_sales = [{"date": k, **v} for k, v in sorted(daily_stats.items())]
|
||||
|
||||
# Data sources
|
||||
source_stats = {}
|
||||
for record in records:
|
||||
source = record.source
|
||||
if source not in source_stats:
|
||||
source_stats[source] = 0
|
||||
source_stats[source] += 1
|
||||
|
||||
return {
|
||||
"total_records": len(records),
|
||||
"total_revenue": round(total_revenue, 2),
|
||||
"total_quantity": total_quantity,
|
||||
"average_revenue_per_sale": round(total_revenue / len(records), 2) if records else 0,
|
||||
"date_range": {
|
||||
"start": start_date.isoformat(),
|
||||
"end": end_date.isoformat()
|
||||
},
|
||||
"top_products": top_products,
|
||||
"daily_sales": daily_sales,
|
||||
"data_sources": source_stats
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
@router.post("/export/{tenant_id}")
|
||||
async def export_sales_data(
|
||||
tenant_id: str,
|
||||
export_format: str = Query("csv", description="Export format: csv, excel, json"),
|
||||
start_date: datetime = Query(None, description="Start date"),
|
||||
end_date: datetime = Query(None, description="End date"),
|
||||
products: List[str] = Query(None, description="Filter by products"),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
current_user: dict = Depends(verify_token)
|
||||
):
|
||||
"""Export sales data in specified format"""
|
||||
try:
|
||||
# Build query
|
||||
query_conditions = [SalesData.tenant_id == tenant_id]
|
||||
|
||||
if start_date:
|
||||
query_conditions.append(SalesData.date >= start_date)
|
||||
if end_date:
|
||||
query_conditions.append(SalesData.date <= end_date)
|
||||
if products:
|
||||
query_conditions.append(SalesData.product_name.in_(products))
|
||||
|
||||
stmt = select(SalesData).where(and_(*query_conditions)).order_by(SalesData.date.desc())
|
||||
|
||||
result = await db.execute(stmt)
|
||||
records = result.scalars().all()
|
||||
|
||||
if not records:
|
||||
raise HTTPException(status_code=404, detail="No data found for export")
|
||||
|
||||
# Convert to export format
|
||||
export_data = []
|
||||
for record in records:
|
||||
export_data.append({
|
||||
"fecha": record.date.strftime("%d/%m/%Y"),
|
||||
"producto": record.product_name,
|
||||
"cantidad": record.quantity_sold,
|
||||
"ingresos": record.revenue or 0.0,
|
||||
"ubicacion": record.location_id or "",
|
||||
"fuente": record.source
|
||||
})
|
||||
|
||||
if export_format.lower() == "csv":
|
||||
# Generate CSV
|
||||
output = io.StringIO()
|
||||
df = pd.DataFrame(export_data)
|
||||
df.to_csv(output, index=False)
|
||||
|
||||
return Response(
|
||||
content=output.getvalue(),
|
||||
media_type="text/csv",
|
||||
headers={"Content-Disposition": "attachment; filename=ventas_export.csv"}
|
||||
)
|
||||
|
||||
elif export_format.lower() == "excel":
|
||||
# Generate Excel
|
||||
output = io.BytesIO()
|
||||
df = pd.DataFrame(export_data)
|
||||
df.to_excel(output, index=False, sheet_name="Ventas")
|
||||
|
||||
return Response(
|
||||
content=output.getvalue(),
|
||||
media_type="application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
|
||||
headers={"Content-Disposition": "attachment; filename=ventas_export.xlsx"}
|
||||
)
|
||||
|
||||
elif export_format.lower() == "json":
|
||||
return {
|
||||
"data": export_data,
|
||||
"total_records": len(export_data),
|
||||
"export_date": datetime.now().isoformat()
|
||||
}
|
||||
|
||||
else:
|
||||
raise HTTPException(
|
||||
status_code=400,
|
||||
detail="Formato no soportado. Use: csv, excel, json"
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
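A quick, hedged sketch of how a client might exercise the statistics and export endpoints above. The base URL, the /sales mount prefix, the bearer token, and the product names are assumptions, and httpx is used purely for illustration:

import httpx

BASE = "http://localhost:8000/sales"  # assumed mount prefix
HEADERS = {"Authorization": "Bearer <token>"}  # placeholder token

# Statistics default to the last 30 days when no dates are passed
stats = httpx.get(f"{BASE}/stats/<tenant-uuid>", headers=HEADERS).json()
print(stats["total_records"], stats["total_revenue"])

# CSV export filtered to two (hypothetical) products; httpx repeats
# list-valued params, matching FastAPI's List[str] Query parsing
resp = httpx.post(
    f"{BASE}/export/<tenant-uuid>",
    params={"export_format": "csv", "products": ["Pan", "Croissant"]},
    headers=HEADERS,
)
with open("ventas_export.csv", "wb") as f:
    f.write(resp.content)
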
services/data/app/api/traffic.py (new file, 83 lines)
@@ -0,0 +1,83 @@
# ================================================================
# services/data/app/api/traffic.py
# ================================================================
"""Traffic data API endpoints"""

from fastapi import APIRouter, Depends, HTTPException, Query
from sqlalchemy.ext.asyncio import AsyncSession
from typing import List
from datetime import datetime

from app.core.database import get_db
from app.core.auth import verify_token
from app.services.traffic_service import TrafficService
from app.services.messaging import data_publisher
from app.schemas.external import TrafficDataResponse

router = APIRouter()
traffic_service = TrafficService()

@router.get("/current", response_model=TrafficDataResponse)
|
||||
async def get_current_traffic(
|
||||
latitude: float = Query(..., description="Latitude"),
|
||||
longitude: float = Query(..., description="Longitude"),
|
||||
current_user: dict = Depends(verify_token)
|
||||
):
|
||||
"""Get current traffic data for location"""
|
||||
try:
|
||||
traffic = await traffic_service.get_current_traffic(latitude, longitude)
|
||||
if not traffic:
|
||||
raise HTTPException(status_code=404, detail="Traffic data not available")
|
||||
|
||||
# Publish event
|
||||
await data_publisher.publish_traffic_updated({
|
||||
"type": "current_requested",
|
||||
"latitude": latitude,
|
||||
"longitude": longitude,
|
||||
"timestamp": datetime.utcnow().isoformat()
|
||||
})
|
||||
|
||||
return traffic
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
@router.get("/historical", response_model=List[TrafficDataResponse])
|
||||
async def get_historical_traffic(
|
||||
latitude: float = Query(..., description="Latitude"),
|
||||
longitude: float = Query(..., description="Longitude"),
|
||||
start_date: datetime = Query(..., description="Start date"),
|
||||
end_date: datetime = Query(..., description="End date"),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
current_user: dict = Depends(verify_token)
|
||||
):
|
||||
"""Get historical traffic data"""
|
||||
try:
|
||||
# Validate date range
|
||||
if end_date <= start_date:
|
||||
raise HTTPException(status_code=400, detail="End date must be after start date")
|
||||
|
||||
if (end_date - start_date).days > 90:
|
||||
raise HTTPException(status_code=400, detail="Date range cannot exceed 90 days")
|
||||
|
||||
historical_data = await traffic_service.get_historical_traffic(
|
||||
latitude, longitude, start_date, end_date, db
|
||||
)
|
||||
|
||||
# Publish event
|
||||
await data_publisher.publish_traffic_updated({
|
||||
"type": "historical_requested",
|
||||
"latitude": latitude,
|
||||
"longitude": longitude,
|
||||
"start_date": start_date.isoformat(),
|
||||
"end_date": end_date.isoformat(),
|
||||
"records_count": len(historical_data),
|
||||
"timestamp": datetime.utcnow().isoformat()
|
||||
})
|
||||
|
||||
return historical_data
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
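Likewise for traffic.py, a hedged client sketch for the historical endpoint; it keeps the range inside the 90-day window the handler enforces. The base URL, /traffic mount prefix, token, and the Madrid coordinates are assumptions:

from datetime import datetime, timedelta

import httpx

end = datetime.utcnow()
start = end - timedelta(days=30)  # stays within the 90-day limit

resp = httpx.get(
    "http://localhost:8000/traffic/historical",  # assumed mount prefix
    params={
        "latitude": 40.4168,
        "longitude": -3.7038,
        "start_date": start.isoformat(),
        "end_date": end.isoformat(),
    },
    headers={"Authorization": "Bearer <token>"},  # placeholder
)
resp.raise_for_status()
print(len(resp.json()), "traffic records")
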
services/data/app/api/weather.py (new file, 100 lines)
@@ -0,0 +1,100 @@
# ================================================================
# services/data/app/api/weather.py
# ================================================================
"""Weather data API endpoints"""

from fastapi import APIRouter, Depends, HTTPException, Query
from sqlalchemy.ext.asyncio import AsyncSession
from typing import List
from datetime import datetime

from app.core.database import get_db
from app.core.auth import verify_token
from app.services.weather_service import WeatherService
from app.services.messaging import data_publisher
from app.schemas.external import (
    WeatherDataResponse,
    WeatherForecastResponse
)

router = APIRouter()
weather_service = WeatherService()

@router.get("/current", response_model=WeatherDataResponse)
|
||||
async def get_current_weather(
|
||||
latitude: float = Query(..., description="Latitude"),
|
||||
longitude: float = Query(..., description="Longitude"),
|
||||
current_user: dict = Depends(verify_token)
|
||||
):
|
||||
"""Get current weather for location"""
|
||||
try:
|
||||
weather = await weather_service.get_current_weather(latitude, longitude)
|
||||
if not weather:
|
||||
raise HTTPException(status_code=404, detail="Weather data not available")
|
||||
|
||||
return weather
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
@router.get("/forecast", response_model=List[WeatherForecastResponse])
|
||||
async def get_weather_forecast(
|
||||
latitude: float = Query(..., description="Latitude"),
|
||||
longitude: float = Query(..., description="Longitude"),
|
||||
days: int = Query(7, description="Number of forecast days", ge=1, le=14),
|
||||
current_user: dict = Depends(verify_token)
|
||||
):
|
||||
"""Get weather forecast for location"""
|
||||
try:
|
||||
forecast = await weather_service.get_weather_forecast(latitude, longitude, days)
|
||||
|
||||
# Publish event
|
||||
await data_publisher.publish_weather_updated({
|
||||
"type": "forecast_requested",
|
||||
"latitude": latitude,
|
||||
"longitude": longitude,
|
||||
"days": days,
|
||||
"timestamp": datetime.utcnow().isoformat()
|
||||
})
|
||||
|
||||
return forecast
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
@router.get("/historical", response_model=List[WeatherDataResponse])
|
||||
async def get_historical_weather(
|
||||
latitude: float = Query(..., description="Latitude"),
|
||||
longitude: float = Query(..., description="Longitude"),
|
||||
start_date: datetime = Query(..., description="Start date"),
|
||||
end_date: datetime = Query(..., description="End date"),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
current_user: dict = Depends(verify_token)
|
||||
):
|
||||
"""Get historical weather data"""
|
||||
try:
|
||||
# Validate date range
|
||||
if end_date <= start_date:
|
||||
raise HTTPException(status_code=400, detail="End date must be after start date")
|
||||
|
||||
if (end_date - start_date).days > 365:
|
||||
raise HTTPException(status_code=400, detail="Date range cannot exceed 365 days")
|
||||
|
||||
historical_data = await weather_service.get_historical_weather(
|
||||
latitude, longitude, start_date, end_date, db
|
||||
)
|
||||
|
||||
# Publish event
|
||||
await data_publisher.publish_weather_updated({
|
||||
"type": "historical_requested",
|
||||
"latitude": latitude,
|
||||
"longitude": longitude,
|
||||
"start_date": start_date.isoformat(),
|
||||
"end_date": end_date.isoformat(),
|
||||
"records_count": len(historical_data),
|
||||
"timestamp": datetime.utcnow().isoformat()
|
||||
})
|
||||
|
||||
return historical_data
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
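And a matching sketch for weather.py's forecast endpoint; days is bounded to 1-14 by the Query declaration above. The /weather mount prefix, token, and coordinates are again placeholders:

import httpx

resp = httpx.get(
    "http://localhost:8000/weather/forecast",  # assumed mount prefix
    params={"latitude": 40.4168, "longitude": -3.7038, "days": 7},
    headers={"Authorization": "Bearer <token>"},  # placeholder
)
resp.raise_for_status()
for day in resp.json():
    print(day)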