New alert service
This commit is contained in:
160
services/sales/app/api/batch.py
Normal file
160
services/sales/app/api/batch.py
Normal file
@@ -0,0 +1,160 @@
|
||||
# services/sales/app/api/batch.py
|
||||
"""
|
||||
Sales Batch API - Batch operations for enterprise dashboards
|
||||
|
||||
Phase 2 optimization: Eliminate N+1 query patterns by fetching data for
|
||||
multiple tenants in a single request.
|
||||
"""
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Body, Path
|
||||
from typing import List, Dict, Any
|
||||
from datetime import date
|
||||
from uuid import UUID
|
||||
from pydantic import BaseModel, Field
|
||||
import structlog
|
||||
import asyncio
|
||||
|
||||
from app.services.sales_service import SalesService
|
||||
from shared.auth.decorators import get_current_user_dep
|
||||
from shared.routing import RouteBuilder
|
||||
from shared.auth.access_control import require_user_role
|
||||
|
||||
route_builder = RouteBuilder('sales')
|
||||
router = APIRouter(tags=["sales-batch"])
|
||||
logger = structlog.get_logger()
|
||||
|
||||
|
||||
def get_sales_service():
|
||||
"""Dependency injection for SalesService"""
|
||||
return SalesService()
|
||||
|
||||
|
||||
class SalesSummaryBatchRequest(BaseModel):
|
||||
"""Request model for batch sales summary"""
|
||||
tenant_ids: List[str] = Field(..., description="List of tenant IDs", max_length=100)
|
||||
start_date: date = Field(..., description="Start date for sales period")
|
||||
end_date: date = Field(..., description="End date for sales period")
|
||||
|
||||
|
||||
class SalesSummary(BaseModel):
|
||||
"""Sales summary for a single tenant"""
|
||||
tenant_id: str
|
||||
total_revenue: float
|
||||
total_orders: int
|
||||
average_order_value: float
|
||||
period_start: str
|
||||
period_end: str
|
||||
|
||||
|
||||
@router.post("/api/v1/batch/sales-summary", response_model=Dict[str, SalesSummary])
|
||||
async def get_sales_summary_batch(
|
||||
request: SalesSummaryBatchRequest = Body(...),
|
||||
current_user: Dict[str, Any] = Depends(get_current_user_dep),
|
||||
sales_service: SalesService = Depends(get_sales_service)
|
||||
):
|
||||
"""
|
||||
Get sales summary for multiple tenants in a single request.
|
||||
|
||||
Optimized for enterprise dashboards to eliminate N+1 query patterns.
|
||||
Fetches sales data for all tenants in parallel.
|
||||
|
||||
Args:
|
||||
request: Batch request with tenant IDs and date range
|
||||
|
||||
Returns:
|
||||
Dictionary mapping tenant_id -> sales summary
|
||||
|
||||
Example:
|
||||
POST /api/v1/sales/batch/sales-summary
|
||||
{
|
||||
"tenant_ids": ["tenant-1", "tenant-2", "tenant-3"],
|
||||
"start_date": "2025-01-01",
|
||||
"end_date": "2025-01-31"
|
||||
}
|
||||
|
||||
Response:
|
||||
{
|
||||
"tenant-1": {"tenant_id": "tenant-1", "total_revenue": 50000, ...},
|
||||
"tenant-2": {"tenant_id": "tenant-2", "total_revenue": 45000", ...},
|
||||
"tenant-3": {"tenant_id": "tenant-3", "total_revenue": 52000, ...}
|
||||
}
|
||||
"""
|
||||
try:
|
||||
if len(request.tenant_ids) > 100:
|
||||
raise HTTPException(
|
||||
status_code=400,
|
||||
detail="Maximum 100 tenant IDs allowed per batch request"
|
||||
)
|
||||
|
||||
if not request.tenant_ids:
|
||||
return {}
|
||||
|
||||
logger.info(
|
||||
"Batch fetching sales summaries",
|
||||
tenant_count=len(request.tenant_ids),
|
||||
start_date=str(request.start_date),
|
||||
end_date=str(request.end_date)
|
||||
)
|
||||
|
||||
async def fetch_tenant_sales(tenant_id: str) -> tuple[str, SalesSummary]:
|
||||
"""Fetch sales summary for a single tenant"""
|
||||
try:
|
||||
tenant_uuid = UUID(tenant_id)
|
||||
summary = await sales_service.get_sales_analytics(
|
||||
tenant_uuid,
|
||||
request.start_date,
|
||||
request.end_date
|
||||
)
|
||||
|
||||
return tenant_id, SalesSummary(
|
||||
tenant_id=tenant_id,
|
||||
total_revenue=float(summary.get('total_revenue', 0)),
|
||||
total_orders=int(summary.get('total_orders', 0)),
|
||||
average_order_value=float(summary.get('average_order_value', 0)),
|
||||
period_start=str(request.start_date),
|
||||
period_end=str(request.end_date)
|
||||
)
|
||||
except Exception as e:
|
||||
logger.warning(
|
||||
"Failed to fetch sales for tenant in batch",
|
||||
tenant_id=tenant_id,
|
||||
error=str(e)
|
||||
)
|
||||
return tenant_id, SalesSummary(
|
||||
tenant_id=tenant_id,
|
||||
total_revenue=0.0,
|
||||
total_orders=0,
|
||||
average_order_value=0.0,
|
||||
period_start=str(request.start_date),
|
||||
period_end=str(request.end_date)
|
||||
)
|
||||
|
||||
# Fetch all tenant sales in parallel
|
||||
tasks = [fetch_tenant_sales(tid) for tid in request.tenant_ids]
|
||||
results = await asyncio.gather(*tasks, return_exceptions=True)
|
||||
|
||||
# Build result dictionary
|
||||
result_dict = {}
|
||||
for result in results:
|
||||
if isinstance(result, Exception):
|
||||
logger.error("Exception in batch sales fetch", error=str(result))
|
||||
continue
|
||||
tenant_id, summary = result
|
||||
result_dict[tenant_id] = summary
|
||||
|
||||
logger.info(
|
||||
"Batch sales summaries retrieved",
|
||||
requested_count=len(request.tenant_ids),
|
||||
successful_count=len(result_dict)
|
||||
)
|
||||
|
||||
return result_dict
|
||||
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error("Error in batch sales summary", error=str(e), exc_info=True)
|
||||
raise HTTPException(
|
||||
status_code=500,
|
||||
detail=f"Failed to fetch batch sales summaries: {str(e)}"
|
||||
)
|
||||
535
services/sales/app/consumers/sales_event_consumer.py
Normal file
535
services/sales/app/consumers/sales_event_consumer.py
Normal file
@@ -0,0 +1,535 @@
|
||||
"""
|
||||
Sales Event Consumer
|
||||
Processes sales transaction events from RabbitMQ and updates analytics
|
||||
Handles completed sales and refunds from POS systems
|
||||
"""
|
||||
import json
|
||||
import structlog
|
||||
from typing import Dict, Any
|
||||
from datetime import datetime, date
|
||||
from decimal import Decimal
|
||||
from collections import defaultdict
|
||||
|
||||
from shared.messaging import RabbitMQClient
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy import select, func
|
||||
from sqlalchemy.dialects.postgresql import insert
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
|
||||
class SalesEventConsumer:
|
||||
"""
|
||||
Consumes sales transaction events and updates sales analytics
|
||||
Processes events from POS consumer
|
||||
"""
|
||||
|
||||
def __init__(self, db_session: AsyncSession):
|
||||
self.db_session = db_session
|
||||
|
||||
async def consume_sales_events(
|
||||
self,
|
||||
rabbitmq_client: RabbitMQClient
|
||||
):
|
||||
"""
|
||||
Start consuming sales events from RabbitMQ
|
||||
"""
|
||||
async def process_message(message):
|
||||
"""Process a single sales event message"""
|
||||
try:
|
||||
async with message.process():
|
||||
# Parse event data
|
||||
event_data = json.loads(message.body.decode())
|
||||
logger.info(
|
||||
"Received sales event",
|
||||
event_id=event_data.get('event_id'),
|
||||
event_type=event_data.get('event_type'),
|
||||
tenant_id=event_data.get('tenant_id')
|
||||
)
|
||||
|
||||
# Process the event
|
||||
await self.process_sales_event(event_data)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
"Error processing sales event",
|
||||
error=str(e),
|
||||
exc_info=True
|
||||
)
|
||||
|
||||
# Start consuming events
|
||||
await rabbitmq_client.consume_events(
|
||||
exchange_name="sales.events",
|
||||
queue_name="sales.processing.queue",
|
||||
routing_key="sales.transaction.*",
|
||||
callback=process_message
|
||||
)
|
||||
|
||||
logger.info("Started consuming sales events")
|
||||
|
||||
async def process_sales_event(self, event_data: Dict[str, Any]) -> bool:
|
||||
"""
|
||||
Process a sales event based on type
|
||||
|
||||
Args:
|
||||
event_data: Full event payload from RabbitMQ
|
||||
|
||||
Returns:
|
||||
bool: True if processed successfully
|
||||
"""
|
||||
try:
|
||||
event_type = event_data.get('event_type')
|
||||
data = event_data.get('data', {})
|
||||
tenant_id = event_data.get('tenant_id')
|
||||
|
||||
if not tenant_id:
|
||||
logger.warning("Sales event missing tenant_id", event_data=event_data)
|
||||
return False
|
||||
|
||||
# Route to appropriate handler
|
||||
if event_type == 'sales.transaction.completed':
|
||||
success = await self._handle_transaction_completed(tenant_id, data)
|
||||
elif event_type == 'sales.transaction.refunded':
|
||||
success = await self._handle_transaction_refunded(tenant_id, data)
|
||||
else:
|
||||
logger.warning("Unknown sales event type", event_type=event_type)
|
||||
success = True # Mark as processed to avoid retry
|
||||
|
||||
if success:
|
||||
logger.info(
|
||||
"Sales event processed successfully",
|
||||
event_type=event_type,
|
||||
tenant_id=tenant_id
|
||||
)
|
||||
else:
|
||||
logger.error(
|
||||
"Sales event processing failed",
|
||||
event_type=event_type,
|
||||
tenant_id=tenant_id
|
||||
)
|
||||
|
||||
return success
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
"Error in process_sales_event",
|
||||
error=str(e),
|
||||
event_id=event_data.get('event_id'),
|
||||
exc_info=True
|
||||
)
|
||||
return False
|
||||
|
||||
async def _handle_transaction_completed(
|
||||
self,
|
||||
tenant_id: str,
|
||||
data: Dict[str, Any]
|
||||
) -> bool:
|
||||
"""
|
||||
Handle completed sale transaction
|
||||
|
||||
Updates:
|
||||
- Daily sales analytics aggregates
|
||||
- Revenue tracking
|
||||
- Transaction counters
|
||||
- Product sales tracking
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant ID
|
||||
data: Transaction data from event
|
||||
|
||||
Returns:
|
||||
bool: True if handled successfully
|
||||
"""
|
||||
try:
|
||||
transaction_id = data.get('transaction_id')
|
||||
total_amount = Decimal(str(data.get('total_amount', 0)))
|
||||
transaction_date_str = data.get('transaction_date')
|
||||
items = data.get('items', [])
|
||||
pos_system = data.get('pos_system', 'unknown')
|
||||
|
||||
if not transaction_id:
|
||||
logger.warning("Transaction missing ID", data=data)
|
||||
return False
|
||||
|
||||
# Parse transaction date
|
||||
if transaction_date_str:
|
||||
if isinstance(transaction_date_str, str):
|
||||
transaction_date = datetime.fromisoformat(
|
||||
transaction_date_str.replace('Z', '+00:00')
|
||||
).date()
|
||||
else:
|
||||
transaction_date = datetime.utcnow().date()
|
||||
else:
|
||||
transaction_date = datetime.utcnow().date()
|
||||
|
||||
# Check for duplicate processing (idempotency)
|
||||
# In production, would check a processed_transactions table
|
||||
# For now, we rely on unique constraints in analytics table
|
||||
|
||||
# Update daily sales analytics
|
||||
await self._update_daily_analytics(
|
||||
tenant_id=tenant_id,
|
||||
transaction_date=transaction_date,
|
||||
revenue=total_amount,
|
||||
transaction_count=1,
|
||||
refund_amount=Decimal('0')
|
||||
)
|
||||
|
||||
# Update product sales tracking
|
||||
await self._update_product_sales(
|
||||
tenant_id=tenant_id,
|
||||
transaction_date=transaction_date,
|
||||
items=items
|
||||
)
|
||||
|
||||
# Store transaction record (optional detailed tracking)
|
||||
await self._store_transaction_record(
|
||||
tenant_id=tenant_id,
|
||||
transaction_id=transaction_id,
|
||||
transaction_date=transaction_date,
|
||||
total_amount=total_amount,
|
||||
items=items,
|
||||
pos_system=pos_system,
|
||||
transaction_type='sale'
|
||||
)
|
||||
|
||||
logger.info(
|
||||
"Transaction processed and analytics updated",
|
||||
tenant_id=tenant_id,
|
||||
transaction_id=transaction_id,
|
||||
total_amount=float(total_amount),
|
||||
date=str(transaction_date)
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
"Error handling transaction completed",
|
||||
error=str(e),
|
||||
tenant_id=tenant_id,
|
||||
transaction_id=data.get('transaction_id'),
|
||||
exc_info=True
|
||||
)
|
||||
return False
|
||||
|
||||
async def _handle_transaction_refunded(
|
||||
self,
|
||||
tenant_id: str,
|
||||
data: Dict[str, Any]
|
||||
) -> bool:
|
||||
"""
|
||||
Handle refunded sale transaction
|
||||
|
||||
Updates:
|
||||
- Daily sales analytics (negative revenue)
|
||||
- Refund counters
|
||||
- Product refund tracking
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant ID
|
||||
data: Refund data from event
|
||||
|
||||
Returns:
|
||||
bool: True if handled successfully
|
||||
"""
|
||||
try:
|
||||
refund_id = data.get('refund_id')
|
||||
original_transaction_id = data.get('original_transaction_id')
|
||||
refund_amount = Decimal(str(data.get('refund_amount', 0)))
|
||||
refund_date_str = data.get('refund_date')
|
||||
items = data.get('items', [])
|
||||
pos_system = data.get('pos_system', 'unknown')
|
||||
|
||||
if not refund_id:
|
||||
logger.warning("Refund missing ID", data=data)
|
||||
return False
|
||||
|
||||
# Parse refund date
|
||||
if refund_date_str:
|
||||
if isinstance(refund_date_str, str):
|
||||
refund_date = datetime.fromisoformat(
|
||||
refund_date_str.replace('Z', '+00:00')
|
||||
).date()
|
||||
else:
|
||||
refund_date = datetime.utcnow().date()
|
||||
else:
|
||||
refund_date = datetime.utcnow().date()
|
||||
|
||||
# Update daily sales analytics (subtract revenue, add refund)
|
||||
await self._update_daily_analytics(
|
||||
tenant_id=tenant_id,
|
||||
transaction_date=refund_date,
|
||||
revenue=-refund_amount, # Negative revenue
|
||||
transaction_count=0, # Don't increment transaction count for refunds
|
||||
refund_amount=refund_amount
|
||||
)
|
||||
|
||||
# Update product refund tracking
|
||||
await self._update_product_refunds(
|
||||
tenant_id=tenant_id,
|
||||
refund_date=refund_date,
|
||||
items=items
|
||||
)
|
||||
|
||||
# Store refund record
|
||||
await self._store_transaction_record(
|
||||
tenant_id=tenant_id,
|
||||
transaction_id=refund_id,
|
||||
transaction_date=refund_date,
|
||||
total_amount=-refund_amount,
|
||||
items=items,
|
||||
pos_system=pos_system,
|
||||
transaction_type='refund',
|
||||
original_transaction_id=original_transaction_id
|
||||
)
|
||||
|
||||
logger.info(
|
||||
"Refund processed and analytics updated",
|
||||
tenant_id=tenant_id,
|
||||
refund_id=refund_id,
|
||||
refund_amount=float(refund_amount),
|
||||
date=str(refund_date)
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
"Error handling transaction refunded",
|
||||
error=str(e),
|
||||
tenant_id=tenant_id,
|
||||
refund_id=data.get('refund_id'),
|
||||
exc_info=True
|
||||
)
|
||||
return False
|
||||
|
||||
async def _update_daily_analytics(
|
||||
self,
|
||||
tenant_id: str,
|
||||
transaction_date: date,
|
||||
revenue: Decimal,
|
||||
transaction_count: int,
|
||||
refund_amount: Decimal
|
||||
):
|
||||
"""
|
||||
Update or create daily sales analytics record
|
||||
|
||||
Uses UPSERT (INSERT ... ON CONFLICT UPDATE) for atomic updates
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant ID
|
||||
transaction_date: Date of transaction
|
||||
revenue: Revenue amount (negative for refunds)
|
||||
transaction_count: Number of transactions
|
||||
refund_amount: Refund amount
|
||||
"""
|
||||
try:
|
||||
# Note: This assumes a sales_analytics table exists
|
||||
# In production, ensure table is created via migration
|
||||
from app.models.sales_analytics import SalesAnalytics
|
||||
|
||||
# Use PostgreSQL UPSERT for atomic updates
|
||||
stmt = insert(SalesAnalytics).values(
|
||||
tenant_id=tenant_id,
|
||||
date=transaction_date,
|
||||
total_revenue=revenue,
|
||||
total_transactions=transaction_count,
|
||||
total_refunds=refund_amount,
|
||||
average_transaction_value=revenue if transaction_count > 0 else Decimal('0'),
|
||||
updated_at=datetime.utcnow()
|
||||
).on_conflict_do_update(
|
||||
index_elements=['tenant_id', 'date'],
|
||||
set_={
|
||||
'total_revenue': SalesAnalytics.total_revenue + revenue,
|
||||
'total_transactions': SalesAnalytics.total_transactions + transaction_count,
|
||||
'total_refunds': SalesAnalytics.total_refunds + refund_amount,
|
||||
'average_transaction_value': (
|
||||
(SalesAnalytics.total_revenue + revenue) /
|
||||
func.greatest(SalesAnalytics.total_transactions + transaction_count, 1)
|
||||
),
|
||||
'updated_at': datetime.utcnow()
|
||||
}
|
||||
)
|
||||
|
||||
await self.db_session.execute(stmt)
|
||||
await self.db_session.commit()
|
||||
|
||||
logger.info(
|
||||
"Daily analytics updated",
|
||||
tenant_id=tenant_id,
|
||||
date=str(transaction_date),
|
||||
revenue_delta=float(revenue),
|
||||
transaction_count_delta=transaction_count
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
await self.db_session.rollback()
|
||||
logger.error(
|
||||
"Failed to update daily analytics",
|
||||
tenant_id=tenant_id,
|
||||
date=str(transaction_date),
|
||||
error=str(e),
|
||||
exc_info=True
|
||||
)
|
||||
raise
|
||||
|
||||
async def _update_product_sales(
|
||||
self,
|
||||
tenant_id: str,
|
||||
transaction_date: date,
|
||||
items: list
|
||||
):
|
||||
"""
|
||||
Update product sales tracking
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant ID
|
||||
transaction_date: Date of transaction
|
||||
items: List of items sold
|
||||
"""
|
||||
try:
|
||||
# Aggregate items by product
|
||||
product_sales = defaultdict(lambda: {'quantity': 0, 'revenue': Decimal('0')})
|
||||
|
||||
for item in items:
|
||||
product_id = item.get('product_id')
|
||||
if not product_id:
|
||||
continue
|
||||
|
||||
quantity = item.get('quantity', 0)
|
||||
unit_price = Decimal(str(item.get('unit_price', 0)))
|
||||
revenue = quantity * unit_price
|
||||
|
||||
product_sales[product_id]['quantity'] += quantity
|
||||
product_sales[product_id]['revenue'] += revenue
|
||||
|
||||
# Update each product's sales (would need product_sales table)
|
||||
# For now, log the aggregation
|
||||
logger.info(
|
||||
"Product sales aggregated",
|
||||
tenant_id=tenant_id,
|
||||
date=str(transaction_date),
|
||||
products_count=len(product_sales)
|
||||
)
|
||||
|
||||
# In production, insert/update product_sales table here
|
||||
# Similar UPSERT pattern as daily analytics
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
"Failed to update product sales",
|
||||
tenant_id=tenant_id,
|
||||
error=str(e)
|
||||
)
|
||||
|
||||
async def _update_product_refunds(
|
||||
self,
|
||||
tenant_id: str,
|
||||
refund_date: date,
|
||||
items: list
|
||||
):
|
||||
"""
|
||||
Update product refund tracking
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant ID
|
||||
refund_date: Date of refund
|
||||
items: List of items refunded
|
||||
"""
|
||||
try:
|
||||
# Similar to product sales, but for refunds
|
||||
product_refunds = defaultdict(lambda: {'quantity': 0, 'amount': Decimal('0')})
|
||||
|
||||
for item in items:
|
||||
product_id = item.get('product_id')
|
||||
if not product_id:
|
||||
continue
|
||||
|
||||
quantity = item.get('quantity', 0)
|
||||
unit_price = Decimal(str(item.get('unit_price', 0)))
|
||||
amount = quantity * unit_price
|
||||
|
||||
product_refunds[product_id]['quantity'] += quantity
|
||||
product_refunds[product_id]['amount'] += amount
|
||||
|
||||
logger.info(
|
||||
"Product refunds aggregated",
|
||||
tenant_id=tenant_id,
|
||||
date=str(refund_date),
|
||||
products_count=len(product_refunds)
|
||||
)
|
||||
|
||||
# In production, update product_refunds table
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
"Failed to update product refunds",
|
||||
tenant_id=tenant_id,
|
||||
error=str(e)
|
||||
)
|
||||
|
||||
async def _store_transaction_record(
|
||||
self,
|
||||
tenant_id: str,
|
||||
transaction_id: str,
|
||||
transaction_date: date,
|
||||
total_amount: Decimal,
|
||||
items: list,
|
||||
pos_system: str,
|
||||
transaction_type: str,
|
||||
original_transaction_id: str = None
|
||||
):
|
||||
"""
|
||||
Store detailed transaction record
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant ID
|
||||
transaction_id: Transaction/refund ID
|
||||
transaction_date: Date of transaction
|
||||
total_amount: Total amount
|
||||
items: Transaction items
|
||||
pos_system: POS system name
|
||||
transaction_type: 'sale' or 'refund'
|
||||
original_transaction_id: For refunds, the original transaction ID
|
||||
"""
|
||||
try:
|
||||
# Would store in transactions table for detailed tracking
|
||||
# For now, just log
|
||||
logger.info(
|
||||
"Transaction record created",
|
||||
tenant_id=tenant_id,
|
||||
transaction_id=transaction_id,
|
||||
type=transaction_type,
|
||||
amount=float(total_amount),
|
||||
items_count=len(items),
|
||||
pos_system=pos_system
|
||||
)
|
||||
|
||||
# In production, insert into transactions table:
|
||||
# from app.models.transactions import Transaction
|
||||
# transaction = Transaction(
|
||||
# id=transaction_id,
|
||||
# tenant_id=tenant_id,
|
||||
# transaction_date=transaction_date,
|
||||
# total_amount=total_amount,
|
||||
# items=items,
|
||||
# pos_system=pos_system,
|
||||
# transaction_type=transaction_type,
|
||||
# original_transaction_id=original_transaction_id
|
||||
# )
|
||||
# self.db_session.add(transaction)
|
||||
# await self.db_session.commit()
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
"Failed to store transaction record",
|
||||
transaction_id=transaction_id,
|
||||
error=str(e)
|
||||
)
|
||||
|
||||
|
||||
# Factory function for creating consumer instance
|
||||
def create_sales_event_consumer(db_session: AsyncSession) -> SalesEventConsumer:
|
||||
"""Create sales event consumer instance"""
|
||||
return SalesEventConsumer(db_session)
|
||||
@@ -10,7 +10,7 @@ from app.core.database import database_manager
|
||||
from shared.service_base import StandardFastAPIService
|
||||
|
||||
# Import API routers
|
||||
from app.api import sales_records, sales_operations, analytics, internal_demo, audit
|
||||
from app.api import sales_records, sales_operations, analytics, internal_demo, audit, batch
|
||||
|
||||
|
||||
class SalesService(StandardFastAPIService):
|
||||
@@ -147,6 +147,7 @@ service.setup_custom_endpoints()
|
||||
# Include routers
|
||||
# IMPORTANT: Register audit router FIRST to avoid route matching conflicts
|
||||
service.add_router(audit.router)
|
||||
service.add_router(batch.router)
|
||||
service.add_router(sales_records.router)
|
||||
service.add_router(sales_operations.router)
|
||||
service.add_router(analytics.router)
|
||||
|
||||
@@ -2,6 +2,5 @@
|
||||
|
||||
from .sales_service import SalesService
|
||||
from .data_import_service import DataImportService
|
||||
from .messaging import SalesEventPublisher, sales_publisher
|
||||
|
||||
__all__ = ["SalesService", "DataImportService", "SalesEventPublisher", "sales_publisher"]
|
||||
__all__ = ["SalesService", "DataImportService"]
|
||||
@@ -1,232 +0,0 @@
|
||||
# services/sales/app/services/messaging.py
|
||||
"""
|
||||
Sales Service Messaging - Event Publishing using shared messaging infrastructure
|
||||
"""
|
||||
|
||||
import structlog
|
||||
from typing import Dict, Any, Optional
|
||||
from uuid import UUID
|
||||
from datetime import datetime
|
||||
|
||||
from shared.messaging.rabbitmq import RabbitMQClient
|
||||
from shared.messaging.events import BaseEvent, DataImportedEvent
|
||||
from app.core.config import settings
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
|
||||
class SalesEventPublisher:
|
||||
"""Sales service event publisher using RabbitMQ"""
|
||||
|
||||
def __init__(self):
|
||||
self.enabled = True
|
||||
self._rabbitmq_client = None
|
||||
|
||||
async def _get_rabbitmq_client(self):
|
||||
"""Get or create RabbitMQ client"""
|
||||
if not self._rabbitmq_client:
|
||||
self._rabbitmq_client = RabbitMQClient(
|
||||
connection_url=settings.RABBITMQ_URL,
|
||||
service_name="sales-service"
|
||||
)
|
||||
await self._rabbitmq_client.connect()
|
||||
return self._rabbitmq_client
|
||||
|
||||
async def publish_sales_created(self, sales_data: Dict[str, Any], correlation_id: Optional[str] = None) -> bool:
|
||||
"""Publish sales created event"""
|
||||
try:
|
||||
if not self.enabled:
|
||||
return True
|
||||
|
||||
# Create event
|
||||
event = BaseEvent(
|
||||
service_name="sales-service",
|
||||
data={
|
||||
"record_id": str(sales_data.get("id")),
|
||||
"tenant_id": str(sales_data.get("tenant_id")),
|
||||
"product_name": sales_data.get("product_name"),
|
||||
"revenue": float(sales_data.get("revenue", 0)),
|
||||
"quantity_sold": sales_data.get("quantity_sold", 0),
|
||||
"timestamp": datetime.now().isoformat()
|
||||
},
|
||||
event_type="sales.created",
|
||||
correlation_id=correlation_id
|
||||
)
|
||||
|
||||
# Publish via RabbitMQ
|
||||
client = await self._get_rabbitmq_client()
|
||||
success = await client.publish_event(
|
||||
exchange_name="sales.events",
|
||||
routing_key="sales.created",
|
||||
event_data=event.to_dict()
|
||||
)
|
||||
|
||||
if success:
|
||||
logger.info("Sales record created event published",
|
||||
record_id=sales_data.get("id"),
|
||||
tenant_id=sales_data.get("tenant_id"),
|
||||
product=sales_data.get("product_name"))
|
||||
|
||||
return success
|
||||
|
||||
except Exception as e:
|
||||
logger.warning("Failed to publish sales created event", error=str(e))
|
||||
return False
|
||||
|
||||
async def publish_sales_updated(self, sales_data: Dict[str, Any], correlation_id: Optional[str] = None) -> bool:
|
||||
"""Publish sales updated event"""
|
||||
try:
|
||||
if not self.enabled:
|
||||
return True
|
||||
|
||||
event = BaseEvent(
|
||||
service_name="sales-service",
|
||||
data={
|
||||
"record_id": str(sales_data.get("id")),
|
||||
"tenant_id": str(sales_data.get("tenant_id")),
|
||||
"product_name": sales_data.get("product_name"),
|
||||
"timestamp": datetime.now().isoformat()
|
||||
},
|
||||
event_type="sales.updated",
|
||||
correlation_id=correlation_id
|
||||
)
|
||||
|
||||
client = await self._get_rabbitmq_client()
|
||||
success = await client.publish_event(
|
||||
exchange_name="sales.events",
|
||||
routing_key="sales.updated",
|
||||
event_data=event.to_dict()
|
||||
)
|
||||
|
||||
if success:
|
||||
logger.info("Sales record updated event published",
|
||||
record_id=sales_data.get("id"),
|
||||
tenant_id=sales_data.get("tenant_id"))
|
||||
|
||||
return success
|
||||
|
||||
except Exception as e:
|
||||
logger.warning("Failed to publish sales updated event", error=str(e))
|
||||
return False
|
||||
|
||||
async def publish_sales_deleted(self, record_id: UUID, tenant_id: UUID, correlation_id: Optional[str] = None) -> bool:
|
||||
"""Publish sales deleted event"""
|
||||
try:
|
||||
if not self.enabled:
|
||||
return True
|
||||
|
||||
event = BaseEvent(
|
||||
service_name="sales-service",
|
||||
data={
|
||||
"record_id": str(record_id),
|
||||
"tenant_id": str(tenant_id),
|
||||
"timestamp": datetime.now().isoformat()
|
||||
},
|
||||
event_type="sales.deleted",
|
||||
correlation_id=correlation_id
|
||||
)
|
||||
|
||||
client = await self._get_rabbitmq_client()
|
||||
success = await client.publish_event(
|
||||
exchange_name="sales.events",
|
||||
routing_key="sales.deleted",
|
||||
event_data=event.to_dict()
|
||||
)
|
||||
|
||||
if success:
|
||||
logger.info("Sales record deleted event published",
|
||||
record_id=record_id,
|
||||
tenant_id=tenant_id)
|
||||
|
||||
return success
|
||||
|
||||
except Exception as e:
|
||||
logger.warning("Failed to publish sales deleted event", error=str(e))
|
||||
return False
|
||||
|
||||
async def publish_data_imported(self, import_result: Dict[str, Any], correlation_id: Optional[str] = None) -> bool:
|
||||
"""Publish data imported event"""
|
||||
try:
|
||||
if not self.enabled:
|
||||
return True
|
||||
|
||||
event = DataImportedEvent(
|
||||
service_name="sales-service",
|
||||
data={
|
||||
"records_created": import_result.get("records_created", 0),
|
||||
"records_updated": import_result.get("records_updated", 0),
|
||||
"records_failed": import_result.get("records_failed", 0),
|
||||
"tenant_id": str(import_result.get("tenant_id")),
|
||||
"success": import_result.get("success", False),
|
||||
"file_name": import_result.get("file_name"),
|
||||
"timestamp": datetime.now().isoformat()
|
||||
},
|
||||
correlation_id=correlation_id
|
||||
)
|
||||
|
||||
client = await self._get_rabbitmq_client()
|
||||
success = await client.publish_event(
|
||||
exchange_name="data.events",
|
||||
routing_key="data.imported",
|
||||
event_data=event.to_dict()
|
||||
)
|
||||
|
||||
if success:
|
||||
logger.info("Sales data imported event published",
|
||||
records_created=import_result.get("records_created"),
|
||||
tenant_id=import_result.get("tenant_id"),
|
||||
success=import_result.get("success"))
|
||||
|
||||
return success
|
||||
|
||||
except Exception as e:
|
||||
logger.warning("Failed to publish data imported event", error=str(e))
|
||||
return False
|
||||
|
||||
async def publish_analytics_generated(self, analytics_data: Dict[str, Any], correlation_id: Optional[str] = None) -> bool:
|
||||
"""Publish analytics generated event"""
|
||||
try:
|
||||
if not self.enabled:
|
||||
return True
|
||||
|
||||
event = BaseEvent(
|
||||
service_name="sales-service",
|
||||
data={
|
||||
"tenant_id": str(analytics_data.get("tenant_id")),
|
||||
"total_revenue": float(analytics_data.get("total_revenue", 0)),
|
||||
"total_quantity": analytics_data.get("total_quantity", 0),
|
||||
"total_transactions": analytics_data.get("total_transactions", 0),
|
||||
"period_start": analytics_data.get("period_start"),
|
||||
"period_end": analytics_data.get("period_end"),
|
||||
"timestamp": datetime.now().isoformat()
|
||||
},
|
||||
event_type="analytics.generated",
|
||||
correlation_id=correlation_id
|
||||
)
|
||||
|
||||
client = await self._get_rabbitmq_client()
|
||||
success = await client.publish_event(
|
||||
exchange_name="analytics.events",
|
||||
routing_key="analytics.generated",
|
||||
event_data=event.to_dict()
|
||||
)
|
||||
|
||||
if success:
|
||||
logger.info("Sales analytics generated event published",
|
||||
tenant_id=analytics_data.get("tenant_id"),
|
||||
total_revenue=analytics_data.get("total_revenue"))
|
||||
|
||||
return success
|
||||
|
||||
except Exception as e:
|
||||
logger.warning("Failed to publish analytics generated event", error=str(e))
|
||||
return False
|
||||
|
||||
async def cleanup(self):
|
||||
"""Cleanup RabbitMQ connections"""
|
||||
if self._rabbitmq_client:
|
||||
await self._rabbitmq_client.disconnect()
|
||||
|
||||
|
||||
# Global instance
|
||||
sales_publisher = SalesEventPublisher()
|
||||
@@ -435,11 +435,39 @@ class SalesService:
|
||||
logger.warning("LOW_STOCK_ALERT",
|
||||
**alert_data)
|
||||
|
||||
# TODO: Implement actual notification delivery
|
||||
# Examples:
|
||||
# - await notification_service.send_alert(alert_data)
|
||||
# - await event_publisher.publish('inventory.low_stock', alert_data)
|
||||
# - await email_service.send_low_stock_email(tenant_id, alert_data)
|
||||
# Implement notification delivery via RabbitMQ event
|
||||
try:
|
||||
from shared.messaging import get_rabbitmq_client
|
||||
|
||||
rabbitmq_client = get_rabbitmq_client()
|
||||
if rabbitmq_client:
|
||||
# Publish low stock event for notification service to consume
|
||||
event_payload = {
|
||||
"event_id": str(uuid.uuid4()),
|
||||
"event_type": "inventory.low_stock",
|
||||
"timestamp": datetime.utcnow().isoformat(),
|
||||
"tenant_id": str(tenant_id),
|
||||
"data": alert_data
|
||||
}
|
||||
|
||||
await rabbitmq_client.publish_event(
|
||||
exchange_name="inventory.events",
|
||||
routing_key="inventory.low_stock",
|
||||
event_data=event_payload
|
||||
)
|
||||
|
||||
logger.info("Published low stock alert event",
|
||||
tenant_id=str(tenant_id),
|
||||
product_id=product_id,
|
||||
event_id=event_payload["event_id"])
|
||||
else:
|
||||
logger.warning("RabbitMQ client not available, notification not sent")
|
||||
|
||||
except Exception as notify_error:
|
||||
logger.error("Failed to publish low stock notification event",
|
||||
error=str(notify_error),
|
||||
tenant_id=str(tenant_id))
|
||||
# Don't fail the main operation if notification fails
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to trigger low stock alert",
|
||||
|
||||
96
services/sales/tests/unit/test_batch.py
Normal file
96
services/sales/tests/unit/test_batch.py
Normal file
@@ -0,0 +1,96 @@
|
||||
import pytest
|
||||
from fastapi.testclient import TestClient
|
||||
from unittest.mock import AsyncMock, MagicMock, patch
|
||||
from datetime import date
|
||||
import uuid
|
||||
|
||||
from app.main import app
|
||||
from app.api.batch import SalesSummaryBatchRequest, SalesSummary
|
||||
|
||||
client = TestClient(app)
|
||||
|
||||
@pytest.fixture
|
||||
def mock_sales_service():
|
||||
with patch("app.api.batch.get_sales_service") as mock:
|
||||
service = AsyncMock()
|
||||
mock.return_value = service
|
||||
yield service
|
||||
|
||||
@pytest.fixture
|
||||
def mock_current_user():
|
||||
with patch("app.api.batch.get_current_user_dep") as mock:
|
||||
mock.return_value = {
|
||||
"user_id": str(uuid.uuid4()),
|
||||
"role": "admin",
|
||||
"tenant_id": str(uuid.uuid4())
|
||||
}
|
||||
yield mock
|
||||
|
||||
def test_get_sales_summary_batch_success(mock_sales_service, mock_current_user):
|
||||
# Setup
|
||||
tenant_id_1 = str(uuid.uuid4())
|
||||
tenant_id_2 = str(uuid.uuid4())
|
||||
|
||||
request_data = {
|
||||
"tenant_ids": [tenant_id_1, tenant_id_2],
|
||||
"start_date": "2025-01-01",
|
||||
"end_date": "2025-01-31"
|
||||
}
|
||||
|
||||
# Mock service response
|
||||
mock_sales_service.get_sales_analytics.side_effect = [
|
||||
{
|
||||
"total_revenue": 1000.0,
|
||||
"total_orders": 10,
|
||||
"average_order_value": 100.0
|
||||
},
|
||||
{
|
||||
"total_revenue": 2000.0,
|
||||
"total_orders": 20,
|
||||
"average_order_value": 100.0
|
||||
}
|
||||
]
|
||||
|
||||
# Execute
|
||||
response = client.post("/api/v1/batch/sales-summary", json=request_data)
|
||||
|
||||
# Verify
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert len(data) == 2
|
||||
assert data[tenant_id_1]["total_revenue"] == 1000.0
|
||||
assert data[tenant_id_2]["total_revenue"] == 2000.0
|
||||
|
||||
# Verify service calls
|
||||
assert mock_sales_service.get_sales_analytics.call_count == 2
|
||||
|
||||
def test_get_sales_summary_batch_empty(mock_sales_service, mock_current_user):
|
||||
# Setup
|
||||
request_data = {
|
||||
"tenant_ids": [],
|
||||
"start_date": "2025-01-01",
|
||||
"end_date": "2025-01-31"
|
||||
}
|
||||
|
||||
# Execute
|
||||
response = client.post("/api/v1/batch/sales-summary", json=request_data)
|
||||
|
||||
# Verify
|
||||
assert response.status_code == 200
|
||||
assert response.json() == {}
|
||||
|
||||
def test_get_sales_summary_batch_limit_exceeded(mock_sales_service, mock_current_user):
|
||||
# Setup
|
||||
tenant_ids = [str(uuid.uuid4()) for _ in range(101)]
|
||||
request_data = {
|
||||
"tenant_ids": tenant_ids,
|
||||
"start_date": "2025-01-01",
|
||||
"end_date": "2025-01-31"
|
||||
}
|
||||
|
||||
# Execute
|
||||
response = client.post("/api/v1/batch/sales-summary", json=request_data)
|
||||
|
||||
# Verify
|
||||
assert response.status_code == 400
|
||||
assert "Maximum 100 tenant IDs allowed" in response.json()["detail"]
|
||||
Reference in New Issue
Block a user