Add POS feature and improve the overall backend implementation
@@ -16,6 +16,8 @@ from shared.auth.access_control import require_user_role, admin_role_required, s
from shared.routing import RouteBuilder
from app.services.pos_transaction_service import POSTransactionService
from app.services.pos_config_service import POSConfigurationService
+from app.services.pos_webhook_service import POSWebhookService
+from app.services.pos_sync_service import POSSyncService
from app.services.tenant_deletion_service import POSTenantDeletionService

router = APIRouter()
@@ -44,20 +46,44 @@ async def trigger_sync(
        data_types = sync_request.get("data_types", ["transactions"])
        config_id = sync_request.get("config_id")

        if not config_id:
            raise HTTPException(status_code=400, detail="config_id is required")

        # Get POS configuration to determine system type
        config_service = POSConfigurationService()
        configs = await config_service.get_configurations_by_tenant(tenant_id, skip=0, limit=100)
        config = next((c for c in configs if str(c.id) == str(config_id)), None)

        if not config:
            raise HTTPException(status_code=404, detail="POS configuration not found")

        # Create sync job
        sync_service = POSSyncService(db)
        sync_log = await sync_service.create_sync_job(
            tenant_id=tenant_id,
            pos_config_id=UUID(config_id),
            pos_system=config.pos_system,
            sync_type=sync_type,
            data_types=data_types
        )

        logger.info("Manual sync triggered",
                    tenant_id=tenant_id,
                    config_id=config_id,
                    sync_id=str(sync_log.id),
                    sync_type=sync_type,
                    user_id=current_user.get("user_id"))

        return {
            "message": "Sync triggered successfully",
-           "sync_id": "placeholder-sync-id",
+           "sync_id": str(sync_log.id),
            "status": "queued",
            "sync_type": sync_type,
            "data_types": data_types,
            "estimated_duration": "5-10 minutes"
        }
    except HTTPException:
        raise
    except Exception as e:
        logger.error("Failed to trigger sync", error=str(e), tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to trigger sync: {str(e)}")
@@ -78,6 +104,7 @@ async def get_sync_status(
    """Get synchronization status and recent sync history"""
    try:
        transaction_service = POSTransactionService()
+       sync_service = POSSyncService(db)

        # Get sync metrics from transaction service
        sync_metrics = await transaction_service.get_sync_metrics(tenant_id)
@@ -91,6 +118,13 @@ async def get_sync_status(
        synced = sync_status.get("synced", 0)
        success_rate = (synced / total * 100) if total > 0 else 100.0

+       # Calculate actual average duration from sync logs
+       average_duration_minutes = await sync_service.calculate_average_duration(
+           tenant_id=tenant_id,
+           pos_config_id=config_id,
+           days=30
+       )

        return {
            "current_sync": None,
            "last_successful_sync": last_successful_sync.isoformat() if last_successful_sync else None,
@@ -98,7 +132,7 @@ async def get_sync_status(
            "sync_health": {
                "status": "healthy" if success_rate > 90 else "degraded" if success_rate > 70 else "unhealthy",
                "success_rate": round(success_rate, 2),
-               "average_duration_minutes": 3.2,  # Placeholder - could calculate from actual data
+               "average_duration_minutes": average_duration_minutes,
                "last_error": None,
                "total_transactions": total,
                "synced_count": synced,
@@ -128,11 +162,19 @@ async def get_sync_logs(
):
    """Get detailed sync logs"""
    try:
-       return {
-           "logs": [],
-           "total": 0,
-           "has_more": False
-       }
+       sync_service = POSSyncService(db)

+       logs_data = await sync_service.get_sync_logs(
+           tenant_id=tenant_id,
+           config_id=config_id,
+           status=status,
+           sync_type=sync_type,
+           limit=limit,
+           offset=offset
+       )

+       return logs_data

    except Exception as e:
        logger.error("Failed to get sync logs", error=str(e), tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to get sync logs: {str(e)}")
@@ -151,17 +193,44 @@ async def resync_failed_transactions(
):
    """Resync failed transactions from the specified time period (Admin/Owner only)"""
    try:
        # Get active POS configuration for tenant
        config_service = POSConfigurationService()
        configs = await config_service.get_configurations_by_tenant(
            tenant_id=tenant_id,
            is_active=True,
            skip=0,
            limit=1
        )

        if not configs:
            raise HTTPException(status_code=404, detail="No active POS configuration found")

        config = configs[0]

        # Create resync job
        sync_service = POSSyncService(db)
        sync_log = await sync_service.create_sync_job(
            tenant_id=tenant_id,
            pos_config_id=config.id,
            pos_system=config.pos_system,
            sync_type="resync_failed",
            data_types=["transactions"]
        )

        logger.info("Resync failed transactions requested",
                    tenant_id=tenant_id,
                    days_back=days_back,
                    sync_id=str(sync_log.id),
                    user_id=current_user.get("user_id"))

        return {
            "message": "Resync job queued successfully",
-           "job_id": "placeholder-resync-job-id",
+           "job_id": str(sync_log.id),
            "scope": f"Failed transactions from last {days_back} days",
            "estimated_transactions": 0
        }
    except HTTPException:
        raise
    except Exception as e:
        logger.error("Failed to queue resync job", error=str(e), tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to queue resync job: {str(e)}")
@@ -229,12 +298,17 @@ async def receive_webhook(
    content_type: Optional[str] = Header(None),
    x_signature: Optional[str] = Header(None),
    x_webhook_signature: Optional[str] = Header(None),
-   authorization: Optional[str] = Header(None)
+   authorization: Optional[str] = Header(None),
+   db=Depends(get_db)
):
    """
    Receive webhooks from POS systems
    Supports Square, Toast, and Lightspeed webhook formats
    Includes signature verification, database logging, and duplicate detection
    """
+   webhook_service = POSWebhookService(db)
+   start_time = datetime.utcnow()

    try:
        # Validate POS system
        supported_systems = ["square", "toast", "lightspeed"]
@@ -273,63 +347,112 @@ async def receive_webhook(
        # Determine signature from various header formats
        signature = x_signature or x_webhook_signature or authorization

        # Log webhook receipt
        logger.info("Webhook received",
                    pos_system=pos_system,
                    method=method,
                    url_path=url_path,
                    payload_size=payload_size,
                    client_ip=client_ip,
                    has_signature=bool(signature),
                    content_type=content_type)

-       # TODO: Store webhook log in database
-       # TODO: Verify webhook signature
-       # TODO: Extract tenant_id from payload
-       # TODO: Process webhook based on POS system type
-       # TODO: Queue for async processing if needed

-       # Parse webhook type based on POS system
-       webhook_type = None
-       event_id = None
+       # Parse webhook event details
+       event_details = webhook_service.parse_webhook_event_details(pos_system, parsed_payload or {})
+       webhook_type = event_details.get("webhook_type") or "unknown"
+       event_id = event_details.get("event_id")
+       transaction_id = event_details.get("transaction_id")
+       order_id = event_details.get("order_id")

+       # Extract tenant_id from payload
+       tenant_id = None
        if parsed_payload:
-           if pos_system.lower() == "square":
-               webhook_type = parsed_payload.get("type")
-               event_id = parsed_payload.get("event_id")
-           elif pos_system.lower() == "toast":
-               webhook_type = parsed_payload.get("eventType")
-               event_id = parsed_payload.get("guid")
-           elif pos_system.lower() == "lightspeed":
-               webhook_type = parsed_payload.get("action")
-               event_id = parsed_payload.get("id")
+           tenant_id = await webhook_service.extract_tenant_id_from_payload(pos_system, parsed_payload)

-       logger.info("Webhook processed successfully",
+       # Check for duplicate webhook
+       is_duplicate = False
+       if event_id:
+           is_duplicate, _ = await webhook_service.check_duplicate_webhook(
+               pos_system, event_id, tenant_id
+           )

+       # Verify webhook signature if tenant is identified
+       is_signature_valid = None
+       if signature and tenant_id:
+           webhook_secret = await webhook_service.get_webhook_secret(pos_system, tenant_id)
+           if webhook_secret:
+               is_signature_valid = await webhook_service.verify_webhook_signature(
+                   pos_system, raw_payload, signature, webhook_secret
+               )

+               if not is_signature_valid:
+                   logger.warning("Webhook signature verification failed",
+                                  pos_system=pos_system,
+                                  tenant_id=str(tenant_id))

+       # Log webhook receipt to database
+       webhook_log = await webhook_service.log_webhook(
+           pos_system=pos_system,
+           webhook_type=webhook_type,
+           method=method,
+           url_path=url_path,
+           query_params=query_params,
+           headers=headers,
+           raw_payload=raw_payload,
+           payload_size=payload_size,
+           content_type=content_type,
+           signature=signature,
+           is_signature_valid=is_signature_valid,
+           source_ip=client_ip,
+           event_id=event_id,
+           tenant_id=tenant_id,
+           transaction_id=transaction_id,
+           order_id=order_id
+       )

+       # Mark as duplicate if detected
+       if is_duplicate:
+           await webhook_service.update_webhook_status(
+               webhook_log.id,
+               status="duplicate",
+               error_message="Duplicate event already processed"
+           )
+           logger.info("Duplicate webhook ignored", event_id=event_id)
+           return _get_webhook_response(pos_system, success=True)

+       # TODO: Queue for async processing if needed
+       # For now, mark as received and ready for processing
+       processing_duration_ms = int((datetime.utcnow() - start_time).total_seconds() * 1000)
+       await webhook_service.update_webhook_status(
+           webhook_log.id,
+           status="queued",
+           processing_duration_ms=processing_duration_ms
+       )

+       logger.info("Webhook processed and queued successfully",
                    pos_system=pos_system,
                    webhook_type=webhook_type,
-                   event_id=event_id)
+                   event_id=event_id,
+                   tenant_id=str(tenant_id) if tenant_id else None,
+                   webhook_log_id=str(webhook_log.id))

-       # Return appropriate response based on POS system requirements
-       if pos_system.lower() == "square":
-           return {"status": "success"}
-       elif pos_system.lower() == "toast":
-           return {"success": True}
-       elif pos_system.lower() == "lightspeed":
-           return {"received": True}
-       else:
-           return {"status": "received"}
+       return _get_webhook_response(pos_system, success=True)

    except HTTPException:
        raise
    except Exception as e:
        logger.error("Webhook processing failed",
                     error=str(e),
-                    pos_system=pos_system)
+                    pos_system=pos_system,
+                    exc_info=True)

+       # Return 500 to trigger POS system retry
        raise HTTPException(status_code=500, detail="Webhook processing failed")

def _get_webhook_response(pos_system: str, success: bool = True) -> Dict[str, Any]:
    """Get POS-specific webhook response format"""
    if pos_system.lower() == "square":
        return {"status": "success" if success else "error"}
    elif pos_system.lower() == "toast":
        return {"success": success}
    elif pos_system.lower() == "lightspeed":
        return {"received": success}
    else:
        return {"status": "received" if success else "error"}


@router.get(
    route_builder.build_webhook_route("{pos_system}/status"),
    response_model=dict
@@ -495,3 +618,189 @@ async def preview_tenant_data_deletion(
            status_code=500,
            detail=f"Failed to preview tenant data deletion: {str(e)}"
        )


# ================================================================
# POS TO SALES SYNC ENDPOINTS
# ================================================================

@router.post(
    "/tenants/{tenant_id}/pos/transactions/{transaction_id}/sync-to-sales",
    summary="Sync single transaction to sales",
    description="Manually sync a specific POS transaction to the sales service"
)
async def sync_transaction_to_sales(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    transaction_id: UUID = Path(..., description="Transaction ID to sync"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep)
):
    """
    Sync a single POS transaction to the sales service

    This endpoint:
    - Creates sales records for each item in the transaction
    - Automatically decreases inventory stock
    - Updates sync status flags
    - Returns detailed sync results
    """
    try:
        from app.services.pos_transaction_service import POSTransactionService

        transaction_service = POSTransactionService()

        result = await transaction_service.sync_transaction_to_sales(
            transaction_id=transaction_id,
            tenant_id=tenant_id
        )

        if result.get("success"):
            logger.info("Transaction synced to sales via API",
                        transaction_id=transaction_id,
                        tenant_id=tenant_id,
                        user_id=current_user.get("user_id"))

            return {
                "success": True,
                "message": "Transaction synced successfully",
                **result
            }
        else:
            logger.warning("Transaction sync failed via API",
                           transaction_id=transaction_id,
                           tenant_id=tenant_id,
                           error=result.get("error"))

            raise HTTPException(
                status_code=400,
                detail=result.get("error", "Failed to sync transaction")
            )

    except HTTPException:
        raise
    except Exception as e:
        logger.error("Failed to sync transaction to sales",
                     error=str(e),
                     transaction_id=transaction_id,
                     tenant_id=tenant_id,
                     exc_info=True)
        raise HTTPException(
            status_code=500,
            detail=f"Failed to sync transaction: {str(e)}"
        )


@router.post(
    "/tenants/{tenant_id}/pos/transactions/sync-all-to-sales",
    summary="Batch sync unsynced transactions",
    description="Sync all unsynced POS transactions to the sales service"
)
async def sync_all_transactions_to_sales(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    limit: int = Query(50, ge=1, le=200, description="Max transactions to sync in one batch"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep)
):
    """
    Batch sync all unsynced POS transactions to the sales service

    This endpoint:
    - Finds all unsynced completed transactions
    - Syncs each one to the sales service
    - Creates sales records and decreases inventory
    - Returns a summary with success/failure counts

    Use this to:
    - Manually trigger sync after POS webhooks are received
    - Recover from sync failures
    - Run an initial migration of historical POS data
    """
    try:
        from app.services.pos_transaction_service import POSTransactionService

        transaction_service = POSTransactionService()

        result = await transaction_service.sync_unsynced_transactions(
            tenant_id=tenant_id,
            limit=limit
        )

        logger.info("Batch sync completed via API",
                    tenant_id=tenant_id,
                    total=result.get("total_transactions"),
                    synced=result.get("synced"),
                    failed=result.get("failed"),
                    user_id=current_user.get("user_id"))

        return {
            "success": True,
            "message": f"Synced {result.get('synced')} of {result.get('total_transactions')} transactions",
            **result
        }

    except Exception as e:
        logger.error("Failed to batch sync transactions to sales",
                     error=str(e),
                     tenant_id=tenant_id,
                     exc_info=True)
        raise HTTPException(
            status_code=500,
            detail=f"Failed to batch sync transactions: {str(e)}"
        )


@router.get(
    "/tenants/{tenant_id}/pos/transactions/sync-status",
    summary="Get sync status summary",
    description="Get summary of synced vs unsynced transactions"
)
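# NOTE: this handler reuses the name `get_sync_status` defined earlier in this
# module. FastAPI registers both routes at decoration time, so routing still
# works, but the later def rebinds the module attribute; renaming it (e.g. to
# `get_transaction_sync_status`) would be clearer.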
async def get_sync_status(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep)
):
    """
    Get sync status summary for POS transactions

    Returns counts of:
    - Total completed transactions
    - Synced transactions
    - Unsynced (pending) transactions
    plus the overall sync rate as a percentage
    """
    try:
        from app.services.pos_transaction_service import POSTransactionService

        transaction_service = POSTransactionService()

        # Get counts for different sync states
        total_completed = await transaction_service.count_transactions_by_tenant(
            tenant_id=tenant_id,
            status="completed"
        )

        synced = await transaction_service.count_transactions_by_tenant(
            tenant_id=tenant_id,
            status="completed",
            is_synced=True
        )

        unsynced = await transaction_service.count_transactions_by_tenant(
            tenant_id=tenant_id,
            status="completed",
            is_synced=False
        )

        return {
            "total_completed_transactions": total_completed,
            "synced_to_sales": synced,
            "pending_sync": unsynced,
            "sync_rate": round((synced / total_completed * 100) if total_completed > 0 else 0, 2)
        }

    except Exception as e:
        logger.error("Failed to get sync status",
                     error=str(e),
                     tenant_id=tenant_id,
                     exc_info=True)
        raise HTTPException(
            status_code=500,
            detail=f"Failed to get sync status: {str(e)}"
        )
services/pos/app/jobs/sync_pos_to_sales.py (new file, 217 lines)
@@ -0,0 +1,217 @@
"""
Background Job: Sync POS Transactions to Sales Service

This job runs periodically to sync unsynced POS transactions to the sales service,
which automatically decreases inventory stock levels.

Schedule: Every 5 minutes (configurable)
"""

import asyncio
from datetime import datetime, timedelta
from typing import Dict, Any
import structlog

from app.services.pos_transaction_service import POSTransactionService
from app.repositories.pos_config_repository import POSConfigRepository
from app.core.database import get_db_transaction

logger = structlog.get_logger()


class POSToSalesSyncJob:
    """Background job for syncing POS transactions to sales service"""

    def __init__(self):
        self.transaction_service = POSTransactionService()
        self.batch_size = 50  # Process 50 transactions per batch
        self.max_retries = 3  # Max retry attempts for failed syncs

    async def run(self):
        """
        Main job execution method

        This method:
        1. Finds all tenants with active POS configurations
        2. For each tenant, syncs unsynced transactions
        3. Logs results and errors
        """
        start_time = datetime.utcnow()
        logger.info("Starting POS to Sales sync job")

        try:
            # Get all tenants with active POS configurations
            tenants_to_sync = await self._get_active_tenants()

            if not tenants_to_sync:
                logger.info("No active tenants found for sync")
                return {
                    "success": True,
                    "tenants_processed": 0,
                    "total_synced": 0,
                    "total_failed": 0
                }

            total_synced = 0
            total_failed = 0
            results = []

            for tenant_id in tenants_to_sync:
                try:
                    result = await self.transaction_service.sync_unsynced_transactions(
                        tenant_id=tenant_id,
                        limit=self.batch_size
                    )

                    synced = result.get("synced", 0)
                    failed = result.get("failed", 0)

                    total_synced += synced
                    total_failed += failed

                    results.append({
                        "tenant_id": str(tenant_id),
                        "synced": synced,
                        "failed": failed
                    })

                    logger.info("Tenant sync completed",
                                tenant_id=str(tenant_id),
                                synced=synced,
                                failed=failed)

                except Exception as e:
                    logger.error("Failed to sync tenant",
                                 tenant_id=str(tenant_id),
                                 error=str(e))
                    results.append({
                        "tenant_id": str(tenant_id),
                        "error": str(e)
                    })

            duration = (datetime.utcnow() - start_time).total_seconds()

            logger.info("POS to Sales sync job completed",
                        duration_seconds=duration,
                        tenants_processed=len(tenants_to_sync),
                        total_synced=total_synced,
                        total_failed=total_failed)

            return {
                "success": True,
                "tenants_processed": len(tenants_to_sync),
                "total_synced": total_synced,
                "total_failed": total_failed,
                "duration_seconds": duration,
                "results": results
            }

        except Exception as e:
            duration = (datetime.utcnow() - start_time).total_seconds()
            logger.error("POS to Sales sync job failed",
                         error=str(e),
                         duration_seconds=duration,
                         exc_info=True)

            return {
                "success": False,
                "error": str(e),
                "duration_seconds": duration
            }

    async def _get_active_tenants(self):
        """Get list of tenant IDs with active POS configurations"""
        try:
            async with get_db_transaction() as db:
                repository = POSConfigRepository(db)

                # Get all active POS configurations
                configs = await repository.get_all_active_configs()

                # Extract unique tenant IDs
                tenant_ids = list(set(config.tenant_id for config in configs))

                logger.info("Found tenants with active POS configs",
                            count=len(tenant_ids))

                return tenant_ids

        except Exception as e:
            logger.error("Failed to get active tenants", error=str(e))
            return []

    async def sync_specific_tenant(self, tenant_id: str) -> Dict[str, Any]:
        """
        Sync transactions for a specific tenant (for manual triggering)

        Args:
            tenant_id: Tenant UUID as string

        Returns:
            Sync result dictionary
        """
        try:
            from uuid import UUID
            tenant_uuid = UUID(tenant_id)

            result = await self.transaction_service.sync_unsynced_transactions(
                tenant_id=tenant_uuid,
                limit=self.batch_size
            )

            logger.info("Manual tenant sync completed",
                        tenant_id=tenant_id,
                        synced=result.get("synced"),
                        failed=result.get("failed"))

            return result

        except Exception as e:
            logger.error("Failed to sync specific tenant",
                         tenant_id=tenant_id,
                         error=str(e))
            return {
                "success": False,
                "error": str(e)
            }


# Singleton instance for use in schedulers
pos_to_sales_sync_job = POSToSalesSyncJob()


async def run_pos_to_sales_sync():
    """
    Entry point for scheduler

    Usage with APScheduler:
    ```python
    from apscheduler.schedulers.asyncio import AsyncIOScheduler
    from app.jobs.sync_pos_to_sales import run_pos_to_sales_sync

    scheduler = AsyncIOScheduler()
    scheduler.add_job(
        run_pos_to_sales_sync,
        'interval',
        minutes=5,
        id='pos_to_sales_sync'
    )
    scheduler.start()
    ```

    Usage with Celery:
    ```python
    from celery import Celery
    from app.jobs.sync_pos_to_sales import run_pos_to_sales_sync

    celery = Celery("pos")  # app name is illustrative

    @celery.task
    def sync_pos_transactions():
        asyncio.run(run_pos_to_sales_sync())
    ```
|
||||
return await pos_to_sales_sync_job.run()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
# For testing: Run sync manually
|
||||
asyncio.run(run_pos_to_sales_sync())
|
||||
@@ -89,11 +89,28 @@ class POSService(StandardFastAPIService):

    async def on_startup(self, app: FastAPI):
        """Custom startup logic for POS service"""
        # Start background scheduler for POS-to-Sales sync
        try:
            from app.scheduler import start_scheduler
            start_scheduler()
            self.logger.info("Background scheduler started successfully")
        except Exception as e:
            self.logger.error(f"Failed to start background scheduler: {e}", exc_info=True)
            # Don't fail startup if scheduler fails

        # Custom startup completed
        self.logger.info("POS Integration Service started successfully")

    async def on_shutdown(self, app: FastAPI):
        """Custom shutdown logic for POS service"""
        # Shutdown background scheduler
        try:
            from app.scheduler import shutdown_scheduler
            shutdown_scheduler()
            self.logger.info("Background scheduler stopped successfully")
        except Exception as e:
            self.logger.error(f"Failed to stop background scheduler: {e}", exc_info=True)

        # Database cleanup is handled by the base class
        pass
@@ -80,3 +80,40 @@ class POSConfigurationRepository(BaseRepository[POSConfiguration, dict, dict]):
        except Exception as e:
            logger.error("Failed to count configurations by tenant", error=str(e), tenant_id=tenant_id)
            raise

    async def get_by_pos_identifier(
        self,
        pos_system: str,
        identifier: str
    ) -> Optional[POSConfiguration]:
        """
        Get POS configuration by POS-specific identifier

        Args:
            pos_system: POS system name (square, toast, lightspeed)
            identifier: merchant_id, location_id, or other POS-specific ID

        Returns:
            POSConfiguration if found, None otherwise
        """
        try:
            query = select(self.model).where(
                and_(
                    self.model.pos_system == pos_system,
                    or_(
                        self.model.merchant_id == identifier,
                        self.model.location_id == identifier
                    ),
                    self.model.is_active == True
                )
            ).order_by(self.model.created_at.desc())

            result = await self.session.execute(query)
            return result.scalars().first()

        except Exception as e:
            logger.error("Failed to get config by POS identifier",
                         error=str(e),
                         pos_system=pos_system,
                         identifier=identifier)
            raise

services/pos/app/scheduler.py (new file, 145 lines)
@@ -0,0 +1,145 @@
"""
Background Task Scheduler for POS Service

Sets up periodic background jobs for:
- Syncing POS transactions to sales service
- Other maintenance tasks as needed

To enable scheduling, add to main.py startup:
```python
from app.scheduler import start_scheduler, shutdown_scheduler

@app.on_event("startup")
async def startup_event():
    start_scheduler()

@app.on_event("shutdown")
async def shutdown_event():
    shutdown_scheduler()
```
"""

import structlog
from apscheduler.schedulers.asyncio import AsyncIOScheduler
from apscheduler.triggers.interval import IntervalTrigger
from datetime import datetime

logger = structlog.get_logger()

# Global scheduler instance
scheduler = None


def start_scheduler():
    """
    Initialize and start the background scheduler

    Jobs configured:
    - POS to Sales Sync: Every 5 minutes
    """
    global scheduler

    if scheduler is not None:
        logger.warning("Scheduler already running")
        return

    try:
        scheduler = AsyncIOScheduler()

        # Job 1: Sync POS transactions to sales service
        from app.jobs.sync_pos_to_sales import run_pos_to_sales_sync

        scheduler.add_job(
            run_pos_to_sales_sync,
            trigger=IntervalTrigger(minutes=5),
            id='pos_to_sales_sync',
            name='Sync POS Transactions to Sales',
            replace_existing=True,
            max_instances=1,  # Prevent concurrent runs
            coalesce=True,  # Combine multiple missed runs into one
            misfire_grace_time=60  # Allow 60 seconds grace for missed runs
        )

        scheduler.start()
        logger.info("Background scheduler started",
                    jobs=len(scheduler.get_jobs()),
                    next_run=scheduler.get_jobs()[0].next_run_time if scheduler.get_jobs() else None)

    except Exception as e:
        logger.error("Failed to start scheduler", error=str(e), exc_info=True)
        scheduler = None


def shutdown_scheduler():
    """Gracefully shutdown the scheduler"""
    global scheduler

    if scheduler is None:
        logger.warning("Scheduler not running")
        return

    try:
        scheduler.shutdown(wait=True)
        logger.info("Background scheduler stopped")
        scheduler = None

    except Exception as e:
        logger.error("Failed to shutdown scheduler", error=str(e), exc_info=True)


def get_scheduler_status():
    """
    Get current scheduler status

    Returns:
        Dict with scheduler info and job statuses
    """
    if scheduler is None:
        return {
            "running": False,
            "jobs": []
        }

    jobs = []
    for job in scheduler.get_jobs():
        jobs.append({
            "id": job.id,
            "name": job.name,
            "next_run": job.next_run_time.isoformat() if job.next_run_time else None,
            "trigger": str(job.trigger)
        })

    return {
        "running": True,
        "jobs": jobs,
        "state": scheduler.state
    }


def trigger_job_now(job_id: str):
    """
    Manually trigger a scheduled job immediately

    Args:
        job_id: Job identifier (e.g., 'pos_to_sales_sync')

    Returns:
        True if job was triggered, False otherwise
    """
    if scheduler is None:
        logger.error("Cannot trigger job, scheduler not running")
        return False

    try:
        job = scheduler.get_job(job_id)
        if job:
            scheduler.modify_job(job_id, next_run_time=datetime.now())
            logger.info("Job triggered manually", job_id=job_id)
            return True
        else:
            logger.warning("Job not found", job_id=job_id)
            return False

    except Exception as e:
        logger.error("Failed to trigger job", job_id=job_id, error=str(e))
        return False
services/pos/app/services/pos_sync_service.py (new file, 234 lines)
@@ -0,0 +1,234 @@
"""
POS Sync Service - Business Logic Layer
Handles sync job creation, tracking, and metrics
"""

from typing import Optional, List, Dict, Any
from uuid import UUID, uuid4
from datetime import datetime, timedelta
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, func, and_, desc
import structlog

from app.models.pos_sync import POSSyncLog
from app.core.database import get_db_transaction

logger = structlog.get_logger()


class POSSyncService:
    """Service layer for POS sync operations"""

    def __init__(self, db: Optional[AsyncSession] = None):
        self.db = db

    async def create_sync_job(
        self,
        tenant_id: UUID,
        pos_config_id: UUID,
        pos_system: str,
        sync_type: str = "manual",
        data_types: Optional[List[str]] = None
    ) -> POSSyncLog:
        """
        Create a new sync job

        Args:
            tenant_id: Tenant UUID
            pos_config_id: POS configuration UUID
            pos_system: POS system name
            sync_type: Type of sync (manual, scheduled, incremental, full)
            data_types: List of data types to sync

        Returns:
            Created sync log
        """
        try:
            async with get_db_transaction() as db:
                sync_log = POSSyncLog(
                    tenant_id=tenant_id,
                    pos_config_id=pos_config_id,
                    pos_system=pos_system,
                    sync_type=sync_type,
                    sync_direction="inbound",
                    data_type=",".join(data_types) if data_types else "transactions",
                    status="started",
                    started_at=datetime.utcnow(),
                    triggered_by="user"
                )

                db.add(sync_log)
                await db.commit()
                await db.refresh(sync_log)

                logger.info("Sync job created",
                            sync_id=str(sync_log.id),
                            tenant_id=str(tenant_id),
                            pos_system=pos_system,
                            sync_type=sync_type)

                return sync_log

        except Exception as e:
            logger.error("Failed to create sync job", error=str(e))
            raise

    async def get_sync_by_id(self, sync_id: UUID) -> Optional[POSSyncLog]:
        """Get sync log by ID"""
        try:
            async with get_db_transaction() as db:
                return await db.get(POSSyncLog, sync_id)
        except Exception as e:
            logger.error("Failed to get sync log", error=str(e), sync_id=str(sync_id))
            raise

    async def update_sync_status(
        self,
        sync_id: UUID,
        status: str,
        error_message: Optional[str] = None,
        stats: Optional[Dict[str, int]] = None
    ) -> None:
        """Update sync job status"""
        try:
            async with get_db_transaction() as db:
                sync_log = await db.get(POSSyncLog, sync_id)

                if sync_log:
                    sync_log.status = status
                    sync_log.completed_at = datetime.utcnow()
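                    # Note: completed_at is stamped on every status update; if
                    # intermediate statuses (e.g. "running") are ever recorded here,
                    # this assignment should be guarded by a terminal-status check.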

                    if sync_log.started_at:
                        duration = (datetime.utcnow() - sync_log.started_at).total_seconds()
                        sync_log.duration_seconds = duration

                    if error_message:
                        sync_log.error_message = error_message

                    if stats:
                        sync_log.records_processed = stats.get("processed", 0)
                        sync_log.records_created = stats.get("created", 0)
                        sync_log.records_updated = stats.get("updated", 0)
                        sync_log.records_failed = stats.get("failed", 0)

                    await db.commit()

                    logger.info("Sync status updated",
                                sync_id=str(sync_id),
                                status=status)

        except Exception as e:
            logger.error("Failed to update sync status", error=str(e))
            raise

    async def get_sync_logs(
        self,
        tenant_id: UUID,
        config_id: Optional[UUID] = None,
        status: Optional[str] = None,
        sync_type: Optional[str] = None,
        limit: int = 50,
        offset: int = 0
    ) -> Dict[str, Any]:
        """
        Get sync logs with filtering

        Returns:
            Dict with logs and pagination info
        """
        try:
            async with get_db_transaction() as db:
                query = select(POSSyncLog).where(POSSyncLog.tenant_id == tenant_id)

                # Apply filters
                if config_id:
                    query = query.where(POSSyncLog.pos_config_id == config_id)
                if status:
                    query = query.where(POSSyncLog.status == status)
                if sync_type:
                    query = query.where(POSSyncLog.sync_type == sync_type)

                # Get total count
                count_query = select(func.count()).select_from(query.subquery())
                result = await db.execute(count_query)
                total = result.scalar() or 0

                # Get paginated results
                query = query.order_by(desc(POSSyncLog.started_at)).offset(offset).limit(limit)
                result = await db.execute(query)
                logs = result.scalars().all()

                return {
                    "logs": [self._sync_log_to_dict(log) for log in logs],
                    "total": total,
                    "has_more": offset + len(logs) < total
                }

        except Exception as e:
            logger.error("Failed to get sync logs", error=str(e))
            raise

    async def calculate_average_duration(
        self,
        tenant_id: UUID,
        pos_config_id: Optional[UUID] = None,
        days: int = 30
    ) -> float:
        """
        Calculate average sync duration for recent successful syncs

        Args:
            tenant_id: Tenant UUID
            pos_config_id: Optional POS config filter
            days: Number of days to look back

        Returns:
            Average duration in minutes
        """
        try:
            async with get_db_transaction() as db:
                cutoff_date = datetime.utcnow() - timedelta(days=days)

                query = select(func.avg(POSSyncLog.duration_seconds)).where(
                    and_(
                        POSSyncLog.tenant_id == tenant_id,
                        POSSyncLog.status == "completed",
                        POSSyncLog.started_at >= cutoff_date,
                        POSSyncLog.duration_seconds.isnot(None)
                    )
                )

                if pos_config_id:
                    query = query.where(POSSyncLog.pos_config_id == pos_config_id)

                result = await db.execute(query)
                avg_seconds = result.scalar()

                if avg_seconds:
                    return round(float(avg_seconds) / 60, 2)  # Convert to minutes
                else:
                    return 0.0

        except Exception as e:
            logger.error("Failed to calculate average duration", error=str(e))
            return 0.0

    def _sync_log_to_dict(self, sync_log: POSSyncLog) -> Dict[str, Any]:
        """Convert sync log to dictionary"""
        return {
            "id": str(sync_log.id),
            "tenant_id": str(sync_log.tenant_id),
            "pos_config_id": str(sync_log.pos_config_id),
            "pos_system": sync_log.pos_system,
            "sync_type": sync_log.sync_type,
            "data_type": sync_log.data_type,
            "status": sync_log.status,
            "started_at": sync_log.started_at.isoformat() if sync_log.started_at else None,
            "completed_at": sync_log.completed_at.isoformat() if sync_log.completed_at else None,
            "duration_seconds": float(sync_log.duration_seconds) if sync_log.duration_seconds else None,
            "records_processed": sync_log.records_processed,
            "records_created": sync_log.records_created,
            "records_updated": sync_log.records_updated,
            "records_failed": sync_log.records_failed,
            "error_message": sync_log.error_message
        }
@@ -237,3 +237,246 @@ class POSTransactionService:
        except Exception as e:
            logger.error("Failed to calculate transaction analytics", error=str(e), tenant_id=tenant_id)
            raise

    async def sync_transaction_to_sales(
        self,
        transaction_id: UUID,
        tenant_id: UUID
    ) -> Dict[str, Any]:
        """
        Sync a single POS transaction to the sales service

        Args:
            transaction_id: Transaction UUID
            tenant_id: Tenant UUID

        Returns:
            Dict with sync status and details
        """
        try:
            from shared.clients.sales_client import SalesServiceClient
            from app.core.config import settings

            async with get_db_transaction() as db:
                transaction_repo = POSTransactionRepository(db)
                items_repo = POSTransactionItemRepository(db)

                # Get transaction
                transaction = await transaction_repo.get_by_id(transaction_id)
                if not transaction or transaction.tenant_id != tenant_id:
                    return {
                        "success": False,
                        "error": "Transaction not found or unauthorized"
                    }

                # Check if already synced
                if transaction.is_synced_to_sales:
                    logger.info("Transaction already synced to sales",
                                transaction_id=transaction_id,
                                sales_record_id=transaction.sales_record_id)
                    return {
                        "success": True,
                        "already_synced": True,
                        "sales_record_id": str(transaction.sales_record_id)
                    }

                # Get transaction items
                items = await items_repo.get_by_transaction_id(transaction_id)

                # Initialize sales client
                sales_client = SalesServiceClient(settings, calling_service_name="pos")

                # Create sales records for each item
                sales_record_ids = []
                failed_items = []

                for item in items:
                    try:
                        sales_data = {
                            "inventory_product_id": str(item.product_id) if item.product_id else None,
                            "product_name": item.product_name,
                            "product_category": "finished_product",
                            "quantity_sold": float(item.quantity),
                            "unit_price": float(item.unit_price),
                            "total_amount": float(item.subtotal),
                            "sale_date": transaction.transaction_date.strftime("%Y-%m-%d"),
                            "sales_channel": "pos",
                            "source": f"pos_sync_{transaction.pos_system}",
                            "payment_method": transaction.payment_method or "unknown",
                            "notes": f"POS Transaction: {transaction.external_transaction_id or transaction_id}"
                        }

                        result = await sales_client.create_sales_record(
                            tenant_id=str(tenant_id),
                            sales_data=sales_data
                        )

                        if result and result.get("id"):
                            sales_record_ids.append(result["id"])
                            logger.info("Synced item to sales",
                                        transaction_id=transaction_id,
                                        item_id=item.id,
                                        sales_record_id=result["id"])
                        else:
                            failed_items.append({
                                "item_id": str(item.id),
                                "product_name": item.product_name,
                                "error": "No sales record ID returned"
                            })

                    except Exception as item_error:
                        logger.error("Failed to sync item to sales",
                                     error=str(item_error),
                                     transaction_id=transaction_id,
                                     item_id=item.id)
                        failed_items.append({
                            "item_id": str(item.id),
                            "product_name": item.product_name,
                            "error": str(item_error)
                        })

                # Update transaction sync status
                if sales_record_ids and len(failed_items) == 0:
                    # Full success
                    transaction.is_synced_to_sales = True
                    transaction.sales_record_id = UUID(sales_record_ids[0])  # Store first record ID
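                    # Only the first sales record ID is persisted on the transaction
                    # row; for multi-item transactions the remaining IDs are returned
                    # to the caller but not stored.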
                    transaction.sync_completed_at = datetime.utcnow()
                    await db.commit()

                    logger.info("Transaction fully synced to sales",
                                transaction_id=transaction_id,
                                items_synced=len(sales_record_ids))

                    return {
                        "success": True,
                        "items_synced": len(sales_record_ids),
                        "sales_record_ids": sales_record_ids,
                        "failed_items": []
                    }

                elif sales_record_ids and len(failed_items) > 0:
                    # Partial success
                    transaction.sync_attempted_at = datetime.utcnow()
                    transaction.sync_error = f"Partial sync: {len(failed_items)} items failed"
                    transaction.sync_retry_count = (transaction.sync_retry_count or 0) + 1
                    await db.commit()

                    logger.warning("Transaction partially synced to sales",
                                   transaction_id=transaction_id,
                                   items_synced=len(sales_record_ids),
                                   items_failed=len(failed_items))

                    return {
                        "success": False,
                        "partial_success": True,
                        "items_synced": len(sales_record_ids),
                        "sales_record_ids": sales_record_ids,
                        "failed_items": failed_items
                    }

                else:
                    # Complete failure
                    transaction.sync_attempted_at = datetime.utcnow()
                    transaction.sync_error = "All items failed to sync"
                    transaction.sync_retry_count = (transaction.sync_retry_count or 0) + 1
                    await db.commit()

                    logger.error("Transaction sync failed completely",
                                 transaction_id=transaction_id,
                                 items_failed=len(failed_items))

                    return {
                        "success": False,
                        "items_synced": 0,
                        "failed_items": failed_items
                    }

        except Exception as e:
            logger.error("Failed to sync transaction to sales",
                         error=str(e),
                         transaction_id=transaction_id,
                         tenant_id=tenant_id)
            return {
                "success": False,
                "error": str(e)
            }

    async def sync_unsynced_transactions(
        self,
        tenant_id: UUID,
        limit: int = 50
    ) -> Dict[str, Any]:
        """
        Sync all unsynced transactions to the sales service

        Args:
            tenant_id: Tenant UUID
            limit: Maximum number of transactions to sync in one batch

        Returns:
            Dict with sync summary
        """
        try:
            async with get_db_transaction() as db:
                repository = POSTransactionRepository(db)

                # Get unsynced transactions
                unsynced_transactions = await repository.get_transactions_by_tenant(
                    tenant_id=tenant_id,
                    is_synced=False,
                    status="completed",  # Only sync completed transactions
                    limit=limit
                )

                if not unsynced_transactions:
                    logger.info("No unsynced transactions found", tenant_id=tenant_id)
                    return {
                        "success": True,
                        "total_transactions": 0,
                        "synced": 0,
                        "failed": 0
                    }

                synced_count = 0
                failed_count = 0
                results = []

                for transaction in unsynced_transactions:
                    result = await self.sync_transaction_to_sales(
                        transaction.id,
                        tenant_id
                    )

                    if result.get("success"):
                        synced_count += 1
                    else:
                        failed_count += 1

                    results.append({
                        "transaction_id": str(transaction.id),
                        "external_id": transaction.external_transaction_id,
                        "result": result
                    })

                logger.info("Batch sync completed",
                            tenant_id=tenant_id,
                            total=len(unsynced_transactions),
                            synced=synced_count,
                            failed=failed_count)

                return {
                    "success": True,
                    "total_transactions": len(unsynced_transactions),
                    "synced": synced_count,
                    "failed": failed_count,
                    "results": results
                }

        except Exception as e:
            logger.error("Failed to batch sync transactions",
                         error=str(e),
                         tenant_id=tenant_id)
            return {
                "success": False,
                "error": str(e)
            }

services/pos/app/services/pos_webhook_service.py (new file, 409 lines)
@@ -0,0 +1,409 @@
"""
POS Webhook Service - Business Logic Layer
Handles webhook processing, signature verification, and logging
"""

from typing import Optional, Dict, Any, Tuple
from uuid import UUID
import structlog
import hashlib
import hmac
import base64
import json
from datetime import datetime

from sqlalchemy.ext.asyncio import AsyncSession
from app.models.pos_webhook import POSWebhookLog
from app.repositories.pos_config_repository import POSConfigurationRepository
from app.core.database import get_db_transaction

logger = structlog.get_logger()


class POSWebhookService:
    """Service layer for POS webhook operations"""

    def __init__(self, db: Optional[AsyncSession] = None):
        self.db = db

    async def verify_webhook_signature(
        self,
        pos_system: str,
        payload: str,
        signature: str,
        webhook_secret: str
    ) -> bool:
        """
        Verify webhook signature based on POS system

        Args:
            pos_system: POS system name (square, toast, lightspeed)
            payload: Raw webhook payload
            signature: Signature from webhook headers
            webhook_secret: Secret key from POS configuration

        Returns:
            True if signature is valid, False otherwise
        """
        try:
            if pos_system.lower() == "square":
                return self._verify_square_signature(payload, signature, webhook_secret)
            elif pos_system.lower() == "toast":
                return self._verify_toast_signature(payload, signature, webhook_secret)
            elif pos_system.lower() == "lightspeed":
                return self._verify_lightspeed_signature(payload, signature, webhook_secret)
            else:
                logger.warning("Unknown POS system for signature verification", pos_system=pos_system)
                return False

        except Exception as e:
            logger.error("Signature verification failed", error=str(e), pos_system=pos_system)
            return False

    def _verify_square_signature(self, payload: str, signature: str, secret: str) -> bool:
        """Verify Square webhook signature using HMAC-SHA256"""
        try:
            # Square combines URL + body for signature
            # Format: <notification_url> + <request_body>
            # For simplicity, we'll just verify the body
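            # NOTE (assumption): Square's documented scheme signs the notification
            # URL concatenated with the raw body, so a stricter check would compute
            # the HMAC over f"{notification_url}{payload}" rather than the body alone.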
            expected_signature = hmac.new(
                secret.encode('utf-8'),
                payload.encode('utf-8'),
                hashlib.sha256
            ).digest()

            # Square sends base64-encoded signature
            expected_b64 = base64.b64encode(expected_signature).decode('utf-8')

            return hmac.compare_digest(signature, expected_b64)
        except Exception as e:
            logger.error("Square signature verification error", error=str(e))
            return False

    def _verify_toast_signature(self, payload: str, signature: str, secret: str) -> bool:
        """Verify Toast webhook signature using HMAC-SHA256"""
        try:
            expected_signature = hmac.new(
                secret.encode('utf-8'),
                payload.encode('utf-8'),
                hashlib.sha256
            ).hexdigest()

            return hmac.compare_digest(signature, expected_signature)
        except Exception as e:
            logger.error("Toast signature verification error", error=str(e))
            return False

    def _verify_lightspeed_signature(self, payload: str, signature: str, secret: str) -> bool:
        """Verify Lightspeed webhook signature using HMAC-SHA256"""
        try:
            expected_signature = hmac.new(
                secret.encode('utf-8'),
                payload.encode('utf-8'),
                hashlib.sha256
            ).hexdigest()

            return hmac.compare_digest(signature.lower(), expected_signature.lower())
        except Exception as e:
            logger.error("Lightspeed signature verification error", error=str(e))
            return False

    async def extract_tenant_id_from_payload(
        self,
        pos_system: str,
        parsed_payload: Dict[str, Any]
    ) -> Optional[UUID]:
        """
        Extract tenant_id from webhook payload by matching POS system identifiers

        Args:
            pos_system: POS system name
            parsed_payload: Parsed JSON payload

        Returns:
            tenant_id if found, None otherwise
        """
        try:
            # Extract POS-specific identifiers
            pos_identifier = None

            if pos_system.lower() == "square":
                # Square uses merchant_id or location_id
                pos_identifier = (
                    parsed_payload.get("merchant_id") or
                    parsed_payload.get("data", {}).get("object", {}).get("merchant_id") or
                    parsed_payload.get("location_id")
                )
            elif pos_system.lower() == "toast":
                # Toast uses restaurantGuid
                pos_identifier = (
                    parsed_payload.get("restaurantGuid") or
                    parsed_payload.get("restaurant", {}).get("guid")
                )
            elif pos_system.lower() == "lightspeed":
                # Lightspeed uses accountID
                pos_identifier = (
                    parsed_payload.get("accountID") or
                    parsed_payload.get("account", {}).get("id")
                )

            if not pos_identifier:
                logger.warning("Could not extract POS identifier from payload", pos_system=pos_system)
                return None

            # Query database to find tenant_id by POS identifier
            async with get_db_transaction() as db:
                repository = POSConfigurationRepository(db)
                config = await repository.get_by_pos_identifier(pos_system, pos_identifier)

                if config:
                    return config.tenant_id
                else:
                    logger.warning("No tenant found for POS identifier",
                                   pos_system=pos_system,
                                   identifier=pos_identifier)
                    return None

        except Exception as e:
            logger.error("Failed to extract tenant_id", error=str(e), pos_system=pos_system)
            return None

    async def log_webhook(
        self,
        pos_system: str,
        webhook_type: str,
        method: str,
        url_path: str,
        query_params: Dict[str, Any],
        headers: Dict[str, str],
        raw_payload: str,
        payload_size: int,
        content_type: Optional[str],
        signature: Optional[str],
        is_signature_valid: Optional[bool],
        source_ip: Optional[str],
        event_id: Optional[str] = None,
        tenant_id: Optional[UUID] = None,
        transaction_id: Optional[str] = None,
        order_id: Optional[str] = None
    ) -> POSWebhookLog:
        """
        Create a webhook log entry in the database

        Returns:
            Created POSWebhookLog instance
        """
        try:
            async with get_db_transaction() as db:
                webhook_log = POSWebhookLog(
                    tenant_id=tenant_id,
                    pos_system=pos_system,
                    webhook_type=webhook_type,
                    method=method,
                    url_path=url_path,
                    query_params=query_params,
                    headers=headers,
                    raw_payload=raw_payload,
                    payload_size=payload_size,
                    content_type=content_type,
                    signature=signature,
                    is_signature_valid=is_signature_valid,
                    source_ip=source_ip,
                    status="received",
                    event_id=event_id,
                    transaction_id=transaction_id,
                    order_id=order_id,
                    received_at=datetime.utcnow(),
                    user_agent=headers.get("user-agent"),
                    forwarded_for=headers.get("x-forwarded-for"),
                    request_id=headers.get("x-request-id")
                )

                db.add(webhook_log)
                await db.commit()
                await db.refresh(webhook_log)

                logger.info("Webhook logged to database",
                            webhook_log_id=str(webhook_log.id),
                            pos_system=pos_system,
                            webhook_type=webhook_type,
                            tenant_id=str(tenant_id) if tenant_id else None)

                return webhook_log

        except Exception as e:
            logger.error("Failed to log webhook", error=str(e), pos_system=pos_system)
            raise

    async def get_webhook_secret(
        self,
        pos_system: str,
        tenant_id: Optional[UUID] = None
    ) -> Optional[str]:
        """
        Get webhook secret for signature verification

        Args:
            pos_system: POS system name
            tenant_id: Optional tenant_id if known

        Returns:
            Webhook secret if found
        """
        try:
            async with get_db_transaction() as db:
                repository = POSConfigurationRepository(db)

                if tenant_id:
                    # Get active config for tenant and POS system
                    configs = await repository.get_configurations_by_tenant(
                        tenant_id=tenant_id,
                        pos_system=pos_system,
                        is_active=True,
                        skip=0,
                        limit=1
                    )

                    if configs:
                        return configs[0].webhook_secret

                return None

        except Exception as e:
            logger.error("Failed to get webhook secret", error=str(e))
            return None

    async def update_webhook_status(
        self,
        webhook_log_id: UUID,
        status: str,
        error_message: Optional[str] = None,
        processing_duration_ms: Optional[int] = None
    ) -> None:
        """Update webhook processing status"""
        try:
            async with get_db_transaction() as db:
                webhook_log = await db.get(POSWebhookLog, webhook_log_id)

                if webhook_log:
                    webhook_log.status = status
                    webhook_log.processing_completed_at = datetime.utcnow()

                    if error_message:
                        webhook_log.error_message = error_message
                        webhook_log.retry_count += 1

                    if processing_duration_ms:
                        webhook_log.processing_duration_ms = processing_duration_ms

                    await db.commit()

                    logger.info("Webhook status updated",
                                webhook_log_id=str(webhook_log_id),
                                status=status)

        except Exception as e:
            logger.error("Failed to update webhook status", error=str(e))
            raise

    async def check_duplicate_webhook(
        self,
        pos_system: str,
        event_id: str,
        tenant_id: Optional[UUID] = None
    ) -> Tuple[bool, Optional[UUID]]:
        """
        Check if webhook has already been processed

        Returns:
            Tuple of (is_duplicate, original_webhook_id)
        """
        try:
            async with get_db_transaction() as db:
                from sqlalchemy import select

                query = select(POSWebhookLog).where(
                    POSWebhookLog.pos_system == pos_system,
                    POSWebhookLog.event_id == event_id,
                    POSWebhookLog.status == "processed"
                )

                if tenant_id:
                    query = query.where(POSWebhookLog.tenant_id == tenant_id)

                result = await db.execute(query)
                existing = result.scalar_one_or_none()

                if existing:
                    logger.info("Duplicate webhook detected",
                                pos_system=pos_system,
                                event_id=event_id,
                                original_id=str(existing.id))
                    return True, existing.id

                return False, None

        except Exception as e:
            logger.error("Failed to check duplicate webhook", error=str(e))
            return False, None

    def parse_webhook_event_details(
        self,
        pos_system: str,
        parsed_payload: Dict[str, Any]
    ) -> Dict[str, Any]:
        """
        Extract standardized event details from POS-specific payload

        Returns:
            Dict with event_id, webhook_type, transaction_id, order_id, etc.
        """
        details = {
            "event_id": None,
            "webhook_type": None,
            "transaction_id": None,
            "order_id": None,
            "customer_id": None,
            "event_timestamp": None
        }

        try:
            if pos_system.lower() == "square":
                details["event_id"] = parsed_payload.get("event_id")
                details["webhook_type"] = parsed_payload.get("type")

                data = parsed_payload.get("data", {}).get("object", {})
                details["transaction_id"] = data.get("id")
                details["order_id"] = data.get("order_id")
                details["customer_id"] = data.get("customer_id")

                created_at = parsed_payload.get("created_at")
                if created_at:
                    details["event_timestamp"] = datetime.fromisoformat(created_at.replace('Z', '+00:00'))

            elif pos_system.lower() == "toast":
                details["event_id"] = parsed_payload.get("guid")
                details["webhook_type"] = parsed_payload.get("eventType")
                details["order_id"] = parsed_payload.get("entityId")

                created_at = parsed_payload.get("eventTime")
                if created_at:
                    try:
                        details["event_timestamp"] = datetime.fromtimestamp(created_at / 1000)
                    except (TypeError, ValueError, OSError):
                        pass

            elif pos_system.lower() == "lightspeed":
                details["event_id"] = parsed_payload.get("id")
                details["webhook_type"] = parsed_payload.get("action")
                details["transaction_id"] = parsed_payload.get("objectID")

                created_at = parsed_payload.get("createdAt")
                if created_at:
                    details["event_timestamp"] = datetime.fromisoformat(created_at.replace('Z', '+00:00'))

            return details

        except Exception as e:
            logger.error("Failed to parse webhook event details", error=str(e))
            return details
@@ -19,3 +19,4 @@ aio-pika==9.4.3
email-validator==2.2.0
psycopg2-binary==2.9.10
pytz==2024.2
+apscheduler==3.10.4