2025-10-06 15:27:01 +02:00
|
|
|
"""
|
|
|
|
|
POS Operations API Endpoints
|
|
|
|
|
BUSINESS layer - Sync operations, webhooks, reconciliation, and test connection
|
|
|
|
|
"""
|
|
|
|
|
|
|
|
|
|
from fastapi import APIRouter, Depends, HTTPException, Path, Query, Body, Request, Header
|
|
|
|
|
from typing import Optional, Dict, Any
|
|
|
|
|
from uuid import UUID
|
|
|
|
|
from datetime import datetime
|
|
|
|
|
import structlog
|
|
|
|
|
import json
|
|
|
|
|
|
|
|
|
|
from app.core.database import get_db
|
|
|
|
|
from shared.auth.decorators import get_current_user_dep
|
2025-10-31 11:54:19 +01:00
|
|
|
from shared.auth.access_control import require_user_role, admin_role_required, service_only_access
|
2025-10-06 15:27:01 +02:00
|
|
|
from shared.routing import RouteBuilder
|
2025-10-23 07:44:54 +02:00
|
|
|
from app.services.pos_transaction_service import POSTransactionService
|
|
|
|
|
from app.services.pos_config_service import POSConfigurationService
|
2025-11-12 15:34:10 +01:00
|
|
|
from app.services.pos_webhook_service import POSWebhookService
|
|
|
|
|
from app.services.pos_sync_service import POSSyncService
|
2025-10-31 11:54:19 +01:00
|
|
|
from app.services.tenant_deletion_service import POSTenantDeletionService
|
2025-10-06 15:27:01 +02:00
|
|
|
|
|
|
|
|
router = APIRouter()
|
|
|
|
|
logger = structlog.get_logger()
|
|
|
|
|
route_builder = RouteBuilder('pos')
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
# Sync Operations
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
|
|
|
|
@router.post(
    route_builder.build_operations_route("sync"),
    response_model=dict
)
@require_user_role(['member', 'admin', 'owner'])
async def trigger_sync(
    sync_request: Dict[str, Any],
    tenant_id: UUID = Path(...),
    current_user: dict = Depends(get_current_user_dep),
    db=Depends(get_db)
):
    """
    Trigger manual synchronization with POS system (Member+).

    Expects a JSON body with:
    - config_id (required): POS configuration ID to sync against
    - sync_type (optional): defaults to "incremental"
    - data_types (optional): defaults to ["transactions"]

    Returns a summary of the queued sync job.

    Raises:
        HTTPException 400: config_id missing or not a valid UUID
        HTTPException 404: configuration not found for this tenant
        HTTPException 500: unexpected failure while queuing the job
    """
    try:
        sync_type = sync_request.get("sync_type", "incremental")
        data_types = sync_request.get("data_types", ["transactions"])
        config_id = sync_request.get("config_id")

        if not config_id:
            raise HTTPException(status_code=400, detail="config_id is required")

        # Validate the config_id format up front: previously a malformed value
        # only failed later inside UUID(config_id) and surfaced as a 500.
        try:
            config_uuid = UUID(str(config_id))
        except (ValueError, TypeError):
            raise HTTPException(status_code=400, detail="config_id must be a valid UUID")

        # Get POS configuration to determine system type
        config_service = POSConfigurationService()
        configs = await config_service.get_configurations_by_tenant(tenant_id, skip=0, limit=100)
        config = next((c for c in configs if str(c.id) == str(config_id)), None)

        if not config:
            raise HTTPException(status_code=404, detail="POS configuration not found")

        # Create sync job
        sync_service = POSSyncService(db)
        sync_log = await sync_service.create_sync_job(
            tenant_id=tenant_id,
            pos_config_id=config_uuid,
            pos_system=config.pos_system,
            sync_type=sync_type,
            data_types=data_types
        )

        logger.info("Manual sync triggered",
                    tenant_id=tenant_id,
                    config_id=config_id,
                    sync_id=str(sync_log.id),
                    sync_type=sync_type,
                    user_id=current_user.get("user_id"))

        return {
            "message": "Sync triggered successfully",
            "sync_id": str(sync_log.id),
            "status": "queued",
            "sync_type": sync_type,
            "data_types": data_types,
            "estimated_duration": "5-10 minutes"
        }
    except HTTPException:
        raise
    except Exception as e:
        logger.error("Failed to trigger sync", error=str(e), tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to trigger sync: {str(e)}")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@router.get(
    route_builder.build_operations_route("sync-status"),
    response_model=dict
)
@require_user_role(['viewer', 'member', 'admin', 'owner'])
async def get_sync_status(
    tenant_id: UUID = Path(...),
    config_id: Optional[UUID] = Query(None),
    limit: int = Query(10, ge=1, le=100),
    current_user: dict = Depends(get_current_user_dep),
    db=Depends(get_db)
):
    """Get synchronization status and recent sync history"""
    try:
        tx_service = POSTransactionService()
        sync_service = POSSyncService(db)

        # Aggregate sync metrics for the tenant, including per-status counts.
        metrics = await tx_service.get_sync_metrics(tenant_id)
        status_counts = metrics["sync_status"]
        last_sync_at = status_counts.get("last_sync_at")

        # Success rate = synced / total transactions; a tenant with no
        # transactions is reported as fully healthy (100%).
        total_count = metrics["total_transactions"]
        synced_count = status_counts.get("synced", 0)
        rate = synced_count / total_count * 100 if total_count > 0 else 100.0

        if rate > 90:
            health = "healthy"
        elif rate > 70:
            health = "degraded"
        else:
            health = "unhealthy"

        # Average sync duration over the last 30 days of sync logs.
        avg_minutes = await sync_service.calculate_average_duration(
            tenant_id=tenant_id,
            pos_config_id=config_id,
            days=30
        )

        return {
            "current_sync": None,
            "last_successful_sync": last_sync_at.isoformat() if last_sync_at else None,
            "recent_syncs": [],  # Could be enhanced with actual sync history
            "sync_health": {
                "status": health,
                "success_rate": round(rate, 2),
                "average_duration_minutes": avg_minutes,
                "last_error": None,
                "total_transactions": total_count,
                "synced_count": synced_count,
                "pending_count": status_counts.get("pending", 0),
                "failed_count": status_counts.get("failed", 0)
            }
        }
    except Exception as e:
        logger.error("Failed to get sync status", error=str(e), tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to get sync status: {str(e)}")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@router.get(
    route_builder.build_operations_route("sync-logs"),
    response_model=dict
)
@require_user_role(['viewer', 'member', 'admin', 'owner'])
async def get_sync_logs(
    tenant_id: UUID = Path(...),
    config_id: Optional[UUID] = Query(None),
    limit: int = Query(50, ge=1, le=200),
    offset: int = Query(0, ge=0),
    status: Optional[str] = Query(None),
    sync_type: Optional[str] = Query(None),
    current_user: dict = Depends(get_current_user_dep),
    db=Depends(get_db)
):
    """Get detailed sync logs"""
    try:
        # Thin passthrough: filtering and pagination are entirely
        # delegated to the sync service.
        return await POSSyncService(db).get_sync_logs(
            tenant_id=tenant_id,
            config_id=config_id,
            status=status,
            sync_type=sync_type,
            limit=limit,
            offset=offset
        )
    except Exception as e:
        logger.error("Failed to get sync logs", error=str(e), tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to get sync logs: {str(e)}")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@router.post(
    route_builder.build_operations_route("resync-failed"),
    response_model=dict
)
@admin_role_required
async def resync_failed_transactions(
    tenant_id: UUID = Path(...),
    days_back: int = Query(7, ge=1, le=90),
    current_user: dict = Depends(get_current_user_dep),
    db=Depends(get_db)
):
    """Resync failed transactions from the specified time period (Admin/Owner only)"""
    try:
        # The tenant's single active POS configuration is the sync target.
        config_service = POSConfigurationService()
        active_configs = await config_service.get_configurations_by_tenant(
            tenant_id=tenant_id,
            is_active=True,
            skip=0,
            limit=1
        )

        if not active_configs:
            raise HTTPException(status_code=404, detail="No active POS configuration found")

        active_config = active_configs[0]

        # Queue a dedicated "resync_failed" job covering transactions only.
        # NOTE(review): days_back is echoed back to the caller but is not
        # passed to create_sync_job — confirm the worker applies the window.
        sync_log = await POSSyncService(db).create_sync_job(
            tenant_id=tenant_id,
            pos_config_id=active_config.id,
            pos_system=active_config.pos_system,
            sync_type="resync_failed",
            data_types=["transactions"]
        )

        logger.info("Resync failed transactions requested",
                    tenant_id=tenant_id,
                    days_back=days_back,
                    sync_id=str(sync_log.id),
                    user_id=current_user.get("user_id"))

        return {
            "message": "Resync job queued successfully",
            "job_id": str(sync_log.id),
            "scope": f"Failed transactions from last {days_back} days",
            "estimated_transactions": 0
        }
    except HTTPException:
        raise
    except Exception as e:
        logger.error("Failed to queue resync job", error=str(e), tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to queue resync job: {str(e)}")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@router.post(
    route_builder.build_operations_route("test-connection"),
    response_model=dict
)
@admin_role_required
async def test_pos_connection(
    tenant_id: UUID = Path(...),
    config_id: UUID = Query(...),
    current_user: dict = Depends(get_current_user_dep),
    db=Depends(get_db)
):
    """Test connection to POS system (Admin/Owner only)"""
    try:
        # Look the requested configuration up among the tenant's configs.
        service = POSConfigurationService()
        tenant_configs = await service.get_configurations_by_tenant(
            tenant_id=tenant_id,
            skip=0,
            limit=100
        )

        wanted = str(config_id)
        config = next((c for c in tenant_configs if str(c.id) == wanted), None)
        if config is None:
            raise HTTPException(status_code=404, detail="Configuration not found")

        # For demo purposes, we assume connection is successful if config exists
        # In production, this would actually test the POS API connection
        is_connected = config.is_connected and config.is_active

        return {
            "success": is_connected,
            "status": "success" if is_connected else "failed",
            "message": f"Connection test {'successful' if is_connected else 'failed'} for {config.pos_system}",
            "tested_at": datetime.utcnow().isoformat(),
            "config_id": str(config_id),
            "pos_system": config.pos_system,
            "health_status": config.health_status
        }
    except HTTPException:
        raise
    except Exception as e:
        logger.error("Failed to test POS connection", error=str(e),
                     tenant_id=tenant_id, config_id=config_id)
        raise HTTPException(status_code=500, detail=f"Failed to test connection: {str(e)}")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
# Webhook Operations
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
|
|
|
|
@router.post(
    route_builder.build_webhook_route("{pos_system}"),
    response_model=dict
)
async def receive_webhook(
    request: Request,
    pos_system: str = Path(..., description="POS system name"),
    content_type: Optional[str] = Header(None),
    x_signature: Optional[str] = Header(None),
    x_webhook_signature: Optional[str] = Header(None),
    authorization: Optional[str] = Header(None),
    db=Depends(get_db)
):
    """
    Receive webhooks from POS systems.

    Supports Square, Toast, and Lightspeed webhook formats.
    Includes signature verification, database logging, and duplicate detection.

    Flow:
    1. Validate the POS system and read/parse the raw payload
    2. Extract event metadata and tenant, detect duplicate events
    3. Verify the signature when a tenant webhook secret is available
    4. Persist a webhook log row, then queue the event to RabbitMQ for
       async processing (falls back to status "received" when queuing
       is unavailable or fails)

    Returns the vendor-specific acknowledgement body; raises 500 on
    unexpected errors so the POS system retries delivery.
    """
    webhook_service = POSWebhookService(db)
    start_time = datetime.utcnow()

    try:
        # Validate POS system
        supported_systems = ["square", "toast", "lightspeed"]
        if pos_system.lower() not in supported_systems:
            raise HTTPException(status_code=400, detail=f"Unsupported POS system: {pos_system}")

        # Get request details
        method = request.method
        url_path = str(request.url.path)
        query_params = dict(request.query_params)
        headers = dict(request.headers)

        # Get client IP
        client_ip = None
        if hasattr(request, 'client') and request.client:
            client_ip = request.client.host

        # Read payload
        try:
            body = await request.body()
            raw_payload = body.decode('utf-8') if body else ""
            payload_size = len(body) if body else 0

            # Parse JSON if possible; non-JSON payloads are tolerated but logged
            parsed_payload = None
            if raw_payload:
                try:
                    parsed_payload = json.loads(raw_payload)
                except json.JSONDecodeError:
                    logger.warning("Failed to parse webhook payload as JSON",
                                   pos_system=pos_system, payload_size=payload_size)
        except Exception as e:
            logger.error("Failed to read webhook payload", error=str(e))
            raise HTTPException(status_code=400, detail="Failed to read request payload")

        # Determine signature from various header formats
        signature = x_signature or x_webhook_signature or authorization

        # Parse webhook event details
        event_details = webhook_service.parse_webhook_event_details(pos_system, parsed_payload or {})
        webhook_type = event_details.get("webhook_type") or "unknown"
        event_id = event_details.get("event_id")
        transaction_id = event_details.get("transaction_id")
        order_id = event_details.get("order_id")

        # Extract tenant_id from payload
        tenant_id = None
        if parsed_payload:
            tenant_id = await webhook_service.extract_tenant_id_from_payload(pos_system, parsed_payload)

        # Check for duplicate webhook
        is_duplicate = False
        if event_id:
            is_duplicate, _ = await webhook_service.check_duplicate_webhook(
                pos_system, event_id, tenant_id
            )

        # Verify webhook signature if tenant is identified
        is_signature_valid = None
        if signature and tenant_id:
            webhook_secret = await webhook_service.get_webhook_secret(pos_system, tenant_id)
            if webhook_secret:
                is_signature_valid = await webhook_service.verify_webhook_signature(
                    pos_system, raw_payload, signature, webhook_secret
                )

                if not is_signature_valid:
                    logger.warning("Webhook signature verification failed",
                                   pos_system=pos_system,
                                   tenant_id=str(tenant_id))

        # Log webhook receipt to database
        webhook_log = await webhook_service.log_webhook(
            pos_system=pos_system,
            webhook_type=webhook_type,
            method=method,
            url_path=url_path,
            query_params=query_params,
            headers=headers,
            raw_payload=raw_payload,
            payload_size=payload_size,
            content_type=content_type,
            signature=signature,
            is_signature_valid=is_signature_valid,
            source_ip=client_ip,
            event_id=event_id,
            tenant_id=tenant_id,
            transaction_id=transaction_id,
            order_id=order_id
        )

        # Mark as duplicate if detected; acknowledge so the vendor stops retrying
        if is_duplicate:
            await webhook_service.update_webhook_status(
                webhook_log.id,
                status="duplicate",
                error_message="Duplicate event already processed"
            )
            logger.info("Duplicate webhook ignored", event_id=event_id)
            return _get_webhook_response(pos_system, success=True)

        # Queue for async processing via RabbitMQ
        try:
            from shared.messaging import get_rabbitmq_client
            import uuid as uuid_module

            rabbitmq_client = get_rabbitmq_client()
            if rabbitmq_client:
                # Publish POS transaction event for async processing
                event_payload = {
                    "event_id": str(uuid_module.uuid4()),
                    "event_type": f"pos.{webhook_type}",
                    "timestamp": datetime.utcnow().isoformat(),
                    "tenant_id": str(tenant_id) if tenant_id else None,
                    "data": {
                        "webhook_log_id": str(webhook_log.id),
                        "pos_system": pos_system,
                        "webhook_type": webhook_type,
                        # Bug fix: this previously referenced an undefined name
                        # `webhook_data`, raising NameError on every non-duplicate
                        # webhook; the parsed JSON payload is the intended value.
                        "payload": parsed_payload,
                        "event_id": event_id
                    }
                }

                await rabbitmq_client.publish_event(
                    exchange_name="pos.events",
                    routing_key=f"pos.{webhook_type}",
                    event_data=event_payload
                )

                logger.info("POS transaction queued for async processing",
                            event_id=event_payload["event_id"],
                            webhook_log_id=str(webhook_log.id))

                # Update status to queued
                processing_duration_ms = int((datetime.utcnow() - start_time).total_seconds() * 1000)
                await webhook_service.update_webhook_status(
                    webhook_log.id,
                    status="queued",
                    processing_duration_ms=processing_duration_ms
                )
            else:
                logger.warning("RabbitMQ client not available, marking as received only")
                processing_duration_ms = int((datetime.utcnow() - start_time).total_seconds() * 1000)
                await webhook_service.update_webhook_status(
                    webhook_log.id,
                    status="received",
                    processing_duration_ms=processing_duration_ms
                )

        except Exception as queue_error:
            logger.error("Failed to queue POS transaction for async processing",
                         error=str(queue_error),
                         webhook_log_id=str(webhook_log.id))
            # Mark as received even if queuing fails
            processing_duration_ms = int((datetime.utcnow() - start_time).total_seconds() * 1000)
            await webhook_service.update_webhook_status(
                webhook_log.id,
                status="received",
                processing_duration_ms=processing_duration_ms
            )

        logger.info("Webhook processed and queued successfully",
                    pos_system=pos_system,
                    webhook_type=webhook_type,
                    event_id=event_id,
                    tenant_id=str(tenant_id) if tenant_id else None,
                    webhook_log_id=str(webhook_log.id))

        # Return appropriate response based on POS system requirements
        return _get_webhook_response(pos_system, success=True)

    except HTTPException:
        raise
    except Exception as e:
        logger.error("Webhook processing failed",
                     error=str(e),
                     pos_system=pos_system,
                     exc_info=True)

        # Return 500 to trigger POS system retry
        raise HTTPException(status_code=500, detail="Webhook processing failed")
|
|
|
|
|
|
|
|
|
|
|
2025-11-12 15:34:10 +01:00
|
|
|
def _get_webhook_response(pos_system: str, success: bool = True) -> Dict[str, Any]:
|
|
|
|
|
"""Get POS-specific webhook response format"""
|
|
|
|
|
if pos_system.lower() == "square":
|
|
|
|
|
return {"status": "success" if success else "error"}
|
|
|
|
|
elif pos_system.lower() == "toast":
|
|
|
|
|
return {"success": success}
|
|
|
|
|
elif pos_system.lower() == "lightspeed":
|
|
|
|
|
return {"received": success}
|
|
|
|
|
else:
|
|
|
|
|
return {"status": "received" if success else "error"}
|
|
|
|
|
|
|
|
|
|
|
2025-10-06 15:27:01 +02:00
|
|
|
@router.get(
    route_builder.build_webhook_route("{pos_system}/status"),
    response_model=dict
)
async def get_webhook_status(pos_system: str = Path(..., description="POS system name")):
    """
    Get webhook endpoint status for a POS system.

    Raises 400 for POS systems other than square/toast/lightspeed,
    500 on unexpected errors.
    """
    try:
        supported_systems = ["square", "toast", "lightspeed"]
        if pos_system.lower() not in supported_systems:
            raise HTTPException(status_code=400, detail=f"Unsupported POS system: {pos_system}")

        return {
            "pos_system": pos_system,
            "status": "active",
            "endpoint": f"/api/v1/webhooks/{pos_system}",
            "supported_events": _get_supported_events(pos_system),
            "last_received": None,
            "total_received": 0
        }
    except HTTPException:
        # Bug fix: the generic handler below previously caught the 400 for
        # unsupported systems and re-raised it as a 500; re-raise as-is,
        # consistent with the other endpoints in this module.
        raise
    except Exception as e:
        logger.error("Failed to get webhook status", error=str(e), pos_system=pos_system)
        raise HTTPException(status_code=500, detail=f"Failed to get webhook status: {str(e)}")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def _get_supported_events(pos_system: str) -> Dict[str, Any]:
|
|
|
|
|
"""Get supported webhook events for each POS system"""
|
|
|
|
|
events = {
|
|
|
|
|
"square": [
|
|
|
|
|
"payment.created",
|
|
|
|
|
"payment.updated",
|
|
|
|
|
"order.created",
|
|
|
|
|
"order.updated",
|
|
|
|
|
"order.fulfilled",
|
|
|
|
|
"inventory.count.updated"
|
|
|
|
|
],
|
|
|
|
|
"toast": [
|
|
|
|
|
"OrderCreated",
|
|
|
|
|
"OrderUpdated",
|
|
|
|
|
"OrderPaid",
|
|
|
|
|
"OrderCanceled",
|
|
|
|
|
"OrderVoided"
|
|
|
|
|
],
|
|
|
|
|
"lightspeed": [
|
|
|
|
|
"order.created",
|
|
|
|
|
"order.updated",
|
|
|
|
|
"order.paid",
|
|
|
|
|
"sale.created",
|
|
|
|
|
"sale.updated"
|
|
|
|
|
]
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
return {
|
|
|
|
|
"events": events.get(pos_system.lower(), []),
|
|
|
|
|
"format": "JSON",
|
|
|
|
|
"authentication": "signature_verification"
|
|
|
|
|
}
|
2025-10-31 11:54:19 +01:00
|
|
|
|
|
|
|
|
|
|
|
|
|
# ============================================================================
|
|
|
|
|
# Tenant Data Deletion Operations (Internal Service Only)
|
|
|
|
|
# ============================================================================
|
|
|
|
|
|
|
|
|
|
@router.delete(
    route_builder.build_base_route("tenant/{tenant_id}", include_tenant_prefix=False),
    response_model=dict
)
@service_only_access
async def delete_tenant_data(
    tenant_id: str = Path(..., description="Tenant ID to delete data for"),
    current_user: dict = Depends(get_current_user_dep),
    db=Depends(get_db)
):
    """
    Delete all POS data for a tenant (Internal service only)

    Called by the orchestrator during tenant deletion. Permanently removes
    every POS-related record for the tenant: configurations, transactions
    and their items, webhook logs, sync logs, and audit logs.

    **WARNING**: This operation is irreversible!

    Returns:
        Deletion summary with counts of deleted records
    """
    try:
        logger.info("pos.tenant_deletion.api_called", tenant_id=tenant_id)

        result = await POSTenantDeletionService(db).safe_delete_tenant_data(tenant_id)

        # Partial or total failure inside the deletion service is surfaced
        # as a 500 so the orchestrator can retry/alert.
        if not result.success:
            raise HTTPException(
                status_code=500,
                detail=f"Tenant data deletion failed: {', '.join(result.errors)}"
            )

        return {
            "message": "Tenant data deletion completed successfully",
            "summary": result.to_dict()
        }

    except HTTPException:
        raise
    except Exception as e:
        logger.error("pos.tenant_deletion.api_error",
                     tenant_id=tenant_id,
                     error=str(e),
                     exc_info=True)
        raise HTTPException(
            status_code=500,
            detail=f"Failed to delete tenant data: {str(e)}"
        )
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@router.get(
    route_builder.build_base_route("tenant/{tenant_id}/deletion-preview", include_tenant_prefix=False),
    response_model=dict
)
@service_only_access
async def preview_tenant_data_deletion(
    tenant_id: str = Path(..., description="Tenant ID to preview deletion for"),
    current_user: dict = Depends(get_current_user_dep),
    db=Depends(get_db)
):
    """
    Preview what data would be deleted for a tenant (dry-run)

    Reports per-entity record counts without deleting anything.
    Useful for confirming deletion scope before execution, for
    auditing and compliance, and for troubleshooting.

    Returns:
        Dictionary with entity names and their counts
    """
    try:
        logger.info("pos.tenant_deletion.preview_called", tenant_id=tenant_id)

        preview = await POSTenantDeletionService(db).get_tenant_data_preview(tenant_id)

        return {
            "tenant_id": tenant_id,
            "service": "pos",
            "preview": preview,
            "total_records": sum(preview.values()),
            "warning": "These records will be permanently deleted and cannot be recovered"
        }

    except Exception as e:
        logger.error("pos.tenant_deletion.preview_error",
                     tenant_id=tenant_id,
                     error=str(e),
                     exc_info=True)
        raise HTTPException(
            status_code=500,
            detail=f"Failed to preview tenant data deletion: {str(e)}"
        )
|
2025-11-12 15:34:10 +01:00
|
|
|
|
|
|
|
|
|
|
|
|
|
# ================================================================
|
|
|
|
|
# POS TO SALES SYNC ENDPOINTS
|
|
|
|
|
# ================================================================
|
|
|
|
|
|
|
|
|
|
@router.post(
    "/tenants/{tenant_id}/pos/transactions/{transaction_id}/sync-to-sales",
    summary="Sync single transaction to sales",
    description="Manually sync a specific POS transaction to the sales service"
)
async def sync_transaction_to_sales(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    transaction_id: UUID = Path(..., description="Transaction ID to sync"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep)
):
    """
    Sync a single POS transaction to the sales service

    This endpoint:
    - Creates sales records for each item in the transaction
    - Automatically decreases inventory stock
    - Updates sync status flags
    - Returns detailed sync results

    Raises:
        HTTPException 400: the service reports a sync failure
        HTTPException 500: unexpected error during sync
    """
    try:
        # POSTransactionService is already imported at module level;
        # the redundant function-local re-import was removed.
        transaction_service = POSTransactionService()

        result = await transaction_service.sync_transaction_to_sales(
            transaction_id=transaction_id,
            tenant_id=tenant_id
        )

        if result.get("success"):
            logger.info("Transaction synced to sales via API",
                        transaction_id=transaction_id,
                        tenant_id=tenant_id,
                        user_id=current_user.get("user_id"))

            return {
                "success": True,
                "message": "Transaction synced successfully",
                **result
            }
        else:
            logger.warning("Transaction sync failed via API",
                           transaction_id=transaction_id,
                           tenant_id=tenant_id,
                           error=result.get("error"))

            raise HTTPException(
                status_code=400,
                detail=result.get("error", "Failed to sync transaction")
            )

    except HTTPException:
        raise
    except Exception as e:
        logger.error("Failed to sync transaction to sales",
                     error=str(e),
                     transaction_id=transaction_id,
                     tenant_id=tenant_id,
                     exc_info=True)
        raise HTTPException(
            status_code=500,
            detail=f"Failed to sync transaction: {str(e)}"
        )
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@router.post(
    "/tenants/{tenant_id}/pos/transactions/sync-all-to-sales",
    summary="Batch sync unsynced transactions",
    description="Sync all unsynced POS transactions to the sales service"
)
async def sync_all_transactions_to_sales(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    limit: int = Query(50, ge=1, le=200, description="Max transactions to sync in one batch"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep)
):
    """
    Batch sync all unsynced POS transactions to the sales service

    This endpoint:
    - Finds all unsynced completed transactions
    - Syncs each one to the sales service
    - Creates sales records and decreases inventory
    - Returns summary with success/failure counts

    Use this to:
    - Manually trigger sync after POS webhooks are received
    - Recover from sync failures
    - Initial migration of historical POS data
    """
    try:
        # POSTransactionService is already imported at module level;
        # the redundant function-local re-import was removed.
        transaction_service = POSTransactionService()

        result = await transaction_service.sync_unsynced_transactions(
            tenant_id=tenant_id,
            limit=limit
        )

        logger.info("Batch sync completed via API",
                    tenant_id=tenant_id,
                    total=result.get("total_transactions"),
                    synced=result.get("synced"),
                    failed=result.get("failed"),
                    user_id=current_user.get("user_id"))

        return {
            "success": True,
            "message": f"Synced {result.get('synced')} of {result.get('total_transactions')} transactions",
            **result
        }

    except Exception as e:
        logger.error("Failed to batch sync transactions to sales",
                     error=str(e),
                     tenant_id=tenant_id,
                     exc_info=True)
        raise HTTPException(
            status_code=500,
            detail=f"Failed to batch sync transactions: {str(e)}"
        )
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@router.get(
    "/tenants/{tenant_id}/pos/transactions/sync-status",
    summary="Get sync status summary",
    description="Get summary of synced vs unsynced transactions"
)
async def get_sync_status(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep)
):
    """
    Get sync status summary for POS transactions

    Returns counts of:
    - Total completed transactions
    - Transactions synced to sales
    - Transactions pending sync
    plus the overall sync rate percentage.

    NOTE(review): this module defines another `get_sync_status` endpoint
    earlier; both routes are registered at decoration time, but this
    definition shadows the first at module level — consider renaming
    one of them in a follow-up.
    """
    try:
        # POSTransactionService is already imported at module level;
        # the redundant function-local re-import was removed.
        transaction_service = POSTransactionService()

        # Get counts for different sync states
        total_completed = await transaction_service.count_transactions_by_tenant(
            tenant_id=tenant_id,
            status="completed"
        )

        synced = await transaction_service.count_transactions_by_tenant(
            tenant_id=tenant_id,
            status="completed",
            is_synced=True
        )

        unsynced = await transaction_service.count_transactions_by_tenant(
            tenant_id=tenant_id,
            status="completed",
            is_synced=False
        )

        return {
            "total_completed_transactions": total_completed,
            "synced_to_sales": synced,
            "pending_sync": unsynced,
            # Guard against division by zero for tenants with no completed transactions
            "sync_rate": round((synced / total_completed * 100) if total_completed > 0 else 0, 2)
        }

    except Exception as e:
        logger.error("Failed to get sync status",
                     error=str(e),
                     tenant_id=tenant_id,
                     exc_info=True)
        raise HTTPException(
            status_code=500,
            detail=f"Failed to get sync status: {str(e)}"
        )
|