Add POI feature and improve the overall backend implementation

Urtzi Alfaro
2025-11-12 15:34:10 +01:00
parent e8096cd979
commit 5783c7ed05
173 changed files with 16862 additions and 9078 deletions

View File

@@ -0,0 +1,234 @@
"""
POS Sync Service - Business Logic Layer
Handles sync job creation, tracking, and metrics
"""
from typing import Optional, List, Dict, Any
from uuid import UUID, uuid4
from datetime import datetime, timedelta
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, func, and_, desc
import structlog
from app.models.pos_sync import POSSyncLog
from app.core.database import get_db_transaction
logger = structlog.get_logger()
class POSSyncService:
"""Service layer for POS sync operations"""
def __init__(self, db: Optional[AsyncSession] = None):
self.db = db
async def create_sync_job(
self,
tenant_id: UUID,
pos_config_id: UUID,
pos_system: str,
sync_type: str = "manual",
data_types: Optional[List[str]] = None
) -> POSSyncLog:
"""
Create a new sync job
Args:
tenant_id: Tenant UUID
pos_config_id: POS configuration UUID
pos_system: POS system name
sync_type: Type of sync (manual, scheduled, incremental, full)
data_types: List of data types to sync
Returns:
Created sync log
"""
try:
async with get_db_transaction() as db:
sync_log = POSSyncLog(
tenant_id=tenant_id,
pos_config_id=pos_config_id,
pos_system=pos_system,
sync_type=sync_type,
sync_direction="inbound",
data_type=",".join(data_types) if data_types else "transactions",
status="started",
started_at=datetime.utcnow(),
triggered_by="user"
)
db.add(sync_log)
await db.commit()
await db.refresh(sync_log)
logger.info("Sync job created",
sync_id=str(sync_log.id),
tenant_id=str(tenant_id),
pos_system=pos_system,
sync_type=sync_type)
return sync_log
except Exception as e:
logger.error("Failed to create sync job", error=str(e))
raise
async def get_sync_by_id(self, sync_id: UUID) -> Optional[POSSyncLog]:
"""Get sync log by ID"""
try:
async with get_db_transaction() as db:
return await db.get(POSSyncLog, sync_id)
except Exception as e:
logger.error("Failed to get sync log", error=str(e), sync_id=str(sync_id))
raise
async def update_sync_status(
self,
sync_id: UUID,
status: str,
error_message: Optional[str] = None,
stats: Optional[Dict[str, int]] = None
) -> None:
"""Update sync job status"""
try:
async with get_db_transaction() as db:
sync_log = await db.get(POSSyncLog, sync_id)
if sync_log:
sync_log.status = status
sync_log.completed_at = datetime.utcnow()
if sync_log.started_at:
duration = (datetime.utcnow() - sync_log.started_at).total_seconds()
sync_log.duration_seconds = duration
if error_message:
sync_log.error_message = error_message
if stats:
sync_log.records_processed = stats.get("processed", 0)
sync_log.records_created = stats.get("created", 0)
sync_log.records_updated = stats.get("updated", 0)
sync_log.records_failed = stats.get("failed", 0)
await db.commit()
logger.info("Sync status updated",
sync_id=str(sync_id),
status=status)
except Exception as e:
logger.error("Failed to update sync status", error=str(e))
raise
async def get_sync_logs(
self,
tenant_id: UUID,
config_id: Optional[UUID] = None,
status: Optional[str] = None,
sync_type: Optional[str] = None,
limit: int = 50,
offset: int = 0
) -> Dict[str, Any]:
"""
Get sync logs with filtering
Returns:
Dict with logs and pagination info
"""
try:
async with get_db_transaction() as db:
query = select(POSSyncLog).where(POSSyncLog.tenant_id == tenant_id)
# Apply filters
if config_id:
query = query.where(POSSyncLog.pos_config_id == config_id)
if status:
query = query.where(POSSyncLog.status == status)
if sync_type:
query = query.where(POSSyncLog.sync_type == sync_type)
# Get total count
count_query = select(func.count()).select_from(query.subquery())
result = await db.execute(count_query)
total = result.scalar() or 0
# Get paginated results
query = query.order_by(desc(POSSyncLog.started_at)).offset(offset).limit(limit)
result = await db.execute(query)
logs = result.scalars().all()
return {
"logs": [self._sync_log_to_dict(log) for log in logs],
"total": total,
"has_more": offset + len(logs) < total
}
except Exception as e:
logger.error("Failed to get sync logs", error=str(e))
raise
async def calculate_average_duration(
self,
tenant_id: UUID,
pos_config_id: Optional[UUID] = None,
days: int = 30
) -> float:
"""
Calculate average sync duration for recent successful syncs
Args:
tenant_id: Tenant UUID
pos_config_id: Optional POS config filter
days: Number of days to look back
Returns:
Average duration in minutes
"""
try:
async with get_db_transaction() as db:
cutoff_date = datetime.utcnow() - timedelta(days=days)
query = select(func.avg(POSSyncLog.duration_seconds)).where(
and_(
POSSyncLog.tenant_id == tenant_id,
POSSyncLog.status == "completed",
POSSyncLog.started_at >= cutoff_date,
POSSyncLog.duration_seconds.isnot(None)
)
)
if pos_config_id:
query = query.where(POSSyncLog.pos_config_id == pos_config_id)
result = await db.execute(query)
avg_seconds = result.scalar()
if avg_seconds:
return round(float(avg_seconds) / 60, 2) # Convert to minutes
else:
return 0.0
except Exception as e:
logger.error("Failed to calculate average duration", error=str(e))
return 0.0
def _sync_log_to_dict(self, sync_log: POSSyncLog) -> Dict[str, Any]:
"""Convert sync log to dictionary"""
return {
"id": str(sync_log.id),
"tenant_id": str(sync_log.tenant_id),
"pos_config_id": str(sync_log.pos_config_id),
"pos_system": sync_log.pos_system,
"sync_type": sync_log.sync_type,
"data_type": sync_log.data_type,
"status": sync_log.status,
"started_at": sync_log.started_at.isoformat() if sync_log.started_at else None,
"completed_at": sync_log.completed_at.isoformat() if sync_log.completed_at else None,
"duration_seconds": float(sync_log.duration_seconds) if sync_log.duration_seconds else None,
"records_processed": sync_log.records_processed,
"records_created": sync_log.records_created,
"records_updated": sync_log.records_updated,
"records_failed": sync_log.records_failed,
"error_message": sync_log.error_message
}
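
A minimal usage sketch of the sync service above, assuming the module is importable as app.services.pos_sync_service (path assumed) and that get_db_transaction can open sessions in this context; the UUIDs are placeholders:

import asyncio
from uuid import uuid4

from app.services.pos_sync_service import POSSyncService  # assumed module path

async def demo_sync_lifecycle() -> None:
    service = POSSyncService()
    # Open a job, record a successful completion with per-record counts,
    # then page the history for the same tenant.
    sync_log = await service.create_sync_job(
        tenant_id=uuid4(),         # placeholder tenant
        pos_config_id=uuid4(),     # placeholder POS configuration
        pos_system="square",
        sync_type="manual",
        data_types=["transactions", "items"],
    )
    await service.update_sync_status(
        sync_log.id,
        status="completed",
        stats={"processed": 120, "created": 100, "updated": 15, "failed": 5},
    )
    page = await service.get_sync_logs(sync_log.tenant_id, status="completed", limit=10)
    print(page["total"], page["has_more"])

asyncio.run(demo_sync_lifecycle())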

View File

@@ -237,3 +237,246 @@ class POSTransactionService:
except Exception as e:
logger.error("Failed to calculate transaction analytics", error=str(e), tenant_id=tenant_id)
raise
async def sync_transaction_to_sales(
self,
transaction_id: UUID,
tenant_id: UUID
) -> Dict[str, Any]:
"""
Sync a single POS transaction to the sales service
Args:
transaction_id: Transaction UUID
tenant_id: Tenant UUID
Returns:
Dict with sync status and details
"""
try:
from shared.clients.sales_client import SalesServiceClient
from app.core.config import settings
async with get_db_transaction() as db:
transaction_repo = POSTransactionRepository(db)
items_repo = POSTransactionItemRepository(db)
# Get transaction
transaction = await transaction_repo.get_by_id(transaction_id)
if not transaction or transaction.tenant_id != tenant_id:
return {
"success": False,
"error": "Transaction not found or unauthorized"
}
# Check if already synced
if transaction.is_synced_to_sales:
logger.info("Transaction already synced to sales",
transaction_id=transaction_id,
sales_record_id=transaction.sales_record_id)
return {
"success": True,
"already_synced": True,
"sales_record_id": str(transaction.sales_record_id)
}
# Get transaction items
items = await items_repo.get_by_transaction_id(transaction_id)
# Initialize sales client
sales_client = SalesServiceClient(settings, calling_service_name="pos")
# Create sales records for each item
sales_record_ids = []
failed_items = []
for item in items:
try:
sales_data = {
"inventory_product_id": str(item.product_id) if item.product_id else None,
"product_name": item.product_name,
"product_category": "finished_product",
"quantity_sold": float(item.quantity),
"unit_price": float(item.unit_price),
"total_amount": float(item.subtotal),
"sale_date": transaction.transaction_date.strftime("%Y-%m-%d"),
"sales_channel": "pos",
"source": f"pos_sync_{transaction.pos_system}",
"payment_method": transaction.payment_method or "unknown",
"notes": f"POS Transaction: {transaction.external_transaction_id or transaction_id}"
}
result = await sales_client.create_sales_record(
tenant_id=str(tenant_id),
sales_data=sales_data
)
if result and result.get("id"):
sales_record_ids.append(result["id"])
logger.info("Synced item to sales",
transaction_id=transaction_id,
item_id=item.id,
sales_record_id=result["id"])
else:
failed_items.append({
"item_id": str(item.id),
"product_name": item.product_name,
"error": "No sales record ID returned"
})
except Exception as item_error:
logger.error("Failed to sync item to sales",
error=str(item_error),
transaction_id=transaction_id,
item_id=item.id)
failed_items.append({
"item_id": str(item.id),
"product_name": item.product_name,
"error": str(item_error)
})
# Update transaction sync status
if sales_record_ids and len(failed_items) == 0:
# Full success
transaction.is_synced_to_sales = True
transaction.sales_record_id = UUID(sales_record_ids[0]) # Store first record ID
transaction.sync_completed_at = datetime.utcnow()
await db.commit()
logger.info("Transaction fully synced to sales",
transaction_id=transaction_id,
items_synced=len(sales_record_ids))
return {
"success": True,
"items_synced": len(sales_record_ids),
"sales_record_ids": sales_record_ids,
"failed_items": []
}
elif sales_record_ids and len(failed_items) > 0:
# Partial success
transaction.sync_attempted_at = datetime.utcnow()
transaction.sync_error = f"Partial sync: {len(failed_items)} items failed"
transaction.sync_retry_count = (transaction.sync_retry_count or 0) + 1
await db.commit()
logger.warning("Transaction partially synced to sales",
transaction_id=transaction_id,
items_synced=len(sales_record_ids),
items_failed=len(failed_items))
return {
"success": False,
"partial_success": True,
"items_synced": len(sales_record_ids),
"sales_record_ids": sales_record_ids,
"failed_items": failed_items
}
else:
# Complete failure
transaction.sync_attempted_at = datetime.utcnow()
transaction.sync_error = "All items failed to sync"
transaction.sync_retry_count = (transaction.sync_retry_count or 0) + 1
await db.commit()
logger.error("Transaction sync failed completely",
transaction_id=transaction_id,
items_failed=len(failed_items))
return {
"success": False,
"items_synced": 0,
"failed_items": failed_items
}
except Exception as e:
logger.error("Failed to sync transaction to sales",
error=str(e),
transaction_id=transaction_id,
tenant_id=tenant_id)
return {
"success": False,
"error": str(e)
}
async def sync_unsynced_transactions(
self,
tenant_id: UUID,
limit: int = 50
) -> Dict[str, Any]:
"""
Sync all unsynced transactions to the sales service
Args:
tenant_id: Tenant UUID
limit: Maximum number of transactions to sync in one batch
Returns:
Dict with sync summary
"""
try:
async with get_db_transaction() as db:
repository = POSTransactionRepository(db)
# Get unsynced transactions
unsynced_transactions = await repository.get_transactions_by_tenant(
tenant_id=tenant_id,
is_synced=False,
status="completed", # Only sync completed transactions
limit=limit
)
if not unsynced_transactions:
logger.info("No unsynced transactions found", tenant_id=tenant_id)
return {
"success": True,
"total_transactions": 0,
"synced": 0,
"failed": 0
}
synced_count = 0
failed_count = 0
results = []
for transaction in unsynced_transactions:
result = await self.sync_transaction_to_sales(
transaction.id,
tenant_id
)
if result.get("success"):
synced_count += 1
else:
failed_count += 1
results.append({
"transaction_id": str(transaction.id),
"external_id": transaction.external_transaction_id,
"result": result
})
logger.info("Batch sync completed",
tenant_id=tenant_id,
total=len(unsynced_transactions),
synced=synced_count,
failed=failed_count)
return {
"success": True,
"total_transactions": len(unsynced_transactions),
"synced": synced_count,
"failed": failed_count,
"results": results
}
except Exception as e:
logger.error("Failed to batch sync transactions",
error=str(e),
tenant_id=tenant_id)
return {
"success": False,
"error": str(e)
}
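
A sketch of how the batch path above might be driven from a periodic task; the module path, the no-argument constructor, and the scheduler wiring are assumptions:

from uuid import UUID

from app.services.pos_transaction_service import POSTransactionService  # assumed module path

async def nightly_sales_sync(tenant_id: UUID) -> None:
    service = POSTransactionService()
    summary = await service.sync_unsynced_transactions(tenant_id, limit=100)
    # Surface per-transaction failures so the batch can be retried or inspected.
    if summary.get("failed"):
        for entry in summary.get("results", []):
            if not entry["result"].get("success"):
                print("sync failed:", entry["transaction_id"], entry["result"].get("error"))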

View File

@@ -0,0 +1,409 @@
"""
POS Webhook Service - Business Logic Layer
Handles webhook processing, signature verification, and logging
"""
from typing import Optional, Dict, Any, Tuple
from uuid import UUID
import structlog
import hashlib
import hmac
import base64
import json
from datetime import datetime
from sqlalchemy.ext.asyncio import AsyncSession
from app.models.pos_webhook import POSWebhookLog
from app.repositories.pos_config_repository import POSConfigurationRepository
from app.core.database import get_db_transaction
logger = structlog.get_logger()
class POSWebhookService:
"""Service layer for POS webhook operations"""
def __init__(self, db: Optional[AsyncSession] = None):
self.db = db
async def verify_webhook_signature(
self,
pos_system: str,
payload: str,
signature: str,
webhook_secret: str
) -> bool:
"""
Verify webhook signature based on POS system
Args:
pos_system: POS system name (square, toast, lightspeed)
payload: Raw webhook payload
signature: Signature from webhook headers
webhook_secret: Secret key from POS configuration
Returns:
True if signature is valid, False otherwise
"""
try:
if pos_system.lower() == "square":
return self._verify_square_signature(payload, signature, webhook_secret)
elif pos_system.lower() == "toast":
return self._verify_toast_signature(payload, signature, webhook_secret)
elif pos_system.lower() == "lightspeed":
return self._verify_lightspeed_signature(payload, signature, webhook_secret)
else:
logger.warning("Unknown POS system for signature verification", pos_system=pos_system)
return False
except Exception as e:
logger.error("Signature verification failed", error=str(e), pos_system=pos_system)
return False
def _verify_square_signature(self, payload: str, signature: str, secret: str) -> bool:
"""Verify Square webhook signature using HMAC-SHA256"""
try:
# Square signs the notification URL concatenated with the request body
# Format: <notification_url> + <request_body>
# For simplicity, this helper verifies the body only; a fuller check is sketched after this method
expected_signature = hmac.new(
secret.encode('utf-8'),
payload.encode('utf-8'),
hashlib.sha256
).digest()
# Square sends base64-encoded signature
expected_b64 = base64.b64encode(expected_signature).decode('utf-8')
return hmac.compare_digest(signature, expected_b64)
except Exception as e:
logger.error("Square signature verification error", error=str(e))
return False
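# A hedged sketch of the full Square check, assuming the caller passes the
# registered notification URL (the `notification_url` parameter below is
# hypothetical and not part of this service's current method signatures):
#
#   def _verify_square_signature_full(self, notification_url: str, payload: str,
#                                     signature: str, secret: str) -> bool:
#       string_to_sign = notification_url + payload
#       expected = base64.b64encode(
#           hmac.new(secret.encode('utf-8'),
#                    string_to_sign.encode('utf-8'),
#                    hashlib.sha256).digest()
#       ).decode('utf-8')
#       return hmac.compare_digest(signature, expected)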
def _verify_toast_signature(self, payload: str, signature: str, secret: str) -> bool:
"""Verify Toast webhook signature using HMAC-SHA256"""
try:
expected_signature = hmac.new(
secret.encode('utf-8'),
payload.encode('utf-8'),
hashlib.sha256
).hexdigest()
return hmac.compare_digest(signature, expected_signature)
except Exception as e:
logger.error("Toast signature verification error", error=str(e))
return False
def _verify_lightspeed_signature(self, payload: str, signature: str, secret: str) -> bool:
"""Verify Lightspeed webhook signature using HMAC-SHA256"""
try:
expected_signature = hmac.new(
secret.encode('utf-8'),
payload.encode('utf-8'),
hashlib.sha256
).hexdigest()
return hmac.compare_digest(signature.lower(), expected_signature.lower())
except Exception as e:
logger.error("Lightspeed signature verification error", error=str(e))
return False
async def extract_tenant_id_from_payload(
self,
pos_system: str,
parsed_payload: Dict[str, Any]
) -> Optional[UUID]:
"""
Extract tenant_id from webhook payload by matching POS system identifiers
Args:
pos_system: POS system name
parsed_payload: Parsed JSON payload
Returns:
tenant_id if found, None otherwise
"""
try:
# Extract POS-specific identifiers
pos_identifier = None
if pos_system.lower() == "square":
# Square uses merchant_id or location_id
pos_identifier = (
parsed_payload.get("merchant_id") or
parsed_payload.get("data", {}).get("object", {}).get("merchant_id") or
parsed_payload.get("location_id")
)
elif pos_system.lower() == "toast":
# Toast uses restaurantGuid
pos_identifier = (
parsed_payload.get("restaurantGuid") or
parsed_payload.get("restaurant", {}).get("guid")
)
elif pos_system.lower() == "lightspeed":
# Lightspeed uses accountID
pos_identifier = (
parsed_payload.get("accountID") or
parsed_payload.get("account", {}).get("id")
)
if not pos_identifier:
logger.warning("Could not extract POS identifier from payload", pos_system=pos_system)
return None
# Query database to find tenant_id by POS identifier
async with get_db_transaction() as db:
repository = POSConfigurationRepository(db)
config = await repository.get_by_pos_identifier(pos_system, pos_identifier)
if config:
return config.tenant_id
else:
logger.warning("No tenant found for POS identifier",
pos_system=pos_system,
identifier=pos_identifier)
return None
except Exception as e:
logger.error("Failed to extract tenant_id", error=str(e), pos_system=pos_system)
return None
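# Illustrative Square payload shape this extraction handles (field names follow
# Square's webhook envelope; all values are placeholders):
#
#   {
#       "merchant_id": "MLEFBHHSJGVHD",
#       "type": "payment.updated",
#       "event_id": "6a8f5f28-...",
#       "created_at": "2025-11-12T14:30:00Z",
#       "data": {"type": "payment", "id": "...", "object": {"merchant_id": "..."}}
#   }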
async def log_webhook(
self,
pos_system: str,
webhook_type: str,
method: str,
url_path: str,
query_params: Dict[str, Any],
headers: Dict[str, str],
raw_payload: str,
payload_size: int,
content_type: Optional[str],
signature: Optional[str],
is_signature_valid: Optional[bool],
source_ip: Optional[str],
event_id: Optional[str] = None,
tenant_id: Optional[UUID] = None,
transaction_id: Optional[str] = None,
order_id: Optional[str] = None
) -> POSWebhookLog:
"""
Create a webhook log entry in the database
Returns:
Created POSWebhookLog instance
"""
try:
async with get_db_transaction() as db:
webhook_log = POSWebhookLog(
tenant_id=tenant_id,
pos_system=pos_system,
webhook_type=webhook_type,
method=method,
url_path=url_path,
query_params=query_params,
headers=headers,
raw_payload=raw_payload,
payload_size=payload_size,
content_type=content_type,
signature=signature,
is_signature_valid=is_signature_valid,
source_ip=source_ip,
status="received",
event_id=event_id,
transaction_id=transaction_id,
order_id=order_id,
received_at=datetime.utcnow(),
user_agent=headers.get("user-agent"),
forwarded_for=headers.get("x-forwarded-for"),
request_id=headers.get("x-request-id")
)
db.add(webhook_log)
await db.commit()
await db.refresh(webhook_log)
logger.info("Webhook logged to database",
webhook_log_id=str(webhook_log.id),
pos_system=pos_system,
webhook_type=webhook_type,
tenant_id=str(tenant_id) if tenant_id else None)
return webhook_log
except Exception as e:
logger.error("Failed to log webhook", error=str(e), pos_system=pos_system)
raise
async def get_webhook_secret(
self,
pos_system: str,
tenant_id: Optional[UUID] = None
) -> Optional[str]:
"""
Get webhook secret for signature verification
Args:
pos_system: POS system name
tenant_id: Optional tenant_id if known
Returns:
Webhook secret if found
"""
try:
async with get_db_transaction() as db:
repository = POSConfigurationRepository(db)
if tenant_id:
# Get active config for tenant and POS system
configs = await repository.get_configurations_by_tenant(
tenant_id=tenant_id,
pos_system=pos_system,
is_active=True,
skip=0,
limit=1
)
if configs:
return configs[0].webhook_secret
return None
except Exception as e:
logger.error("Failed to get webhook secret", error=str(e))
return None
async def update_webhook_status(
self,
webhook_log_id: UUID,
status: str,
error_message: Optional[str] = None,
processing_duration_ms: Optional[int] = None
) -> None:
"""Update webhook processing status"""
try:
async with get_db_transaction() as db:
webhook_log = await db.get(POSWebhookLog, webhook_log_id)
if webhook_log:
webhook_log.status = status
webhook_log.processing_completed_at = datetime.utcnow()
if error_message:
webhook_log.error_message = error_message
webhook_log.retry_count = (webhook_log.retry_count or 0) + 1
if processing_duration_ms is not None:
webhook_log.processing_duration_ms = processing_duration_ms
await db.commit()
logger.info("Webhook status updated",
webhook_log_id=str(webhook_log_id),
status=status)
except Exception as e:
logger.error("Failed to update webhook status", error=str(e))
raise
async def check_duplicate_webhook(
self,
pos_system: str,
event_id: str,
tenant_id: Optional[UUID] = None
) -> Tuple[bool, Optional[UUID]]:
"""
Check if webhook has already been processed
Returns:
Tuple of (is_duplicate, original_webhook_id)
"""
try:
async with get_db_transaction() as db:
from sqlalchemy import select
query = select(POSWebhookLog).where(
POSWebhookLog.pos_system == pos_system,
POSWebhookLog.event_id == event_id,
POSWebhookLog.status == "processed"
)
if tenant_id:
query = query.where(POSWebhookLog.tenant_id == tenant_id)
result = await db.execute(query)
existing = result.scalars().first()
if existing:
logger.info("Duplicate webhook detected",
pos_system=pos_system,
event_id=event_id,
original_id=str(existing.id))
return True, existing.id
return False, None
except Exception as e:
logger.error("Failed to check duplicate webhook", error=str(e))
return False, None
def parse_webhook_event_details(
self,
pos_system: str,
parsed_payload: Dict[str, Any]
) -> Dict[str, Any]:
"""
Extract standardized event details from POS-specific payload
Returns:
Dict with event_id, webhook_type, transaction_id, order_id, etc.
"""
details = {
"event_id": None,
"webhook_type": None,
"transaction_id": None,
"order_id": None,
"customer_id": None,
"event_timestamp": None
}
try:
if pos_system.lower() == "square":
details["event_id"] = parsed_payload.get("event_id")
details["webhook_type"] = parsed_payload.get("type")
data = parsed_payload.get("data", {}).get("object", {})
details["transaction_id"] = data.get("id")
details["order_id"] = data.get("order_id")
details["customer_id"] = data.get("customer_id")
created_at = parsed_payload.get("created_at")
if created_at:
details["event_timestamp"] = datetime.fromisoformat(created_at.replace('Z', '+00:00'))
elif pos_system.lower() == "toast":
details["event_id"] = parsed_payload.get("guid")
details["webhook_type"] = parsed_payload.get("eventType")
details["order_id"] = parsed_payload.get("entityId")
created_at = parsed_payload.get("eventTime")
if created_at:
try:
# eventTime is assumed to be epoch milliseconds
details["event_timestamp"] = datetime.fromtimestamp(created_at / 1000)
except (TypeError, ValueError, OSError):
pass
elif pos_system.lower() == "lightspeed":
details["event_id"] = parsed_payload.get("id")
details["webhook_type"] = parsed_payload.get("action")
details["transaction_id"] = parsed_payload.get("objectID")
created_at = parsed_payload.get("createdAt")
if created_at:
details["event_timestamp"] = datetime.fromisoformat(created_at.replace('Z', '+00:00'))
return details
except Exception as e:
logger.error("Failed to parse webhook event details", error=str(e))
return details
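
A hedged end-to-end sketch of an endpoint composing the methods above (the FastAPI route, module path, and the signature header name are illustrative; real header names vary by provider):

import json
import time

from fastapi import APIRouter, Request  # assumed web framework

from app.services.pos_webhook_service import POSWebhookService  # assumed module path

router = APIRouter()

@router.post("/webhooks/{pos_system}")
async def receive_webhook(pos_system: str, request: Request):
    service = POSWebhookService()
    raw = (await request.body()).decode("utf-8")
    payload = json.loads(raw)
    signature = request.headers.get("x-webhook-signature", "")  # header name varies by provider

    details = service.parse_webhook_event_details(pos_system, payload)
    tenant_id = await service.extract_tenant_id_from_payload(pos_system, payload)
    secret = await service.get_webhook_secret(pos_system, tenant_id)
    is_valid = bool(secret) and await service.verify_webhook_signature(
        pos_system, raw, signature, secret
    )

    # Drop exact replays before doing any work.
    if details["event_id"]:
        is_dup, _ = await service.check_duplicate_webhook(
            pos_system, details["event_id"], tenant_id
        )
        if is_dup:
            return {"status": "duplicate"}

    started = time.monotonic()
    log = await service.log_webhook(
        pos_system=pos_system,
        webhook_type=details["webhook_type"] or "unknown",
        method="POST",
        url_path=str(request.url.path),
        query_params=dict(request.query_params),
        headers=dict(request.headers),
        raw_payload=raw,
        payload_size=len(raw),
        content_type=request.headers.get("content-type"),
        signature=signature,
        is_signature_valid=is_valid,
        source_ip=request.client.host if request.client else None,
        event_id=details["event_id"],
        tenant_id=tenant_id,
        transaction_id=details["transaction_id"],
        order_id=details["order_id"],
    )
    # ...hand off to the actual processor here, then record the outcome...
    await service.update_webhook_status(
        log.id,
        "processed" if is_valid else "failed",
        error_message=None if is_valid else "invalid signature",
        processing_duration_ms=int((time.monotonic() - started) * 1000),
    )
    return {"status": "processed" if is_valid else "rejected"}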