Initial commit - production deployment
58
services/pos/Dockerfile
Normal file
@@ -0,0 +1,58 @@
# =============================================================================
# POS Service Dockerfile - Environment-Configurable Base Images
# =============================================================================
# Build arguments for registry configuration:
# - BASE_REGISTRY: Registry URL (default: docker.io for Docker Hub)
# - PYTHON_IMAGE: Python image name and tag (default: python:3.11-slim)
# =============================================================================

ARG BASE_REGISTRY=docker.io
ARG PYTHON_IMAGE=python:3.11-slim

FROM ${BASE_REGISTRY}/${PYTHON_IMAGE} AS shared
WORKDIR /shared
COPY shared/ /shared/

ARG BASE_REGISTRY=docker.io
ARG PYTHON_IMAGE=python:3.11-slim
FROM ${BASE_REGISTRY}/${PYTHON_IMAGE}

WORKDIR /app

# Install system dependencies
RUN apt-get update && apt-get install -y \
    gcc \
    curl \
    && rm -rf /var/lib/apt/lists/*

# Copy requirements
COPY shared/requirements-tracing.txt /tmp/
COPY services/pos/requirements.txt .

# Install Python dependencies
RUN pip install --no-cache-dir -r /tmp/requirements-tracing.txt
RUN pip install --no-cache-dir -r requirements.txt

# Copy shared libraries from the shared stage
COPY --from=shared /shared /app/shared

# Copy application code
COPY services/pos/ .

# Add shared libraries to Python path
ENV PYTHONPATH="/app:/app/shared:${PYTHONPATH:-}"

# Expose port
EXPOSE 8000

# Health check
HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
    CMD curl -f http://localhost:8000/health || exit 1

# Run application
CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"]
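The HEALTHCHECK above assumes the service answers `GET /health` on port 8000. The real handler presumably lives in `app/main.py`, which this diff does not show; as a hedged sketch, a minimal FastAPI liveness endpoint matching that probe could look like:

```python
from fastapi import FastAPI

app = FastAPI()

@app.get("/health")
async def health() -> dict:
    # Minimal liveness probe matching the Dockerfile HEALTHCHECK
    return {"status": "ok", "service": "pos"}
```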
899
services/pos/README.md
Normal file
@@ -0,0 +1,899 @@
# POS Service

## Overview

The **POS (Point of Sale) Service** integrates with popular POS systems like Square, Toast, and Lightspeed to automatically sync sales transactions into Bakery-IA. It eliminates manual sales data entry, ensures real-time sales tracking, and provides the foundation for accurate demand forecasting. This service bridges the gap between retail operations and business intelligence, making the platform immediately valuable for bakeries already using modern POS systems.

## Key Features

### Multi-POS Integration
- **Square Integration** - Full API integration with Square POS
- **Toast Integration** - Restaurant POS system integration
- **Lightspeed Integration** - Retail POS system integration
- **Webhook Support** - Real-time transaction sync via webhooks
- **OAuth Authentication** - Secure POS account linking
- **Multi-Location Support** - Handle multiple store locations
- **Automatic Reconnection** - Handle API token expiration gracefully

### Sales Data Synchronization
- **Real-Time Sync** - Transactions sync within seconds
- **Historical Import** - Import past sales data on initial setup
- **Product Mapping** - Map POS products to Bakery-IA products
- **Transaction Deduplication** - Prevent duplicate entries
- **Data Validation** - Ensure data quality and accuracy
- **Sync Status Tracking** - Monitor sync health and errors
- **Manual Sync Trigger** - Force sync on demand

### Transaction Processing
- **Line Item Details** - Product, quantity, and price per transaction
- **Payment Methods** - Cash, card, and contactless tracking
- **Customer Data** - Customer name and email, when available
- **Discounts & Taxes** - Full transaction details preserved
- **Refunds & Voids** - Handle transaction cancellations
- **Tips & Gratuities** - Track additional revenue
- **Transaction Metadata** - Store name, cashier, timestamp

### Product Catalog Sync
- **Product Import** - Sync product catalog from POS
- **Category Mapping** - Map POS categories to Bakery-IA
- **Price Sync** - Keep prices updated
- **Product Updates** - Detect new products automatically
- **SKU Matching** - Match by SKU, name, or manual mapping
- **Inventory Integration** - Link POS products to inventory items

### Analytics & Monitoring
- **Sync Dashboard** - Monitor sync status across POS systems
- **Error Tracking** - Log and alert on sync failures
- **Data Quality Metrics** - Track unmapped products and errors
- **Sync Performance** - Monitor sync speed and latency
- **Transaction Volume** - Daily/hourly transaction counts
- **API Health Monitoring** - Track POS API availability

### Configuration Management
- **POS Account Linking** - Connect POS accounts via OAuth
- **Mapping Configuration** - Product and category mappings
- **Sync Schedule** - Configure sync frequency
- **Webhook Management** - Register/update webhook endpoints
- **API Credentials** - Secure storage of API keys
- **Multi-Tenant Isolation** - Separate POS accounts per tenant

## Business Value

### For Bakery Owners
- **Zero Manual Entry** - Sales automatically sync to Bakery-IA
- **Real-Time Visibility** - Know sales performance instantly
- **Accurate Forecasting** - ML models use actual sales data
- **Time Savings** - Eliminate daily sales data entry
- **Data Accuracy** - 99.9%+ accuracy vs. error-prone manual entry
- **Immediate ROI** - Value from day one of POS connection

### Quantifiable Impact
- **Time Savings**: 5-8 hours/week by eliminating manual entry
- **Data Accuracy**: 99.9%+ vs. 85-95% for manual entry
- **Forecast Improvement**: 10-20% better accuracy with real data
- **Revenue Tracking**: Real-time vs. end-of-day manual reconciliation
- **Setup Time**: 15 minutes to connect vs. hours of manual entry
- **Error Elimination**: Zero transcription errors

### For Sales Staff
- **No Extra Work** - POS integration is invisible to staff
- **Focus on Customers** - No post-sale data entry
- **Instant Reporting** - Managers see sales in real time

### For Managers
- **Real-Time Dashboards** - Sales performance updates live
- **Product Performance** - Know what's selling instantly
- **Multi-Store Visibility** - All locations in one view
- **Trend Detection** - Spot patterns as they emerge

## Technology Stack

- **Framework**: FastAPI (Python 3.11+) - Async web framework
- **Database**: PostgreSQL 17 - Transaction and mapping data
- **Caching**: Redis 7.4 - Transaction deduplication cache (sketched below)
- **Messaging**: RabbitMQ 4.1 - Transaction event publishing
- **HTTP Client**: HTTPX - Async API calls to POS systems
- **OAuth**: Authlib - OAuth 2.0 flows for POS authentication
- **Webhooks**: FastAPI webhook receivers
- **Logging**: Structlog - Structured JSON logging
- **Metrics**: Prometheus Client - Sync metrics
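A hedged sketch of how the Redis deduplication cache could gate transaction inserts, assuming `redis.asyncio` and a hypothetical key scheme; the authoritative dedup remains the `UNIQUE(tenant_id, pos_provider, pos_transaction_id)` constraint shown in the schema below:

```python
import redis.asyncio as redis

async def is_duplicate_transaction(r: redis.Redis, tenant_id: str, pos_txn_id: str) -> bool:
    """Return True if this POS transaction was already seen recently."""
    key = f"pos:seen:{tenant_id}:{pos_txn_id}"  # hypothetical key scheme
    # SET NX returns None when the key already exists, so None means "duplicate"
    was_set = await r.set(key, "1", nx=True, ex=7 * 24 * 3600)
    return was_set is None
```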
## API Endpoints (Key Routes)

### POS Account Management
- `GET /api/v1/pos/accounts` - List connected POS accounts
- `POST /api/v1/pos/accounts` - Connect new POS account
- `GET /api/v1/pos/accounts/{account_id}` - Get account details
- `PUT /api/v1/pos/accounts/{account_id}` - Update account
- `DELETE /api/v1/pos/accounts/{account_id}` - Disconnect account
- `POST /api/v1/pos/accounts/{account_id}/reconnect` - Refresh OAuth tokens

### OAuth & Authentication
- `GET /api/v1/pos/oauth/square/authorize` - Start Square OAuth flow
- `GET /api/v1/pos/oauth/square/callback` - Square OAuth callback
- `GET /api/v1/pos/oauth/toast/authorize` - Start Toast OAuth flow
- `GET /api/v1/pos/oauth/toast/callback` - Toast OAuth callback
- `GET /api/v1/pos/oauth/lightspeed/authorize` - Start Lightspeed OAuth flow
- `GET /api/v1/pos/oauth/lightspeed/callback` - Lightspeed OAuth callback

### Synchronization
- `POST /api/v1/pos/sync/{account_id}` - Trigger manual sync (see the client sketch below)
- `POST /api/v1/pos/sync/{account_id}/historical` - Import historical data
- `GET /api/v1/pos/sync/{account_id}/status` - Get sync status
- `GET /api/v1/pos/sync/{account_id}/history` - Sync history log

### Product Mapping
- `GET /api/v1/pos/mappings` - List product mappings
- `POST /api/v1/pos/mappings` - Create product mapping
- `PUT /api/v1/pos/mappings/{mapping_id}` - Update mapping
- `DELETE /api/v1/pos/mappings/{mapping_id}` - Delete mapping
- `GET /api/v1/pos/mappings/unmapped` - List unmapped POS products
- `POST /api/v1/pos/mappings/auto-map` - Auto-map by name/SKU

### Webhooks
- `POST /api/v1/pos/webhooks/square` - Square webhook receiver
- `POST /api/v1/pos/webhooks/toast` - Toast webhook receiver
- `POST /api/v1/pos/webhooks/lightspeed` - Lightspeed webhook receiver
- `POST /api/v1/pos/accounts/{account_id}/webhooks/register` - Register webhooks

### Analytics
- `GET /api/v1/pos/analytics/dashboard` - POS sync dashboard
- `GET /api/v1/pos/analytics/sync-health` - Sync health metrics
- `GET /api/v1/pos/analytics/unmapped-revenue` - Revenue from unmapped products
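As an illustration of the sync routes above, a minimal client call could look like this. Hedged: the host, token, and account ID are placeholders, and the bearer-token auth scheme is an assumption not confirmed by this README:

```python
import httpx

async def trigger_manual_sync(base_url: str, token: str, account_id: str) -> dict:
    """Trigger a manual sync, then read its status once (illustrative only)."""
    headers = {"Authorization": f"Bearer {token}"}  # assumed auth scheme
    async with httpx.AsyncClient(base_url=base_url, headers=headers) as client:
        resp = await client.post(f"/api/v1/pos/sync/{account_id}")
        resp.raise_for_status()
        status = await client.get(f"/api/v1/pos/sync/{account_id}/status")
        return status.json()

# Usage: asyncio.run(trigger_manual_sync("https://bakery-ia.example", "TOKEN", "ACCOUNT_UUID"))
```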
## Database Schema

### Main Tables

**pos_accounts**
```sql
CREATE TABLE pos_accounts (
    id UUID PRIMARY KEY,
    tenant_id UUID NOT NULL,
    pos_provider VARCHAR(50) NOT NULL, -- square, toast, lightspeed
    account_name VARCHAR(255),
    location_id VARCHAR(255), -- POS location identifier
    location_name VARCHAR(255),

    -- OAuth credentials (encrypted)
    access_token TEXT,
    refresh_token TEXT,
    token_expires_at TIMESTAMP,
    merchant_id VARCHAR(255),

    -- Sync configuration
    sync_enabled BOOLEAN DEFAULT TRUE,
    sync_frequency_minutes INTEGER DEFAULT 15,
    last_sync_at TIMESTAMP,
    last_successful_sync_at TIMESTAMP,
    next_sync_at TIMESTAMP,

    -- Webhook configuration
    webhook_id VARCHAR(255),
    webhook_url VARCHAR(500),
    webhook_signature_key TEXT,

    -- Status
    status VARCHAR(50) DEFAULT 'active', -- active, disconnected, error
    error_message TEXT,
    error_count INTEGER DEFAULT 0,
    last_error_at TIMESTAMP,

    created_at TIMESTAMP DEFAULT NOW(),
    updated_at TIMESTAMP DEFAULT NOW(),
    UNIQUE(tenant_id, pos_provider, location_id)
);
```

**pos_transactions**
```sql
CREATE TABLE pos_transactions (
    id UUID PRIMARY KEY,
    tenant_id UUID NOT NULL,
    pos_account_id UUID REFERENCES pos_accounts(id) ON DELETE CASCADE,
    pos_transaction_id VARCHAR(255) NOT NULL, -- Original POS transaction ID
    pos_provider VARCHAR(50) NOT NULL,

    -- Transaction details
    transaction_date TIMESTAMP NOT NULL,
    transaction_type VARCHAR(50) DEFAULT 'sale', -- sale, refund, void
    status VARCHAR(50), -- completed, pending, failed

    -- Financial
    subtotal DECIMAL(10, 2) NOT NULL,
    tax_amount DECIMAL(10, 2) DEFAULT 0.00,
    discount_amount DECIMAL(10, 2) DEFAULT 0.00,
    tip_amount DECIMAL(10, 2) DEFAULT 0.00,
    total_amount DECIMAL(10, 2) NOT NULL,
    currency VARCHAR(10) DEFAULT 'EUR',

    -- Payment
    payment_method VARCHAR(50), -- cash, card, contactless, mobile
    card_last_four VARCHAR(4),
    card_brand VARCHAR(50),

    -- Customer (if available)
    customer_name VARCHAR(255),
    customer_email VARCHAR(255),
    customer_phone VARCHAR(50),

    -- Metadata
    cashier_name VARCHAR(255),
    device_name VARCHAR(255),
    receipt_number VARCHAR(100),

    -- Processing
    synced_to_sales BOOLEAN DEFAULT FALSE,
    sales_record_id UUID,
    sync_error TEXT,

    created_at TIMESTAMP DEFAULT NOW(),
    UNIQUE(tenant_id, pos_provider, pos_transaction_id)
);
```

**pos_transaction_items**
```sql
CREATE TABLE pos_transaction_items (
    id UUID PRIMARY KEY,
    tenant_id UUID NOT NULL,
    pos_transaction_id UUID REFERENCES pos_transactions(id) ON DELETE CASCADE,
    pos_item_id VARCHAR(255), -- POS product ID

    -- Product details
    product_name VARCHAR(255) NOT NULL,
    product_sku VARCHAR(100),
    category VARCHAR(100),
    quantity DECIMAL(10, 2) NOT NULL,
    unit_price DECIMAL(10, 2) NOT NULL,
    discount_amount DECIMAL(10, 2) DEFAULT 0.00,
    line_total DECIMAL(10, 2) NOT NULL,

    -- Mapping
    mapped_product_id UUID, -- Bakery-IA product ID
    is_mapped BOOLEAN DEFAULT FALSE,

    -- Modifiers (e.g., "Extra frosting")
    modifiers JSONB,

    created_at TIMESTAMP DEFAULT NOW()
);
```

**pos_product_mappings**
```sql
CREATE TABLE pos_product_mappings (
    id UUID PRIMARY KEY,
    tenant_id UUID NOT NULL,
    pos_account_id UUID REFERENCES pos_accounts(id) ON DELETE CASCADE,
    pos_product_id VARCHAR(255) NOT NULL,
    pos_product_name VARCHAR(255) NOT NULL,
    pos_product_sku VARCHAR(100),
    pos_category VARCHAR(100),

    -- Mapping
    bakery_product_id UUID NOT NULL, -- Link to products catalog
    bakery_product_name VARCHAR(255) NOT NULL,

    -- Configuration
    mapping_type VARCHAR(50) DEFAULT 'manual', -- manual, auto, sku
    confidence_score DECIMAL(3, 2), -- For auto-mapping

    created_at TIMESTAMP DEFAULT NOW(),
    updated_at TIMESTAMP DEFAULT NOW(),
    UNIQUE(tenant_id, pos_account_id, pos_product_id)
);
```

**pos_sync_logs**
```sql
CREATE TABLE pos_sync_logs (
    id UUID PRIMARY KEY,
    tenant_id UUID NOT NULL,
    pos_account_id UUID REFERENCES pos_accounts(id) ON DELETE CASCADE,
    sync_started_at TIMESTAMP NOT NULL,
    sync_completed_at TIMESTAMP,
    sync_duration_seconds INTEGER,

    -- Status
    status VARCHAR(50) NOT NULL, -- success, partial, failed
    error_message TEXT,

    -- Metrics
    transactions_fetched INTEGER DEFAULT 0,
    transactions_processed INTEGER DEFAULT 0,
    transactions_failed INTEGER DEFAULT 0,
    new_products_discovered INTEGER DEFAULT 0,
    unmapped_products_count INTEGER DEFAULT 0,

    created_at TIMESTAMP DEFAULT NOW()
);
```

**pos_webhooks**
```sql
CREATE TABLE pos_webhooks (
    id UUID PRIMARY KEY,
    tenant_id UUID NOT NULL,
    pos_account_id UUID REFERENCES pos_accounts(id) ON DELETE CASCADE,
    webhook_event_id VARCHAR(255), -- POS webhook event ID
    event_type VARCHAR(100) NOT NULL, -- payment.created, order.updated, etc.
    event_data JSONB NOT NULL,
    received_at TIMESTAMP DEFAULT NOW(),
    processed_at TIMESTAMP,
    processing_status VARCHAR(50) DEFAULT 'pending', -- pending, processed, failed
    error_message TEXT,
    retry_count INTEGER DEFAULT 0
);
```

### Indexes for Performance
```sql
CREATE INDEX idx_pos_accounts_tenant ON pos_accounts(tenant_id, status);
CREATE INDEX idx_pos_transactions_tenant_date ON pos_transactions(tenant_id, transaction_date DESC);
CREATE INDEX idx_pos_transactions_account ON pos_transactions(pos_account_id);
CREATE INDEX idx_pos_transactions_synced ON pos_transactions(tenant_id, synced_to_sales) WHERE synced_to_sales = FALSE;
CREATE INDEX idx_pos_transaction_items_transaction ON pos_transaction_items(pos_transaction_id);
CREATE INDEX idx_pos_transaction_items_unmapped ON pos_transaction_items(tenant_id, is_mapped) WHERE is_mapped = FALSE;
CREATE INDEX idx_pos_mappings_account ON pos_product_mappings(pos_account_id);
CREATE INDEX idx_pos_sync_logs_account_date ON pos_sync_logs(pos_account_id, sync_started_at DESC);
```
## Business Logic Examples

### Square Transaction Sync
```python
from datetime import datetime, timedelta
from decimal import Decimal
from typing import Optional
from uuid import UUID

import httpx
from sqlalchemy import select


async def sync_square_transactions(pos_account_id: UUID, start_date: Optional[datetime] = None) -> dict:
    """
    Sync transactions from Square POS.
    """
    # Get POS account
    pos_account = await get_pos_account(pos_account_id)

    if pos_account.pos_provider != 'square':
        raise ValueError("Not a Square account")

    # Check token expiration
    if pos_account.token_expires_at and pos_account.token_expires_at < datetime.utcnow():
        await refresh_square_oauth_token(pos_account)

    # Create sync log
    sync_log = POSSyncLog(
        tenant_id=pos_account.tenant_id,
        pos_account_id=pos_account.id,
        sync_started_at=datetime.utcnow(),
        status='in_progress'
    )
    db.add(sync_log)
    await db.flush()

    try:
        # Default to last sync time or 24 hours ago
        if not start_date:
            start_date = pos_account.last_successful_sync_at or (datetime.utcnow() - timedelta(days=1))

        # Call Square API (ListPayments is a GET with query parameters)
        async with httpx.AsyncClient() as client:
            response = await client.get(
                "https://connect.squareup.com/v2/payments",
                headers={
                    "Authorization": f"Bearer {pos_account.access_token}",
                    "Content-Type": "application/json"
                },
                params={
                    "location_id": pos_account.location_id,
                    "begin_time": start_date.isoformat(),
                    "end_time": datetime.utcnow().isoformat(),
                    "limit": 100
                }
            )

            if response.status_code != 200:
                raise Exception(f"Square API error: {response.text}")

            data = response.json()
            payments = data.get('payments', [])

            transactions_processed = 0
            transactions_failed = 0

            for payment in payments:
                try:
                    # Check for duplicate
                    existing = (await db.execute(
                        select(POSTransaction).where(
                            POSTransaction.tenant_id == pos_account.tenant_id,
                            POSTransaction.pos_transaction_id == payment['id']
                        )
                    )).scalars().first()

                    if existing:
                        continue  # Skip duplicates

                    # Create transaction
                    transaction = POSTransaction(
                        tenant_id=pos_account.tenant_id,
                        pos_account_id=pos_account.id,
                        pos_transaction_id=payment['id'],
                        pos_provider='square',
                        transaction_date=datetime.fromisoformat(payment['created_at'].replace('Z', '+00:00')),
                        transaction_type='sale' if payment['status'] == 'COMPLETED' else 'pending',
                        status=payment['status'].lower(),
                        total_amount=Decimal(payment['amount_money']['amount']) / 100,
                        currency=payment['amount_money']['currency'],
                        payment_method=payment.get('card_details', {}).get('card', {}).get('card_brand', 'unknown').lower(),
                        card_last_four=payment.get('card_details', {}).get('card', {}).get('last_4'),
                        receipt_number=payment.get('receipt_number')
                    )
                    db.add(transaction)
                    await db.flush()

                    # Get line items from the associated order
                    if 'order_id' in payment:
                        order_response = await client.get(
                            f"https://connect.squareup.com/v2/orders/{payment['order_id']}",
                            headers={"Authorization": f"Bearer {pos_account.access_token}"}
                        )

                        if order_response.status_code == 200:
                            order = order_response.json().get('order', {})
                            line_items = order.get('line_items', [])

                            for item in line_items:
                                # Create transaction item
                                pos_item = POSTransactionItem(
                                    tenant_id=pos_account.tenant_id,
                                    pos_transaction_id=transaction.id,
                                    pos_item_id=item.get('catalog_object_id'),
                                    product_name=item['name'],
                                    quantity=Decimal(item['quantity']),
                                    unit_price=Decimal(item['base_price_money']['amount']) / 100,
                                    line_total=Decimal(item['total_money']['amount']) / 100
                                )

                                # Check for mapping
                                mapping = await get_product_mapping(
                                    pos_account.id,
                                    item.get('catalog_object_id')
                                )
                                if mapping:
                                    pos_item.mapped_product_id = mapping.bakery_product_id
                                    pos_item.is_mapped = True

                                db.add(pos_item)

                    # Sync to sales service
                    await sync_transaction_to_sales(transaction)

                    transactions_processed += 1

                except Exception as e:
                    logger.error("Failed to process Square payment",
                                 payment_id=payment.get('id'),
                                 error=str(e))
                    transactions_failed += 1
                    continue

        # Update sync log
        sync_log.sync_completed_at = datetime.utcnow()
        sync_log.sync_duration_seconds = int((sync_log.sync_completed_at - sync_log.sync_started_at).total_seconds())
        sync_log.status = 'success' if transactions_failed == 0 else 'partial'
        sync_log.transactions_fetched = len(payments)
        sync_log.transactions_processed = transactions_processed
        sync_log.transactions_failed = transactions_failed

        # Update POS account
        pos_account.last_sync_at = datetime.utcnow()
        pos_account.last_successful_sync_at = datetime.utcnow()
        pos_account.error_count = 0

        await db.commit()

        # Publish sync completed event
        await publish_event('pos', 'pos.sync_completed', {
            'tenant_id': str(pos_account.tenant_id),
            'pos_account_id': str(pos_account.id),
            'transactions_processed': transactions_processed,
            'transactions_failed': transactions_failed
        })

        return {
            'status': 'success',
            'transactions_processed': transactions_processed,
            'transactions_failed': transactions_failed
        }

    except Exception as e:
        sync_log.status = 'failed'
        sync_log.error_message = str(e)
        sync_log.sync_completed_at = datetime.utcnow()

        pos_account.error_count += 1
        pos_account.last_error_at = datetime.utcnow()
        pos_account.error_message = str(e)

        await db.commit()

        logger.error("Square sync failed",
                     pos_account_id=str(pos_account_id),
                     error=str(e))

        raise
```
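`refresh_square_oauth_token` is referenced above but not shown in this diff. A hedged sketch, assuming Square's standard OAuth token endpoint and the credential fields from the `pos_accounts` schema (the real helper may differ):

```python
import httpx

async def refresh_square_oauth_token(pos_account) -> None:
    """Refresh an expired Square access token (sketch; persistence and error handling omitted)."""
    async with httpx.AsyncClient() as client:
        resp = await client.post(
            "https://connect.squareup.com/oauth2/token",
            json={
                "client_id": SQUARE_APP_ID,          # from the environment (see Configuration)
                "client_secret": SQUARE_APP_SECRET,  # from the environment (see Configuration)
                "grant_type": "refresh_token",
                "refresh_token": pos_account.refresh_token,
            },
        )
        resp.raise_for_status()
        payload = resp.json()
        pos_account.access_token = payload["access_token"]
        # Square returns expires_at as an RFC 3339 timestamp string; convert before storing
        pos_account.token_expires_at = payload.get("expires_at")
```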
### Auto Product Mapping
```python
from decimal import Decimal
from difflib import SequenceMatcher
from uuid import UUID

from sqlalchemy import select, update


async def auto_map_products(pos_account_id: UUID) -> dict:
    """
    Automatically map POS products to Bakery-IA products using name/SKU matching.
    """
    # Resolve the POS account first (needed for tenant scoping below)
    pos_account = await get_pos_account(pos_account_id)

    # Get unmapped transaction items for this account
    # (items carry no pos_account_id column, so join through pos_transactions)
    unmapped_items = (await db.execute(
        select(POSTransactionItem)
        .join(POSTransaction, POSTransactionItem.pos_transaction_id == POSTransaction.id)
        .where(
            POSTransaction.pos_account_id == pos_account_id,
            POSTransactionItem.is_mapped == False
        )
    )).scalars().all()

    # Get unique products
    unique_products = {}
    for item in unmapped_items:
        key = (item.pos_item_id, item.product_name, item.product_sku)
        if key not in unique_products:
            unique_products[key] = item

    # Get all Bakery-IA products
    bakery_products = await get_all_products(pos_account.tenant_id)

    mapped_count = 0
    high_confidence_count = 0

    for (pos_id, pos_name, pos_sku), item in unique_products.items():
        best_match = None
        confidence = 0.0

        # Try SKU match first (highest confidence)
        if pos_sku:
            for product in bakery_products:
                if product.sku and product.sku.upper() == pos_sku.upper():
                    best_match = product
                    confidence = 1.0
                    break

        # Try name match (fuzzy matching)
        if not best_match:
            for product in bakery_products:
                # Calculate similarity ratio
                ratio = SequenceMatcher(None, pos_name.lower(), product.name.lower()).ratio()

                if ratio > confidence and ratio > 0.80:  # 80% similarity threshold
                    best_match = product
                    confidence = ratio

        # Create mapping if confidence is high enough
        if best_match and confidence >= 0.80:
            mapping = POSProductMapping(
                tenant_id=pos_account.tenant_id,
                pos_account_id=pos_account_id,
                pos_product_id=pos_id,
                pos_product_name=pos_name,
                pos_product_sku=pos_sku,
                bakery_product_id=best_match.id,
                bakery_product_name=best_match.name,
                mapping_type='auto',
                confidence_score=Decimal(str(round(confidence, 2)))
            )
            db.add(mapping)

            # Update all unmapped items for this POS product
            await db.execute(
                update(POSTransactionItem)
                .where(
                    POSTransactionItem.pos_transaction_id.in_(
                        select(POSTransaction.id).where(
                            POSTransaction.pos_account_id == pos_account_id
                        )
                    ),
                    POSTransactionItem.pos_item_id == pos_id,
                    POSTransactionItem.is_mapped == False
                )
                .values(mapped_product_id=best_match.id, is_mapped=True)
            )

            mapped_count += 1
            if confidence >= 0.95:
                high_confidence_count += 1

    await db.commit()

    return {
        'total_unmapped_products': len(unique_products),
        'products_mapped': mapped_count,
        'high_confidence_mappings': high_confidence_count,
        'remaining_unmapped': len(unique_products) - mapped_count
    }
```
### Webhook Handler
```python
async def handle_square_webhook(request: Request) -> dict:
    """
    Handle incoming webhook from Square.
    """
    # Verify webhook signature
    signature = request.headers.get('X-Square-Signature')
    body = await request.body()

    # Signature verification (simplified here; see the HMAC sketch below)
    # In production, use proper HMAC verification with the webhook signature key

    # Parse webhook payload
    payload = await request.json()
    event_type = payload.get('type')
    merchant_id = payload.get('merchant_id')

    # Find POS account
    pos_account = (await db.execute(
        select(POSAccount).where(
            POSAccount.pos_provider == 'square',
            POSAccount.merchant_id == merchant_id,
            POSAccount.status == 'active'
        )
    )).scalars().first()

    if not pos_account:
        logger.warning("Webhook received for unknown merchant", merchant_id=merchant_id)
        return {'status': 'ignored', 'reason': 'unknown_merchant'}

    # Store webhook for processing
    webhook = POSWebhook(
        tenant_id=pos_account.tenant_id,
        pos_account_id=pos_account.id,
        webhook_event_id=payload.get('event_id'),
        event_type=event_type,
        event_data=payload,
        processing_status='pending'
    )
    db.add(webhook)
    await db.commit()

    # Process webhook asynchronously
    # (In production, use a background task queue)
    try:
        if event_type == 'payment.created':
            # Sync this specific payment
            payment_id = payload.get('data', {}).get('id')
            await sync_specific_square_payment(pos_account, payment_id)

            webhook.processing_status = 'processed'
            webhook.processed_at = datetime.utcnow()

    except Exception as e:
        webhook.processing_status = 'failed'
        webhook.error_message = str(e)
        logger.error("Webhook processing failed",
                     webhook_id=str(webhook.id),
                     error=str(e))

    await db.commit()

    return {'status': 'received'}
```
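The HMAC verification the comment above defers: a hedged sketch following Square's documented scheme, in which the signature is an HMAC of the notification URL concatenated with the raw body, base64-encoded. Note that Square's newer `x-square-hmacsha256-signature` header uses SHA-256, while the older `X-Square-Signature` header shown above used SHA-1:

```python
import base64
import hashlib
import hmac

def verify_square_signature(signature: str, webhook_url: str, body: bytes, signature_key: str) -> bool:
    """Return True if the webhook signature matches (SHA-256 variant)."""
    digest = hmac.new(
        signature_key.encode("utf-8"),
        webhook_url.encode("utf-8") + body,
        hashlib.sha256,
    ).digest()
    expected = base64.b64encode(digest).decode("utf-8")
    # Constant-time comparison to avoid timing side channels
    return hmac.compare_digest(expected, signature or "")
```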
## Events & Messaging

### Published Events (RabbitMQ)

**Exchange**: `pos`
**Routing Keys**: `pos.sync_completed`, `pos.mapping_needed`, `pos.error`

**POS Sync Completed Event**
```json
{
  "event_type": "pos_sync_completed",
  "tenant_id": "uuid",
  "pos_account_id": "uuid",
  "pos_provider": "square",
  "location_name": "VUE Madrid - Centro",
  "transactions_processed": 45,
  "transactions_failed": 0,
  "new_products_discovered": 3,
  "sync_duration_seconds": 12,
  "timestamp": "2025-11-06T10:30:00Z"
}
```

**POS Mapping Needed Alert**
```json
{
  "event_type": "pos_mapping_needed",
  "tenant_id": "uuid",
  "pos_account_id": "uuid",
  "unmapped_products_count": 5,
  "unmapped_revenue_euros": 125.50,
  "sample_unmapped_products": [
    {"pos_product_name": "Croissant Especial", "transaction_count": 12},
    {"pos_product_name": "Pan Integral Grande", "transaction_count": 8}
  ],
  "timestamp": "2025-11-06T14:00:00Z"
}
```

**POS Error Alert**
```json
{
  "event_type": "pos_error",
  "tenant_id": "uuid",
  "pos_account_id": "uuid",
  "pos_provider": "square",
  "error_type": "authentication_failed",
  "error_message": "OAuth token expired",
  "consecutive_failures": 3,
  "action_required": "Reconnect POS account",
  "timestamp": "2025-11-06T11:30:00Z"
}
```

### Consumed Events
- **From Sales**: Sales data validation triggers a re-sync if discrepancies are found
- **From Orchestrator**: Daily sync triggers for all active POS accounts
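`publish_event('pos', ...)` is used throughout the examples above but not defined in this README. A hedged sketch with aio-pika against the exchange and routing keys listed here; the connection URL and the shared helper's actual location are assumptions:

```python
import json
import aio_pika

async def publish_event(exchange_name: str, routing_key: str, payload: dict) -> None:
    """Publish a JSON event to a durable topic exchange (sketch using aio-pika)."""
    connection = await aio_pika.connect_robust(RABBITMQ_URL)  # see Configuration below
    async with connection:
        channel = await connection.channel()
        exchange = await channel.declare_exchange(
            exchange_name, aio_pika.ExchangeType.TOPIC, durable=True
        )
        message = aio_pika.Message(
            body=json.dumps(payload).encode(),
            content_type="application/json",
            delivery_mode=aio_pika.DeliveryMode.PERSISTENT,
        )
        await exchange.publish(message, routing_key=routing_key)
```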
## Custom Metrics (Prometheus)

```python
from prometheus_client import Counter, Gauge, Histogram

# POS metrics
pos_accounts_total = Gauge(
    'pos_accounts_total',
    'Total connected POS accounts',
    ['tenant_id', 'pos_provider', 'status']
)

pos_transactions_synced_total = Counter(
    'pos_transactions_synced_total',
    'Total transactions synced from POS',
    ['tenant_id', 'pos_provider']
)

pos_sync_duration_seconds = Histogram(
    'pos_sync_duration_seconds',
    'POS sync duration',
    ['tenant_id', 'pos_provider'],
    buckets=[5, 10, 30, 60, 120, 300]
)

pos_sync_errors_total = Counter(
    'pos_sync_errors_total',
    'Total POS sync errors',
    ['tenant_id', 'pos_provider', 'error_type']
)

pos_unmapped_products_total = Gauge(
    'pos_unmapped_products_total',
    'Products without mapping',
    ['tenant_id', 'pos_account_id']
)
```
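For instance, a sync run could record its outcome against these collectors like this (illustrative; label values are examples):

```python
import time

start = time.monotonic()
# ... run one sync for a tenant/provider pair ...
elapsed = time.monotonic() - start

pos_sync_duration_seconds.labels(tenant_id="t-123", pos_provider="square").observe(elapsed)
pos_transactions_synced_total.labels(tenant_id="t-123", pos_provider="square").inc(45)
```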
## Configuration

### Environment Variables

**Service Configuration:**
- `PORT` - Service port (default: 8013)
- `DATABASE_URL` - PostgreSQL connection string
- `REDIS_URL` - Redis connection string
- `RABBITMQ_URL` - RabbitMQ connection string

**POS Provider Configuration:**
- `SQUARE_APP_ID` - Square application ID
- `SQUARE_APP_SECRET` - Square application secret
- `TOAST_CLIENT_ID` - Toast client ID
- `TOAST_CLIENT_SECRET` - Toast client secret
- `LIGHTSPEED_CLIENT_ID` - Lightspeed client ID
- `LIGHTSPEED_CLIENT_SECRET` - Lightspeed client secret

**Sync Configuration:**
- `DEFAULT_SYNC_FREQUENCY_MINUTES` - Default sync interval (default: 15)
- `ENABLE_WEBHOOKS` - Use webhooks for real-time sync (default: true)
- `MAX_SYNC_RETRIES` - Max retry attempts (default: 3)
- `HISTORICAL_IMPORT_DAYS` - Days to import on initial setup (default: 90)

**Mapping Configuration:**
- `AUTO_MAPPING_ENABLED` - Enable automatic product mapping (default: true)
- `AUTO_MAPPING_CONFIDENCE_THRESHOLD` - Minimum confidence (default: 0.80)
- `ALERT_ON_UNMAPPED_PRODUCTS` - Alert for unmapped products (default: true)
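A hedged sketch of loading these variables with pydantic-settings; the field names mirror the list above, but the service's real settings module is not shown in this diff:

```python
from pydantic_settings import BaseSettings

class POSSettings(BaseSettings):
    """Illustrative settings class; env var names match case-insensitively."""
    port: int = 8013
    database_url: str
    redis_url: str
    rabbitmq_url: str
    square_app_id: str = ""
    square_app_secret: str = ""
    default_sync_frequency_minutes: int = 15
    enable_webhooks: bool = True
    max_sync_retries: int = 3
    historical_import_days: int = 90
    auto_mapping_enabled: bool = True
    auto_mapping_confidence_threshold: float = 0.80
    alert_on_unmapped_products: bool = True

settings = POSSettings()  # reads values from the environment
```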
## Development Setup

### Prerequisites
- Python 3.11+
- PostgreSQL 17
- Redis 7.4
- RabbitMQ 4.1
- POS system developer accounts (Square, Toast, Lightspeed)

### Local Development
```bash
cd services/pos
python -m venv venv
source venv/bin/activate

pip install -r requirements.txt

export DATABASE_URL=postgresql://user:pass@localhost:5432/pos
export REDIS_URL=redis://localhost:6379/0
export RABBITMQ_URL=amqp://guest:guest@localhost:5672/
export SQUARE_APP_ID=your_square_app_id
export SQUARE_APP_SECRET=your_square_app_secret

alembic upgrade head
python main.py
```
## Integration Points

### Dependencies
- **POS Providers** - Square, Toast, Lightspeed APIs
- **Auth Service** - User authentication
- **PostgreSQL** - Transaction and mapping data
- **Redis** - Deduplication cache
- **RabbitMQ** - Event publishing

### Dependents
- **Sales Service** - Receives synced transaction data
- **Forecasting Service** - Uses sales data for ML models
- **Inventory Service** - Stock deduction from sales
- **Notification Service** - Sync error alerts
- **Frontend Dashboard** - POS connection and mapping UI

## Business Value for VUE Madrid

### Problem Statement
Spanish bakeries struggle with:
- Hours of daily manual sales data entry
- Transcription errors reducing forecast accuracy
- Delayed visibility into sales performance
- No integration between POS and business intelligence
- Double data entry (POS + spreadsheets/accounting)

### Solution
The Bakery-IA POS Service provides:
- **Zero Manual Entry**: Automatic transaction sync from POS
- **Real-Time Data**: Sales data available within seconds
- **Higher Accuracy**: 99.9%+ vs. 85-95% for manual entry
- **Immediate Value**: Delivers from day one, after only a 15-minute connection step
- **Universal Compatibility**: Works with popular POS systems

### Quantifiable Impact

**Time Savings:**
- 5-8 hours/week by eliminating manual data entry
- 1-2 hours/week on sales reconciliation
- **Total: 6-10 hours/week saved**

**Data Quality:**
- 99.9%+ accuracy vs. 85-95% for manual entry
- Zero transcription errors
- Real-time vs. end-of-day data availability
- 10-20% forecast accuracy improvement

**Operational Efficiency:**
- 15-minute setup vs. hours of daily manual entry
- Automatic sync every 15 minutes
- Multi-location support in a single dashboard
- Instant error detection and alerts

### Target Market Fit (Spanish Bakeries)
- **POS Adoption**: Growing use of Square, Toast, and Lightspeed in Spain
- **Labor Costs**: The Spanish minimum wage makes manual entry expensive
- **Modernization**: A new generation of bakery owners embraces technology
- **Market Trend**: Digital transformation in retail and food service

### ROI Calculation
**Investment**: €0 additional (included in platform subscription)
**Time Savings Value**: 6-10 hours/week × €15/hour = €360-600/month (at 4 weeks/month)
**Forecast Improvement Value**: 10-20% better accuracy = €100-400/month
**Total Monthly Value**: €460-1,000
**Annual ROI**: €5,520-12,000 value per bakery
**Payback**: Immediate (included in subscription)

### Competitive Advantage
- **First-Mover**: Few Spanish bakery platforms offer POS integration
- **Multi-POS Support**: Flexibility for customers to choose their POS
- **Plug-and-Play**: 15-minute setup vs. competitors requiring IT involvement
- **Real-Time**: Webhook support for instant sync vs. batch processing

---

**Copyright © 2025 Bakery-IA. All rights reserved.**
84
services/pos/alembic.ini
Normal file
@@ -0,0 +1,84 @@
# ================================================================
# services/pos/alembic.ini - Alembic Configuration
# ================================================================
[alembic]
# path to migration scripts
script_location = migrations

# template used to generate migration file names
file_template = %%(year)d%%(month).2d%%(day).2d_%%(hour).2d%%(minute).2d_%%(rev)s_%%(slug)s

# sys.path path, will be prepended to sys.path if present.
prepend_sys_path = .

# timezone to use when rendering the date within the migration file
# as well as the filename.
timezone = Europe/Madrid

# max length of characters to apply to the "slug" field
truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
sourceless = false

# version of a migration file's filename format
version_num_format = %%s

# version path separator
version_path_separator = os

# set to 'true' to search source files recursively
# in each "version_locations" directory
recursive_version_locations = false

# the output encoding used when revision files
# are written from script.py.mako
output_encoding = utf-8

# Database URL - will be overridden by environment variable or settings
sqlalchemy.url = postgresql+asyncpg://pos_user:password@pos-db-service:5432/pos_db

[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts.

[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
1
services/pos/app/__init__.py
Normal file
@@ -0,0 +1 @@
# POS Integration Service
1
services/pos/app/api/__init__.py
Normal file
@@ -0,0 +1 @@
# API endpoints package
93
services/pos/app/api/analytics.py
Normal file
@@ -0,0 +1,93 @@
"""
POS Service Analytics API Endpoints
ANALYTICS layer - Channel and sync performance analytics
"""

from fastapi import APIRouter, Depends, HTTPException, Path, Query
from typing import Optional, Dict, Any
from uuid import UUID
from datetime import datetime
import structlog

from app.core.database import get_db
from shared.auth.decorators import get_current_user_dep
from shared.auth.access_control import require_user_role
from shared.routing import RouteBuilder

router = APIRouter()
logger = structlog.get_logger()
route_builder = RouteBuilder('pos')


@router.get(
    route_builder.build_analytics_route("sync-performance"),
    response_model=dict
)
@require_user_role(['viewer', 'member', 'admin', 'owner'])
async def get_sync_performance_analytics(
    tenant_id: UUID = Path(...),
    days: int = Query(30, ge=1, le=365),
    config_id: Optional[UUID] = Query(None),
    current_user: dict = Depends(get_current_user_dep),
    db=Depends(get_db)
):
    """Analyze sync performance metrics"""
    try:
        return {
            "period_days": days,
            "total_syncs": 0,
            "successful_syncs": 0,
            "failed_syncs": 0,
            "success_rate": 0.0,
            "average_duration_minutes": 0.0,
            "total_transactions_synced": 0,
            "total_revenue_synced": 0.0,
            "sync_frequency": {
                "daily_average": 0.0,
                "peak_day": None,
                "peak_count": 0
            },
            "error_analysis": {
                "common_errors": [],
                "error_trends": []
            }
        }
    except Exception as e:
        logger.error("Failed to get sync analytics", error=str(e), tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to get analytics: {str(e)}")


@router.get(
    route_builder.build_analytics_route("channel-performance"),
    response_model=dict
)
@require_user_role(['viewer', 'member', 'admin', 'owner'])
async def get_channel_performance_analytics(
    tenant_id: UUID = Path(...),
    days: int = Query(30, ge=1, le=365),
    pos_system: Optional[str] = Query(None),
    current_user: dict = Depends(get_current_user_dep),
    db=Depends(get_db)
):
    """Analyze POS channel performance by system"""
    try:
        return {
            "period_days": days,
            "pos_system": pos_system,
            "channel_metrics": {
                "total_transactions": 0,
                "total_revenue": 0.0,
                "average_transaction_value": 0.0,
                "transaction_growth_rate": 0.0
            },
            "system_breakdown": [],
            "performance_trends": {
                "daily_trends": [],
                "hourly_trends": [],
                "day_of_week_trends": []
            },
            "top_performing_channels": []
        }
    except Exception as e:
        logger.error("Failed to get channel analytics", error=str(e), tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to get channel analytics: {str(e)}")
237
services/pos/app/api/audit.py
Normal file
@@ -0,0 +1,237 @@
# services/pos/app/api/audit.py
"""
Audit Logs API - Retrieve audit trail for pos service
"""

from fastapi import APIRouter, Depends, HTTPException, Query, Path, status
from typing import Optional, Dict, Any
from uuid import UUID
from datetime import datetime
import structlog
from sqlalchemy import select, func, and_
from sqlalchemy.ext.asyncio import AsyncSession

from app.models import AuditLog
from shared.auth.decorators import get_current_user_dep
from shared.auth.access_control import require_user_role
from shared.routing import RouteBuilder
from shared.models.audit_log_schemas import (
    AuditLogResponse,
    AuditLogListResponse,
    AuditLogStatsResponse
)
from app.core.database import database_manager

route_builder = RouteBuilder('pos')
router = APIRouter(tags=["audit-logs"])
logger = structlog.get_logger()


async def get_db():
    """Database session dependency"""
    async with database_manager.get_session() as session:
        yield session


@router.get(
    route_builder.build_base_route("audit-logs"),
    response_model=AuditLogListResponse
)
@require_user_role(['admin', 'owner'])
async def get_audit_logs(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    start_date: Optional[datetime] = Query(None, description="Filter logs from this date"),
    end_date: Optional[datetime] = Query(None, description="Filter logs until this date"),
    user_id: Optional[UUID] = Query(None, description="Filter by user ID"),
    action: Optional[str] = Query(None, description="Filter by action type"),
    resource_type: Optional[str] = Query(None, description="Filter by resource type"),
    severity: Optional[str] = Query(None, description="Filter by severity level"),
    search: Optional[str] = Query(None, description="Search in description field"),
    limit: int = Query(100, ge=1, le=1000, description="Number of records to return"),
    offset: int = Query(0, ge=0, description="Number of records to skip"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """
    Get audit logs for pos service.
    Requires admin or owner role.
    """
    try:
        logger.info(
            "Retrieving audit logs",
            tenant_id=tenant_id,
            user_id=current_user.get("user_id"),
            filters={
                "start_date": start_date,
                "end_date": end_date,
                "action": action,
                "resource_type": resource_type,
                "severity": severity
            }
        )

        # Build query filters
        filters = [AuditLog.tenant_id == tenant_id]

        if start_date:
            filters.append(AuditLog.created_at >= start_date)
        if end_date:
            filters.append(AuditLog.created_at <= end_date)
        if user_id:
            filters.append(AuditLog.user_id == user_id)
        if action:
            filters.append(AuditLog.action == action)
        if resource_type:
            filters.append(AuditLog.resource_type == resource_type)
        if severity:
            filters.append(AuditLog.severity == severity)
        if search:
            filters.append(AuditLog.description.ilike(f"%{search}%"))

        # Count total matching records
        count_query = select(func.count()).select_from(AuditLog).where(and_(*filters))
        total_result = await db.execute(count_query)
        total = total_result.scalar() or 0

        # Fetch paginated results
        query = (
            select(AuditLog)
            .where(and_(*filters))
            .order_by(AuditLog.created_at.desc())
            .limit(limit)
            .offset(offset)
        )

        result = await db.execute(query)
        audit_logs = result.scalars().all()

        # Convert to response models
        items = [AuditLogResponse.from_orm(log) for log in audit_logs]

        logger.info(
            "Successfully retrieved audit logs",
            tenant_id=tenant_id,
            total=total,
            returned=len(items)
        )

        return AuditLogListResponse(
            items=items,
            total=total,
            limit=limit,
            offset=offset,
            has_more=(offset + len(items)) < total
        )

    except Exception as e:
        logger.error(
            "Failed to retrieve audit logs",
            error=str(e),
            tenant_id=tenant_id
        )
        raise HTTPException(
            status_code=500,
            detail=f"Failed to retrieve audit logs: {str(e)}"
        )


@router.get(
    route_builder.build_base_route("audit-logs/stats"),
    response_model=AuditLogStatsResponse
)
@require_user_role(['admin', 'owner'])
async def get_audit_log_stats(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    start_date: Optional[datetime] = Query(None, description="Filter logs from this date"),
    end_date: Optional[datetime] = Query(None, description="Filter logs until this date"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """
    Get audit log statistics for pos service.
    Requires admin or owner role.
    """
    try:
        logger.info(
            "Retrieving audit log statistics",
            tenant_id=tenant_id,
            user_id=current_user.get("user_id")
        )

        # Build base filters
        filters = [AuditLog.tenant_id == tenant_id]
        if start_date:
            filters.append(AuditLog.created_at >= start_date)
        if end_date:
            filters.append(AuditLog.created_at <= end_date)

        # Total events
        count_query = select(func.count()).select_from(AuditLog).where(and_(*filters))
        total_result = await db.execute(count_query)
        total_events = total_result.scalar() or 0

        # Events by action
        action_query = (
            select(AuditLog.action, func.count().label('count'))
            .where(and_(*filters))
            .group_by(AuditLog.action)
        )
        action_result = await db.execute(action_query)
        events_by_action = {row.action: row.count for row in action_result}

        # Events by severity
        severity_query = (
            select(AuditLog.severity, func.count().label('count'))
            .where(and_(*filters))
            .group_by(AuditLog.severity)
        )
        severity_result = await db.execute(severity_query)
        events_by_severity = {row.severity: row.count for row in severity_result}

        # Events by resource type
        resource_query = (
            select(AuditLog.resource_type, func.count().label('count'))
            .where(and_(*filters))
            .group_by(AuditLog.resource_type)
        )
        resource_result = await db.execute(resource_query)
        events_by_resource_type = {row.resource_type: row.count for row in resource_result}

        # Date range
        date_range_query = (
            select(
                func.min(AuditLog.created_at).label('min_date'),
                func.max(AuditLog.created_at).label('max_date')
            )
            .where(and_(*filters))
        )
        date_result = await db.execute(date_range_query)
        date_row = date_result.one()

        logger.info(
            "Successfully retrieved audit log statistics",
            tenant_id=tenant_id,
            total_events=total_events
        )

        return AuditLogStatsResponse(
            total_events=total_events,
            events_by_action=events_by_action,
            events_by_severity=events_by_severity,
            events_by_resource_type=events_by_resource_type,
            date_range={
                "min": date_row.min_date,
                "max": date_row.max_date
            }
        )

    except Exception as e:
        logger.error(
            "Failed to retrieve audit log statistics",
            error=str(e),
            tenant_id=tenant_id
        )
        raise HTTPException(
            status_code=500,
            detail=f"Failed to retrieve audit log statistics: {str(e)}"
        )
241
services/pos/app/api/configurations.py
Normal file
@@ -0,0 +1,241 @@
|
||||
"""
|
||||
POS Configuration API Endpoints
|
||||
ATOMIC layer - Basic CRUD operations for POS configurations
|
||||
"""
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Path, Query
|
||||
from typing import List, Optional, Dict, Any
|
||||
from uuid import UUID
|
||||
import structlog
|
||||
|
||||
from app.core.database import get_db
|
||||
from shared.auth.decorators import get_current_user_dep
|
||||
from shared.auth.access_control import require_user_role, admin_role_required
|
||||
from shared.routing import RouteBuilder
|
||||
from shared.security import create_audit_logger, AuditSeverity, AuditAction
|
||||
from app.services.pos_config_service import POSConfigurationService
|
||||
from app.schemas.pos_config import POSConfigurationListResponse
|
||||
from app.models import AuditLog
|
||||
|
||||
router = APIRouter()
|
||||
logger = structlog.get_logger()
|
||||
audit_logger = create_audit_logger("pos-service", AuditLog)
|
||||
route_builder = RouteBuilder('pos')
|
||||
|
||||
|
||||
@router.get(
|
||||
route_builder.build_base_route("configurations"),
|
||||
response_model=POSConfigurationListResponse
|
||||
)
|
||||
@require_user_role(['viewer', 'member', 'admin', 'owner'])
|
||||
async def list_pos_configurations(
|
||||
tenant_id: UUID = Path(...),
|
||||
pos_system: Optional[str] = Query(None),
|
||||
is_active: Optional[bool] = Query(None),
|
||||
skip: int = Query(0, ge=0),
|
||||
limit: int = Query(100, ge=1, le=100),
|
||||
current_user: dict = Depends(get_current_user_dep),
|
||||
db=Depends(get_db)
|
||||
):
|
||||
"""List all POS configurations for a tenant"""
|
||||
try:
|
||||
service = POSConfigurationService()
|
||||
|
||||
configurations = await service.get_configurations_by_tenant(
|
||||
tenant_id=tenant_id,
|
||||
pos_system=pos_system,
|
||||
is_active=is_active,
|
||||
skip=skip,
|
||||
limit=limit
|
||||
)
|
||||
|
||||
total = await service.count_configurations_by_tenant(
|
||||
tenant_id=tenant_id,
|
||||
pos_system=pos_system,
|
||||
is_active=is_active
|
||||
)
|
||||
|
||||
return POSConfigurationListResponse(
|
||||
configurations=configurations,
|
||||
total=total,
|
||||
supported_systems=["square", "toast", "lightspeed"]
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error("Failed to list POS configurations", error=str(e), tenant_id=tenant_id)
|
||||
        raise HTTPException(status_code=500, detail=f"Failed to list configurations: {str(e)}")


@router.post(
    route_builder.build_base_route("configurations"),
    response_model=dict,
    status_code=201
)
@admin_role_required
async def create_pos_configuration(
    configuration_data: Dict[str, Any],
    tenant_id: UUID = Path(...),
    current_user: dict = Depends(get_current_user_dep),
    db=Depends(get_db)
):
    """Create a new POS configuration (Admin/Owner only)"""
    try:
        logger.info("Creating POS configuration",
                    tenant_id=tenant_id,
                    pos_system=configuration_data.get("pos_system"),
                    user_id=current_user.get("user_id"))

        # NOTE: persistence is not implemented in this commit; the endpoint
        # returns a placeholder response until the configuration service is wired in.
        return {
            "message": "POS configuration created successfully",
            "id": "placeholder",
            "pos_system": configuration_data.get("pos_system")
        }
    except Exception as e:
        logger.error("Failed to create POS configuration", error=str(e), tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to create configuration: {str(e)}")


@router.get(
    route_builder.build_resource_detail_route("configurations", "config_id"),
    response_model=dict
)
@require_user_role(['viewer', 'member', 'admin', 'owner'])
async def get_pos_configuration(
    tenant_id: UUID = Path(...),
    config_id: UUID = Path(...),
    current_user: dict = Depends(get_current_user_dep),
    db=Depends(get_db)
):
    """Get a specific POS configuration"""
    try:
        # NOTE: stubbed response; always reports a "square" configuration until
        # the lookup against the configuration service is implemented.
        return {
            "id": str(config_id),
            "tenant_id": str(tenant_id),
            "pos_system": "square",
            "is_active": True
        }
    except Exception as e:
        logger.error("Failed to get POS configuration", error=str(e),
                     tenant_id=tenant_id, config_id=config_id)
        raise HTTPException(status_code=500, detail=f"Failed to get configuration: {str(e)}")


@router.put(
    route_builder.build_resource_detail_route("configurations", "config_id"),
    response_model=dict
)
@admin_role_required
async def update_pos_configuration(
    configuration_data: Dict[str, Any],
    tenant_id: UUID = Path(...),
    config_id: UUID = Path(...),
    current_user: dict = Depends(get_current_user_dep),
    db=Depends(get_db)
):
    """Update a POS configuration (Admin/Owner only)"""
    try:
        # Log HIGH severity audit event for configuration changes
        try:
            await audit_logger.log_event(
                db_session=db,
                tenant_id=str(tenant_id),
                user_id=current_user["user_id"],
                action=AuditAction.UPDATE.value,
                resource_type="pos_configuration",
                resource_id=str(config_id),
                severity=AuditSeverity.HIGH.value,
                description=f"Admin {current_user.get('email', 'unknown')} updated POS configuration",
                changes={"configuration_updates": configuration_data},
                endpoint=f"/configurations/{config_id}",
                method="PUT"
            )
        except Exception as audit_error:
            logger.warning("Failed to log audit event", error=str(audit_error))

        logger.info("POS configuration updated",
                    config_id=str(config_id),
                    tenant_id=str(tenant_id),
                    user_id=current_user["user_id"])

        return {"message": "Configuration updated successfully", "id": str(config_id)}
    except Exception as e:
        logger.error("Failed to update POS configuration", error=str(e),
                     tenant_id=tenant_id, config_id=config_id)
        raise HTTPException(status_code=500, detail=f"Failed to update configuration: {str(e)}")


@router.delete(
    route_builder.build_resource_detail_route("configurations", "config_id"),
    response_model=dict
)
@require_user_role(['owner'])
async def delete_pos_configuration(
    tenant_id: UUID = Path(...),
    config_id: UUID = Path(...),
    current_user: dict = Depends(get_current_user_dep),
    db=Depends(get_db)
):
    """Delete a POS configuration (Owner only)"""
    try:
        # Log CRITICAL severity audit event for configuration deletion
        try:
            await audit_logger.log_deletion(
                db_session=db,
                tenant_id=str(tenant_id),
                user_id=current_user["user_id"],
                resource_type="pos_configuration",
                resource_id=str(config_id),
                severity=AuditSeverity.CRITICAL.value,
                description=f"Owner {current_user.get('email', 'unknown')} deleted POS configuration",
                endpoint=f"/configurations/{config_id}",
                method="DELETE"
            )
        except Exception as audit_error:
            logger.warning("Failed to log audit event", error=str(audit_error))

        logger.info("POS configuration deleted",
                    config_id=str(config_id),
                    tenant_id=str(tenant_id),
                    user_id=current_user["user_id"])

        return {"message": "Configuration deleted successfully"}
    except Exception as e:
        logger.error("Failed to delete POS configuration", error=str(e),
                     tenant_id=tenant_id, config_id=config_id)
        raise HTTPException(status_code=500, detail=f"Failed to delete configuration: {str(e)}")


# ============================================================================
# Reference Data
# ============================================================================

@router.get(
    route_builder.build_global_route("supported-systems"),
    response_model=dict
)
async def get_supported_pos_systems():
    """Get list of supported POS systems (no tenant context required)"""
    return {
        "systems": [
            {
                "id": "square",
                "name": "Square POS",
                "description": "Square Point of Sale system",
                "features": ["payments", "inventory", "analytics", "webhooks"],
                "supported_regions": ["US", "CA", "AU", "JP", "GB", "IE", "ES", "FR"]
            },
            {
                "id": "toast",
                "name": "Toast POS",
                "description": "Toast restaurant POS system",
                "features": ["orders", "payments", "menu_management", "webhooks"],
                "supported_regions": ["US", "CA", "IE", "ES"]
            },
            {
                "id": "lightspeed",
                "name": "Lightspeed Restaurant",
                "description": "Lightspeed restaurant management system",
                "features": ["orders", "inventory", "reservations", "webhooks"],
                "supported_regions": ["US", "CA", "EU", "AU"]
            }
        ]
    }
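
# --- Example (illustrative, not part of the commit) -------------------------
# A minimal sketch of calling the reference endpoint above. The base URL and
# the final path produced by RouteBuilder's global route are assumptions for
# a local deployment, not confirmed by this diff:
#
#   import httpx
#
#   resp = httpx.get("http://localhost:8000/api/v1/pos/supported-systems")
#   resp.raise_for_status()
#   for system in resp.json()["systems"]:
#       print(system["id"], system["supported_regions"])
# -----------------------------------------------------------------------------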
857
services/pos/app/api/pos_operations.py
Normal file
@@ -0,0 +1,857 @@
"""
|
||||
POS Operations API Endpoints
|
||||
BUSINESS layer - Sync operations, webhooks, reconciliation, and test connection
|
||||
"""
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Path, Query, Body, Request, Header
|
||||
from typing import Optional, Dict, Any
|
||||
from uuid import UUID
|
||||
from datetime import datetime
|
||||
import structlog
|
||||
import json
|
||||
|
||||
from app.core.database import get_db
|
||||
from shared.auth.decorators import get_current_user_dep
|
||||
from shared.auth.access_control import require_user_role, admin_role_required, service_only_access
|
||||
from shared.routing import RouteBuilder
|
||||
from app.services.pos_transaction_service import POSTransactionService
|
||||
from app.services.pos_config_service import POSConfigurationService
|
||||
from app.services.pos_webhook_service import POSWebhookService
|
||||
from app.services.pos_sync_service import POSSyncService
|
||||
from app.services.tenant_deletion_service import POSTenantDeletionService
|
||||
|
||||
router = APIRouter()
|
||||
logger = structlog.get_logger()
|
||||
route_builder = RouteBuilder('pos')
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Sync Operations
|
||||
# ============================================================================
|
||||
|
||||
@router.post(
|
||||
route_builder.build_operations_route("sync"),
|
||||
response_model=dict
|
||||
)
|
||||
@require_user_role(['member', 'admin', 'owner'])
|
||||
async def trigger_sync(
|
||||
sync_request: Dict[str, Any],
|
||||
tenant_id: UUID = Path(...),
|
||||
current_user: dict = Depends(get_current_user_dep),
|
||||
db=Depends(get_db)
|
||||
):
|
||||
"""Trigger manual synchronization with POS system (Member+)"""
|
||||
try:
|
||||
sync_type = sync_request.get("sync_type", "incremental")
|
||||
data_types = sync_request.get("data_types", ["transactions"])
|
||||
config_id = sync_request.get("config_id")
|
||||
|
||||
if not config_id:
|
||||
raise HTTPException(status_code=400, detail="config_id is required")
|
||||
|
||||
# Get POS configuration to determine system type
|
||||
config_service = POSConfigurationService()
|
||||
configs = await config_service.get_configurations_by_tenant(tenant_id, skip=0, limit=100)
|
||||
config = next((c for c in configs if str(c.id) == str(config_id)), None)
|
||||
|
||||
if not config:
|
||||
raise HTTPException(status_code=404, detail="POS configuration not found")
|
||||
|
||||
# Create sync job
|
||||
sync_service = POSSyncService(db)
|
||||
sync_log = await sync_service.create_sync_job(
|
||||
tenant_id=tenant_id,
|
||||
pos_config_id=UUID(config_id),
|
||||
pos_system=config.pos_system,
|
||||
sync_type=sync_type,
|
||||
data_types=data_types
|
||||
)
|
||||
|
||||
logger.info("Manual sync triggered",
|
||||
tenant_id=tenant_id,
|
||||
config_id=config_id,
|
||||
sync_id=str(sync_log.id),
|
||||
sync_type=sync_type,
|
||||
user_id=current_user.get("user_id"))
|
||||
|
||||
return {
|
||||
"message": "Sync triggered successfully",
|
||||
"sync_id": str(sync_log.id),
|
||||
"status": "queued",
|
||||
"sync_type": sync_type,
|
||||
"data_types": data_types,
|
||||
"estimated_duration": "5-10 minutes"
|
||||
}
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error("Failed to trigger sync", error=str(e), tenant_id=tenant_id)
|
||||
raise HTTPException(status_code=500, detail=f"Failed to trigger sync: {str(e)}")
|
||||
|
||||
|
||||
@router.get(
|
||||
route_builder.build_operations_route("sync-status"),
|
||||
response_model=dict
|
||||
)
|
||||
@require_user_role(['viewer', 'member', 'admin', 'owner'])
|
||||
async def get_sync_status(
|
||||
tenant_id: UUID = Path(...),
|
||||
config_id: Optional[UUID] = Query(None),
|
||||
limit: int = Query(10, ge=1, le=100),
|
||||
current_user: dict = Depends(get_current_user_dep),
|
||||
db=Depends(get_db)
|
||||
):
|
||||
"""Get synchronization status and recent sync history"""
|
||||
try:
|
||||
transaction_service = POSTransactionService()
|
||||
sync_service = POSSyncService(db)
|
||||
|
||||
# Get sync metrics from transaction service
|
||||
sync_metrics = await transaction_service.get_sync_metrics(tenant_id)
|
||||
|
||||
# Get last successful sync time
|
||||
sync_status = sync_metrics["sync_status"]
|
||||
last_successful_sync = sync_status.get("last_sync_at")
|
||||
|
||||
# Calculate sync success rate
|
||||
total = sync_metrics["total_transactions"]
|
||||
synced = sync_status.get("synced", 0)
|
||||
success_rate = (synced / total * 100) if total > 0 else 100.0
|
||||
|
||||
# Calculate actual average duration from sync logs
|
||||
average_duration_minutes = await sync_service.calculate_average_duration(
|
||||
tenant_id=tenant_id,
|
||||
pos_config_id=config_id,
|
||||
days=30
|
||||
)
|
||||
|
||||
return {
|
||||
"current_sync": None,
|
||||
"last_successful_sync": last_successful_sync.isoformat() if last_successful_sync else None,
|
||||
"recent_syncs": [], # Could be enhanced with actual sync history
|
||||
"sync_health": {
|
||||
"status": "healthy" if success_rate > 90 else "degraded" if success_rate > 70 else "unhealthy",
|
||||
"success_rate": round(success_rate, 2),
|
||||
"average_duration_minutes": average_duration_minutes,
|
||||
"last_error": None,
|
||||
"total_transactions": total,
|
||||
"synced_count": synced,
|
||||
"pending_count": sync_status.get("pending", 0),
|
||||
"failed_count": sync_status.get("failed", 0)
|
||||
}
|
||||
}
|
||||
except Exception as e:
|
||||
logger.error("Failed to get sync status", error=str(e), tenant_id=tenant_id)
|
||||
raise HTTPException(status_code=500, detail=f"Failed to get sync status: {str(e)}")
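

# --- Example (illustrative, not part of the commit) -------------------------
# The health classification above is inline; a minimal sketch of the same
# thresholds as a reusable helper (the helper name is hypothetical):

def _classify_sync_health(synced: int, total: int) -> tuple:
    """Map a synced/total ratio onto the endpoint's health buckets.

    >90% success -> "healthy", >70% -> "degraded", otherwise "unhealthy".
    An empty tenant (total == 0) is treated as 100% successful.
    """
    success_rate = (synced / total * 100) if total > 0 else 100.0
    if success_rate > 90:
        status = "healthy"
    elif success_rate > 70:
        status = "degraded"
    else:
        status = "unhealthy"
    return status, round(success_rate, 2)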


@router.get(
    route_builder.build_operations_route("sync-logs"),
    response_model=dict
)
@require_user_role(['viewer', 'member', 'admin', 'owner'])
async def get_sync_logs(
    tenant_id: UUID = Path(...),
    config_id: Optional[UUID] = Query(None),
    limit: int = Query(50, ge=1, le=200),
    offset: int = Query(0, ge=0),
    status: Optional[str] = Query(None),
    sync_type: Optional[str] = Query(None),
    current_user: dict = Depends(get_current_user_dep),
    db=Depends(get_db)
):
    """Get detailed sync logs"""
    try:
        sync_service = POSSyncService(db)

        logs_data = await sync_service.get_sync_logs(
            tenant_id=tenant_id,
            config_id=config_id,
            status=status,
            sync_type=sync_type,
            limit=limit,
            offset=offset
        )

        return logs_data

    except Exception as e:
        logger.error("Failed to get sync logs", error=str(e), tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to get sync logs: {str(e)}")


@router.post(
    route_builder.build_operations_route("resync-failed"),
    response_model=dict
)
@admin_role_required
async def resync_failed_transactions(
    tenant_id: UUID = Path(...),
    days_back: int = Query(7, ge=1, le=90),
    current_user: dict = Depends(get_current_user_dep),
    db=Depends(get_db)
):
    """Resync failed transactions from the specified time period (Admin/Owner only)"""
    try:
        # Get active POS configuration for tenant
        config_service = POSConfigurationService()
        configs = await config_service.get_configurations_by_tenant(
            tenant_id=tenant_id,
            is_active=True,
            skip=0,
            limit=1
        )

        if not configs:
            raise HTTPException(status_code=404, detail="No active POS configuration found")

        config = configs[0]

        # Create resync job
        # NOTE: days_back is echoed back to the caller but is not yet passed
        # through to create_sync_job; the job scope is determined by the sync service.
        sync_service = POSSyncService(db)
        sync_log = await sync_service.create_sync_job(
            tenant_id=tenant_id,
            pos_config_id=config.id,
            pos_system=config.pos_system,
            sync_type="resync_failed",
            data_types=["transactions"]
        )

        logger.info("Resync failed transactions requested",
                    tenant_id=tenant_id,
                    days_back=days_back,
                    sync_id=str(sync_log.id),
                    user_id=current_user.get("user_id"))

        return {
            "message": "Resync job queued successfully",
            "job_id": str(sync_log.id),
            "scope": f"Failed transactions from last {days_back} days",
            "estimated_transactions": 0
        }
    except HTTPException:
        raise
    except Exception as e:
        logger.error("Failed to queue resync job", error=str(e), tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to queue resync job: {str(e)}")


@router.post(
    route_builder.build_operations_route("test-connection"),
    response_model=dict
)
@admin_role_required
async def test_pos_connection(
    tenant_id: UUID = Path(...),
    config_id: UUID = Query(...),
    current_user: dict = Depends(get_current_user_dep),
    db=Depends(get_db)
):
    """Test connection to POS system (Admin/Owner only)"""
    try:
        config_service = POSConfigurationService()

        # Get the configuration to verify it exists
        configurations = await config_service.get_configurations_by_tenant(
            tenant_id=tenant_id,
            skip=0,
            limit=100
        )

        config = next((c for c in configurations if str(c.id) == str(config_id)), None)

        if not config:
            raise HTTPException(status_code=404, detail="Configuration not found")

        # For demo purposes, we assume the connection is successful if the config exists.
        # In production, this would actually test the POS API connection.
        is_connected = config.is_connected and config.is_active

        return {
            "success": is_connected,
            "status": "success" if is_connected else "failed",
            "message": f"Connection test {'successful' if is_connected else 'failed'} for {config.pos_system}",
            "tested_at": datetime.utcnow().isoformat(),
            "config_id": str(config_id),
            "pos_system": config.pos_system,
            "health_status": config.health_status
        }
    except HTTPException:
        raise
    except Exception as e:
        logger.error("Failed to test POS connection", error=str(e),
                     tenant_id=tenant_id, config_id=config_id)
        raise HTTPException(status_code=500, detail=f"Failed to test connection: {str(e)}")
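

# --- Example (illustrative, not part of the commit) -------------------------
# A minimal sketch of what a real connection test could do for Square: call a
# cheap authenticated endpoint and treat HTTP 200 as "connected". The URL is
# Square's public ListLocations endpoint; the availability of httpx and the
# place the access token is stored are assumptions.

async def _probe_square_connection(access_token: str) -> bool:
    """Return True if the Square API accepts the token (hypothetical helper)."""
    import httpx  # assumed available in this service's environment

    async with httpx.AsyncClient(timeout=10.0) as client:
        resp = await client.get(
            "https://connect.squareup.com/v2/locations",
            headers={"Authorization": f"Bearer {access_token}"},
        )
    return resp.status_code == 200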


# ============================================================================
# Webhook Operations
# ============================================================================

@router.post(
    route_builder.build_webhook_route("{pos_system}"),
    response_model=dict
)
async def receive_webhook(
    request: Request,
    pos_system: str = Path(..., description="POS system name"),
    content_type: Optional[str] = Header(None),
    x_signature: Optional[str] = Header(None),
    x_webhook_signature: Optional[str] = Header(None),
    authorization: Optional[str] = Header(None),
    db=Depends(get_db)
):
    """
    Receive webhooks from POS systems.

    Supports Square, Toast, and Lightspeed webhook formats.
    Includes signature verification, database logging, and duplicate detection.
    """
    webhook_service = POSWebhookService(db)
    start_time = datetime.utcnow()

    try:
        # Validate POS system
        supported_systems = ["square", "toast", "lightspeed"]
        if pos_system.lower() not in supported_systems:
            raise HTTPException(status_code=400, detail=f"Unsupported POS system: {pos_system}")

        # Get request details
        method = request.method
        url_path = str(request.url.path)
        query_params = dict(request.query_params)
        headers = dict(request.headers)

        # Get client IP
        client_ip = None
        if hasattr(request, 'client') and request.client:
            client_ip = request.client.host

        # Read payload
        try:
            body = await request.body()
            raw_payload = body.decode('utf-8') if body else ""
            payload_size = len(body) if body else 0

            # Parse JSON if possible
            parsed_payload = None
            if raw_payload:
                try:
                    parsed_payload = json.loads(raw_payload)
                except json.JSONDecodeError:
                    logger.warning("Failed to parse webhook payload as JSON",
                                   pos_system=pos_system, payload_size=payload_size)
        except Exception as e:
            logger.error("Failed to read webhook payload", error=str(e))
            raise HTTPException(status_code=400, detail="Failed to read request payload")

        # Determine signature from various header formats
        signature = x_signature or x_webhook_signature or authorization

        # Parse webhook event details
        event_details = webhook_service.parse_webhook_event_details(pos_system, parsed_payload or {})
        webhook_type = event_details.get("webhook_type") or "unknown"
        event_id = event_details.get("event_id")
        transaction_id = event_details.get("transaction_id")
        order_id = event_details.get("order_id")

        # Extract tenant_id from payload
        tenant_id = None
        if parsed_payload:
            tenant_id = await webhook_service.extract_tenant_id_from_payload(pos_system, parsed_payload)

        # Check for duplicate webhook
        is_duplicate = False
        if event_id:
            is_duplicate, _ = await webhook_service.check_duplicate_webhook(
                pos_system, event_id, tenant_id
            )

        # Verify webhook signature if tenant is identified
        is_signature_valid = None
        if signature and tenant_id:
            webhook_secret = await webhook_service.get_webhook_secret(pos_system, tenant_id)
            if webhook_secret:
                is_signature_valid = await webhook_service.verify_webhook_signature(
                    pos_system, raw_payload, signature, webhook_secret
                )

                if not is_signature_valid:
                    logger.warning("Webhook signature verification failed",
                                   pos_system=pos_system,
                                   tenant_id=str(tenant_id))

        # Log webhook receipt to database
        webhook_log = await webhook_service.log_webhook(
            pos_system=pos_system,
            webhook_type=webhook_type,
            method=method,
            url_path=url_path,
            query_params=query_params,
            headers=headers,
            raw_payload=raw_payload,
            payload_size=payload_size,
            content_type=content_type,
            signature=signature,
            is_signature_valid=is_signature_valid,
            source_ip=client_ip,
            event_id=event_id,
            tenant_id=tenant_id,
            transaction_id=transaction_id,
            order_id=order_id
        )

        # Mark as duplicate if detected
        if is_duplicate:
            await webhook_service.update_webhook_status(
                webhook_log.id,
                status="duplicate",
                error_message="Duplicate event already processed"
            )
            logger.info("Duplicate webhook ignored", event_id=event_id)
            return _get_webhook_response(pos_system, success=True)

        # Queue for async processing via RabbitMQ
        try:
            from shared.messaging import get_rabbitmq_client
            import uuid as uuid_module

            rabbitmq_client = get_rabbitmq_client()
            if rabbitmq_client:
                # Publish POS transaction event for async processing
                event_payload = {
                    "event_id": str(uuid_module.uuid4()),
                    "event_type": f"pos.{webhook_type}",
                    "timestamp": datetime.utcnow().isoformat(),
                    "tenant_id": str(tenant_id) if tenant_id else None,
                    "data": {
                        "webhook_log_id": str(webhook_log.id),
                        "pos_system": pos_system,
                        "webhook_type": webhook_type,
                        "payload": parsed_payload,
                        "event_id": event_id
                    }
                }

                await rabbitmq_client.publish_event(
                    exchange_name="pos.events",
                    routing_key=f"pos.{webhook_type}",
                    event_data=event_payload
                )

                logger.info("POS transaction queued for async processing",
                            event_id=event_payload["event_id"],
                            webhook_log_id=str(webhook_log.id))

                # Update status to queued
                processing_duration_ms = int((datetime.utcnow() - start_time).total_seconds() * 1000)
                await webhook_service.update_webhook_status(
                    webhook_log.id,
                    status="queued",
                    processing_duration_ms=processing_duration_ms
                )
            else:
                logger.warning("RabbitMQ client not available, marking as received only")
                processing_duration_ms = int((datetime.utcnow() - start_time).total_seconds() * 1000)
                await webhook_service.update_webhook_status(
                    webhook_log.id,
                    status="received",
                    processing_duration_ms=processing_duration_ms
                )

        except Exception as queue_error:
            logger.error("Failed to queue POS transaction for async processing",
                         error=str(queue_error),
                         webhook_log_id=str(webhook_log.id))
            # Mark as received even if queuing fails
            processing_duration_ms = int((datetime.utcnow() - start_time).total_seconds() * 1000)
            await webhook_service.update_webhook_status(
                webhook_log.id,
                status="received",
                processing_duration_ms=processing_duration_ms
            )

        logger.info("Webhook processed and queued successfully",
                    pos_system=pos_system,
                    webhook_type=webhook_type,
                    event_id=event_id,
                    tenant_id=str(tenant_id) if tenant_id else None,
                    webhook_log_id=str(webhook_log.id))

        # Return appropriate response based on POS system requirements
        return _get_webhook_response(pos_system, success=True)

    except HTTPException:
        raise
    except Exception as e:
        logger.error("Webhook processing failed",
                     error=str(e),
                     pos_system=pos_system,
                     exc_info=True)

        # Return 500 to trigger POS system retry
        raise HTTPException(status_code=500, detail="Webhook processing failed")


def _get_webhook_response(pos_system: str, success: bool = True) -> Dict[str, Any]:
    """Get POS-specific webhook response format"""
    if pos_system.lower() == "square":
        return {"status": "success" if success else "error"}
    elif pos_system.lower() == "toast":
        return {"success": success}
    elif pos_system.lower() == "lightspeed":
        return {"received": success}
    else:
        return {"status": "received" if success else "error"}


@router.get(
    route_builder.build_webhook_route("{pos_system}/status"),
    response_model=dict
)
async def get_webhook_status(pos_system: str = Path(..., description="POS system name")):
    """Get webhook endpoint status for a POS system"""
    try:
        supported_systems = ["square", "toast", "lightspeed"]
        if pos_system.lower() not in supported_systems:
            raise HTTPException(status_code=400, detail=f"Unsupported POS system: {pos_system}")

        return {
            "pos_system": pos_system,
            "status": "active",
            "endpoint": f"/api/v1/webhooks/{pos_system}",
            "supported_events": _get_supported_events(pos_system),
            "last_received": None,
            "total_received": 0
        }
    except HTTPException:
        # Re-raise so the 400 above is not swallowed into a 500
        raise
    except Exception as e:
        logger.error("Failed to get webhook status", error=str(e), pos_system=pos_system)
        raise HTTPException(status_code=500, detail=f"Failed to get webhook status: {str(e)}")


def _get_supported_events(pos_system: str) -> Dict[str, Any]:
    """Get supported webhook events for each POS system"""
    events = {
        "square": [
            "payment.created",
            "payment.updated",
            "order.created",
            "order.updated",
            "order.fulfilled",
            "inventory.count.updated"
        ],
        "toast": [
            "OrderCreated",
            "OrderUpdated",
            "OrderPaid",
            "OrderCanceled",
            "OrderVoided"
        ],
        "lightspeed": [
            "order.created",
            "order.updated",
            "order.paid",
            "sale.created",
            "sale.updated"
        ]
    }

    return {
        "events": events.get(pos_system.lower(), []),
        "format": "JSON",
        "authentication": "signature_verification"
    }


# ============================================================================
# Tenant Data Deletion Operations (Internal Service Only)
# ============================================================================

@router.delete(
    route_builder.build_base_route("tenant/{tenant_id}", include_tenant_prefix=False),
    response_model=dict
)
@service_only_access
async def delete_tenant_data(
    tenant_id: str = Path(..., description="Tenant ID to delete data for"),
    current_user: dict = Depends(get_current_user_dep),
    db=Depends(get_db)
):
    """
    Delete all POS data for a tenant (internal service only)

    This endpoint is called by the orchestrator during tenant deletion.
    It permanently deletes all POS-related data including:
    - POS configurations
    - POS transactions and items
    - Webhook logs
    - Sync logs
    - Audit logs

    **WARNING**: This operation is irreversible!

    Returns:
        Deletion summary with counts of deleted records
    """
    try:
        logger.info("pos.tenant_deletion.api_called", tenant_id=tenant_id)

        deletion_service = POSTenantDeletionService(db)
        result = await deletion_service.safe_delete_tenant_data(tenant_id)

        if not result.success:
            raise HTTPException(
                status_code=500,
                detail=f"Tenant data deletion failed: {', '.join(result.errors)}"
            )

        return {
            "message": "Tenant data deletion completed successfully",
            "summary": result.to_dict()
        }

    except HTTPException:
        raise
    except Exception as e:
        logger.error("pos.tenant_deletion.api_error",
                     tenant_id=tenant_id,
                     error=str(e),
                     exc_info=True)
        raise HTTPException(
            status_code=500,
            detail=f"Failed to delete tenant data: {str(e)}"
        )


@router.get(
    route_builder.build_base_route("tenant/{tenant_id}/deletion-preview", include_tenant_prefix=False),
    response_model=dict
)
@service_only_access
async def preview_tenant_data_deletion(
    tenant_id: str = Path(..., description="Tenant ID to preview deletion for"),
    current_user: dict = Depends(get_current_user_dep),
    db=Depends(get_db)
):
    """
    Preview what data would be deleted for a tenant (dry run)

    This endpoint shows counts of all data that would be deleted
    without actually deleting anything. Useful for:
    - Confirming deletion scope before execution
    - Auditing and compliance
    - Troubleshooting

    Returns:
        Dictionary with entity names and their counts
    """
    try:
        logger.info("pos.tenant_deletion.preview_called", tenant_id=tenant_id)

        deletion_service = POSTenantDeletionService(db)
        preview = await deletion_service.get_tenant_data_preview(tenant_id)

        total_records = sum(preview.values())

        return {
            "tenant_id": tenant_id,
            "service": "pos",
            "preview": preview,
            "total_records": total_records,
            "warning": "These records will be permanently deleted and cannot be recovered"
        }

    except Exception as e:
        logger.error("pos.tenant_deletion.preview_error",
                     tenant_id=tenant_id,
                     error=str(e),
                     exc_info=True)
        raise HTTPException(
            status_code=500,
            detail=f"Failed to preview tenant data deletion: {str(e)}"
        )
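

# --- Example (illustrative, not part of the commit) -------------------------
# How an orchestrator might drive the two endpoints above: preview first,
# then delete. The base URL, paths, and service-token header are assumptions;
# the real orchestrator presumably uses the shared service-auth machinery.

async def delete_pos_tenant(base_url: str, service_token: str, tenant_id: str) -> dict:
    import httpx  # assumed available in the orchestrator's environment

    headers = {"Authorization": f"Bearer {service_token}"}
    async with httpx.AsyncClient(base_url=base_url, headers=headers) as client:
        # Dry run: confirm the scope before destroying anything
        preview = await client.get(f"/tenant/{tenant_id}/deletion-preview")
        preview.raise_for_status()
        print("Will delete:", preview.json()["preview"])

        # Irreversible deletion
        resp = await client.delete(f"/tenant/{tenant_id}")
        resp.raise_for_status()
        return resp.json()["summary"]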


# ================================================================
# POS TO SALES SYNC ENDPOINTS
# ================================================================

@router.post(
    "/tenants/{tenant_id}/pos/transactions/{transaction_id}/sync-to-sales",
    summary="Sync single transaction to sales",
    description="Manually sync a specific POS transaction to the sales service"
)
async def sync_transaction_to_sales(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    transaction_id: UUID = Path(..., description="Transaction ID to sync"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep)
):
    """
    Sync a single POS transaction to the sales service

    This endpoint:
    - Creates sales records for each item in the transaction
    - Automatically decreases inventory stock
    - Updates sync status flags
    - Returns detailed sync results
    """
    try:
        from app.services.pos_transaction_service import POSTransactionService

        transaction_service = POSTransactionService()

        result = await transaction_service.sync_transaction_to_sales(
            transaction_id=transaction_id,
            tenant_id=tenant_id
        )

        if result.get("success"):
            logger.info("Transaction synced to sales via API",
                        transaction_id=transaction_id,
                        tenant_id=tenant_id,
                        user_id=current_user.get("user_id"))

            return {
                "success": True,
                "message": "Transaction synced successfully",
                **result
            }
        else:
            logger.warning("Transaction sync failed via API",
                           transaction_id=transaction_id,
                           tenant_id=tenant_id,
                           error=result.get("error"))

            raise HTTPException(
                status_code=400,
                detail=result.get("error", "Failed to sync transaction")
            )

    except HTTPException:
        raise
    except Exception as e:
        logger.error("Failed to sync transaction to sales",
                     error=str(e),
                     transaction_id=transaction_id,
                     tenant_id=tenant_id,
                     exc_info=True)
        raise HTTPException(
            status_code=500,
            detail=f"Failed to sync transaction: {str(e)}"
        )


@router.post(
    "/tenants/{tenant_id}/pos/transactions/sync-all-to-sales",
    summary="Batch sync unsynced transactions",
    description="Sync all unsynced POS transactions to the sales service"
)
async def sync_all_transactions_to_sales(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    limit: int = Query(50, ge=1, le=200, description="Max transactions to sync in one batch"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep)
):
    """
    Batch sync all unsynced POS transactions to the sales service

    This endpoint:
    - Finds all unsynced completed transactions
    - Syncs each one to the sales service
    - Creates sales records and decreases inventory
    - Returns a summary with success/failure counts

    Use this to:
    - Manually trigger sync after POS webhooks are received
    - Recover from sync failures
    - Perform the initial migration of historical POS data
    """
    try:
        from app.services.pos_transaction_service import POSTransactionService

        transaction_service = POSTransactionService()

        result = await transaction_service.sync_unsynced_transactions(
            tenant_id=tenant_id,
            limit=limit
        )

        logger.info("Batch sync completed via API",
                    tenant_id=tenant_id,
                    total=result.get("total_transactions"),
                    synced=result.get("synced"),
                    failed=result.get("failed"),
                    user_id=current_user.get("user_id"))

        return {
            "success": True,
            "message": f"Synced {result.get('synced')} of {result.get('total_transactions')} transactions",
            **result
        }

    except Exception as e:
        logger.error("Failed to batch sync transactions to sales",
                     error=str(e),
                     tenant_id=tenant_id,
                     exc_info=True)
        raise HTTPException(
            status_code=500,
            detail=f"Failed to batch sync transactions: {str(e)}"
        )
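

# --- Example (illustrative, not part of the commit) -------------------------
# Draining a large backlog with the batch endpoint above: keep calling with
# the maximum batch size until a pass syncs nothing. The base URL and auth
# header are assumptions for a local deployment.

async def drain_unsynced(base_url: str, token: str, tenant_id: str) -> int:
    import httpx  # assumed available

    total_synced = 0
    async with httpx.AsyncClient(base_url=base_url,
                                 headers={"Authorization": f"Bearer {token}"}) as client:
        while True:
            resp = await client.post(
                f"/tenants/{tenant_id}/pos/transactions/sync-all-to-sales",
                params={"limit": 200},
            )
            resp.raise_for_status()
            result = resp.json()
            synced = result.get("synced", 0)
            total_synced += synced
            if synced == 0:
                break  # nothing left (or nothing syncable) in this pass
    return total_synced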


@router.get(
    "/tenants/{tenant_id}/pos/transactions/sync-status",
    summary="Get sync status summary",
    description="Get summary of synced vs unsynced transactions"
)
async def get_transaction_sync_status(
    # Distinct name from get_sync_status above to avoid redefining it in this module
    tenant_id: UUID = Path(..., description="Tenant ID"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep)
):
    """
    Get sync status summary for POS transactions

    Returns counts of:
    - Total completed transactions
    - Synced transactions
    - Unsynced transactions
    - Failed sync attempts
    """
    try:
        from app.services.pos_transaction_service import POSTransactionService

        transaction_service = POSTransactionService()

        # Get counts for different sync states
        total_completed = await transaction_service.count_transactions_by_tenant(
            tenant_id=tenant_id,
            status="completed"
        )

        synced = await transaction_service.count_transactions_by_tenant(
            tenant_id=tenant_id,
            status="completed",
            is_synced=True
        )

        unsynced = await transaction_service.count_transactions_by_tenant(
            tenant_id=tenant_id,
            status="completed",
            is_synced=False
        )

        return {
            "total_completed_transactions": total_completed,
            "synced_to_sales": synced,
            "pending_sync": unsynced,
            "sync_rate": round((synced / total_completed * 100) if total_completed > 0 else 0, 2)
        }

    except Exception as e:
        logger.error("Failed to get sync status",
                     error=str(e),
                     tenant_id=tenant_id,
                     exc_info=True)
        raise HTTPException(
            status_code=500,
            detail=f"Failed to get sync status: {str(e)}"
        )
148
services/pos/app/api/transactions.py
Normal file
@@ -0,0 +1,148 @@
"""
|
||||
POS Transactions API Endpoints
|
||||
ATOMIC layer - Basic CRUD operations for POS transactions
|
||||
"""
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Path, Query
|
||||
from typing import Optional
|
||||
from uuid import UUID
|
||||
from datetime import datetime
|
||||
from decimal import Decimal
|
||||
import structlog
|
||||
|
||||
from app.core.database import get_db
|
||||
from shared.auth.decorators import get_current_user_dep
|
||||
from shared.auth.access_control import require_user_role
|
||||
from shared.routing import RouteBuilder
|
||||
from app.services.pos_transaction_service import POSTransactionService
|
||||
from app.schemas.pos_transaction import (
|
||||
POSTransactionResponse,
|
||||
POSTransactionListResponse,
|
||||
POSTransactionDashboardSummary
|
||||
)
|
||||
|
||||
router = APIRouter()
|
||||
logger = structlog.get_logger()
|
||||
route_builder = RouteBuilder('pos')
|
||||
|
||||
|
||||
@router.get(
|
||||
route_builder.build_base_route("transactions"),
|
||||
response_model=POSTransactionListResponse
|
||||
)
|
||||
@require_user_role(['viewer', 'member', 'admin', 'owner'])
|
||||
async def list_pos_transactions(
|
||||
tenant_id: UUID = Path(...),
|
||||
pos_system: Optional[str] = Query(None),
|
||||
start_date: Optional[datetime] = Query(None),
|
||||
end_date: Optional[datetime] = Query(None),
|
||||
status: Optional[str] = Query(None),
|
||||
is_synced: Optional[bool] = Query(None),
|
||||
limit: int = Query(50, ge=1, le=200),
|
||||
offset: int = Query(0, ge=0),
|
||||
current_user: dict = Depends(get_current_user_dep),
|
||||
db=Depends(get_db)
|
||||
):
|
||||
"""List POS transactions for a tenant"""
|
||||
try:
|
||||
service = POSTransactionService()
|
||||
|
||||
transactions = await service.get_transactions_by_tenant(
|
||||
tenant_id=tenant_id,
|
||||
pos_system=pos_system,
|
||||
start_date=start_date,
|
||||
end_date=end_date,
|
||||
status=status,
|
||||
is_synced=is_synced,
|
||||
skip=offset,
|
||||
limit=limit
|
||||
)
|
||||
|
||||
total = await service.count_transactions_by_tenant(
|
||||
tenant_id=tenant_id,
|
||||
pos_system=pos_system,
|
||||
start_date=start_date,
|
||||
end_date=end_date,
|
||||
status=status,
|
||||
is_synced=is_synced
|
||||
)
|
||||
|
||||
# Get sync metrics for summary
|
||||
sync_metrics = await service.get_sync_metrics(tenant_id)
|
||||
|
||||
# Calculate summary
|
||||
total_amount = sum(float(t.total_amount) for t in transactions if t.status == "completed")
|
||||
|
||||
has_more = (offset + limit) < total
|
||||
|
||||
return POSTransactionListResponse(
|
||||
transactions=transactions,
|
||||
total=total,
|
||||
has_more=has_more,
|
||||
summary={
|
||||
"total_amount": total_amount,
|
||||
"transaction_count": len(transactions),
|
||||
"sync_status": sync_metrics["sync_status"]
|
||||
}
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error("Failed to list POS transactions", error=str(e), tenant_id=tenant_id)
|
||||
raise HTTPException(status_code=500, detail=f"Failed to list transactions: {str(e)}")
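

# --- Example (illustrative, not part of the commit) -------------------------
# Paging through the listing endpoint above using has_more. The base URL,
# token, and the exact route shape produced by RouteBuilder are assumptions.

def fetch_all_transactions(base_url: str, token: str, tenant_id: str) -> list:
    import httpx  # assumed available

    transactions, offset, limit = [], 0, 200
    with httpx.Client(base_url=base_url,
                      headers={"Authorization": f"Bearer {token}"}) as client:
        while True:
            resp = client.get(
                f"/tenants/{tenant_id}/pos/transactions",
                params={"offset": offset, "limit": limit, "status": "completed"},
            )
            resp.raise_for_status()
            page = resp.json()
            transactions.extend(page["transactions"])
            if not page["has_more"]:
                return transactions
            offset += limit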


@router.get(
    route_builder.build_resource_detail_route("transactions", "transaction_id"),
    response_model=POSTransactionResponse
)
@require_user_role(['viewer', 'member', 'admin', 'owner'])
async def get_pos_transaction(
    tenant_id: UUID = Path(...),
    transaction_id: UUID = Path(...),
    current_user: dict = Depends(get_current_user_dep),
    db=Depends(get_db)
):
    """Get a specific POS transaction"""
    try:
        service = POSTransactionService()

        transaction = await service.get_transaction_with_items(
            transaction_id=transaction_id,
            tenant_id=tenant_id
        )

        if not transaction:
            raise HTTPException(status_code=404, detail="Transaction not found")

        return transaction
    except HTTPException:
        raise
    except Exception as e:
        logger.error("Failed to get POS transaction", error=str(e),
                     tenant_id=tenant_id, transaction_id=transaction_id)
        raise HTTPException(status_code=500, detail=f"Failed to get transaction: {str(e)}")


@router.get(
    route_builder.build_operations_route("transactions-dashboard"),
    response_model=POSTransactionDashboardSummary
)
@require_user_role(['viewer', 'member', 'admin', 'owner'])
async def get_transactions_dashboard(
    tenant_id: UUID = Path(...),
    current_user: dict = Depends(get_current_user_dep),
    db=Depends(get_db)
):
    """Get dashboard summary for POS transactions"""
    try:
        service = POSTransactionService()

        summary = await service.get_dashboard_summary(tenant_id)

        logger.info("Transactions dashboard retrieved",
                    tenant_id=str(tenant_id),
                    total_today=summary.total_transactions_today)

        return summary
    except Exception as e:
        logger.error("Failed to get transactions dashboard", error=str(e), tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to get dashboard: {str(e)}")
583
services/pos/app/consumers/pos_event_consumer.py
Normal file
@@ -0,0 +1,583 @@
"""
|
||||
POS Event Consumer
|
||||
Processes POS webhook events from RabbitMQ queue
|
||||
Handles sales transactions, refunds, and inventory updates from various POS systems
|
||||
"""
|
||||
import json
|
||||
import structlog
|
||||
from typing import Dict, Any, Optional
|
||||
from datetime import datetime
|
||||
from decimal import Decimal
|
||||
|
||||
from shared.messaging import RabbitMQClient
|
||||
from app.services.webhook_service import WebhookService
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
|
||||
class POSEventConsumer:
|
||||
"""
|
||||
Consumes POS webhook events from RabbitMQ and processes them
|
||||
Supports multiple POS systems: Square, Shopify, Toast, etc.
|
||||
"""
|
||||
|
||||
def __init__(self, db_session: AsyncSession):
|
||||
self.db_session = db_session
|
||||
self.webhook_service = WebhookService()
|
||||
|
||||
async def consume_pos_events(
|
||||
self,
|
||||
rabbitmq_client: RabbitMQClient
|
||||
):
|
||||
"""
|
||||
Start consuming POS events from RabbitMQ
|
||||
"""
|
||||
async def process_message(message):
|
||||
"""Process a single POS event message"""
|
||||
try:
|
||||
async with message.process():
|
||||
# Parse event data
|
||||
event_data = json.loads(message.body.decode())
|
||||
logger.info(
|
||||
"Received POS event",
|
||||
event_id=event_data.get('event_id'),
|
||||
event_type=event_data.get('event_type'),
|
||||
pos_system=event_data.get('data', {}).get('pos_system')
|
||||
)
|
||||
|
||||
# Process the event
|
||||
await self.process_pos_event(event_data)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
"Error processing POS event",
|
||||
error=str(e),
|
||||
exc_info=True
|
||||
)
|
||||
|
||||
# Start consuming events
|
||||
await rabbitmq_client.consume_events(
|
||||
exchange_name="pos.events",
|
||||
queue_name="pos.processing.queue",
|
||||
routing_key="pos.*",
|
||||
callback=process_message
|
||||
)
|
||||
|
||||
logger.info("Started consuming POS events")

    async def process_pos_event(self, event_data: Dict[str, Any]) -> bool:
        """
        Process a POS event based on type

        Args:
            event_data: Full event payload from RabbitMQ

        Returns:
            bool: True if processed successfully
        """
        try:
            data = event_data.get('data', {})
            webhook_log_id = data.get('webhook_log_id')
            pos_system = data.get('pos_system', 'unknown')
            webhook_type = data.get('webhook_type')
            payload = data.get('payload', {})
            tenant_id = event_data.get('tenant_id')

            if not webhook_log_id:
                logger.warning("POS event missing webhook_log_id", event_data=event_data)
                return False

            # Update webhook log status to processing
            await self.webhook_service.update_webhook_status(
                webhook_log_id,
                status="processing",
                notes="Event consumer processing"
            )

            # Route to appropriate handler based on webhook type
            success = False
            if webhook_type in ['sale.completed', 'transaction.completed', 'order.completed']:
                success = await self._handle_sale_completed(tenant_id, pos_system, payload)
            elif webhook_type in ['sale.refunded', 'transaction.refunded', 'order.refunded']:
                success = await self._handle_sale_refunded(tenant_id, pos_system, payload)
            elif webhook_type in ['inventory.updated', 'stock.updated']:
                success = await self._handle_inventory_updated(tenant_id, pos_system, payload)
            else:
                logger.warning("Unknown POS webhook type", webhook_type=webhook_type)
                success = True  # Mark as processed to avoid retry

            # Update webhook log with final status
            if success:
                await self.webhook_service.update_webhook_status(
                    webhook_log_id,
                    status="completed",
                    notes="Successfully processed"
                )
                logger.info(
                    "POS event processed successfully",
                    webhook_log_id=webhook_log_id,
                    webhook_type=webhook_type
                )
            else:
                await self.webhook_service.update_webhook_status(
                    webhook_log_id,
                    status="failed",
                    notes="Processing failed"
                )
                logger.error(
                    "POS event processing failed",
                    webhook_log_id=webhook_log_id,
                    webhook_type=webhook_type
                )

            return success

        except Exception as e:
            logger.error(
                "Error in process_pos_event",
                error=str(e),
                event_id=event_data.get('event_id'),
                exc_info=True
            )
            return False

    async def _handle_sale_completed(
        self,
        tenant_id: str,
        pos_system: str,
        payload: Dict[str, Any]
    ) -> bool:
        """
        Handle completed sale transaction

        Updates:
        - Inventory quantities (decrease stock)
        - Sales analytics data
        - Revenue tracking

        Args:
            tenant_id: Tenant ID
            pos_system: POS system name (square, shopify, toast, etc.)
            payload: Sale data from POS system

        Returns:
            bool: True if handled successfully
        """
        try:
            # Extract transaction data based on POS system format
            transaction_data = self._parse_sale_data(pos_system, payload)

            if not transaction_data:
                logger.warning("Failed to parse sale data", pos_system=pos_system)
                return False

            # Update inventory via inventory service client
            from shared.clients.inventory_client import InventoryServiceClient
            from shared.config.base import get_settings

            config = get_settings()
            inventory_client = InventoryServiceClient(config, "pos")

            for item in transaction_data.get('items', []):
                product_id = item.get('product_id')
                quantity = item.get('quantity', 0)
                unit_of_measure = item.get('unit_of_measure', 'units')

                if not product_id or quantity <= 0:
                    continue

                # Decrease inventory stock
                try:
                    await inventory_client.adjust_stock(
                        tenant_id=tenant_id,
                        product_id=product_id,
                        quantity=-quantity,  # Negative for sale
                        unit_of_measure=unit_of_measure,
                        reason=f"POS sale - {pos_system}",
                        reference_id=transaction_data.get('transaction_id')
                    )
                    logger.info(
                        "Inventory updated for sale",
                        product_id=product_id,
                        quantity=quantity,
                        pos_system=pos_system
                    )
                except Exception as inv_error:
                    logger.error(
                        "Failed to update inventory",
                        product_id=product_id,
                        error=str(inv_error)
                    )
                    # Continue processing other items even if one fails

            # Publish sales data to sales service via RabbitMQ
            from shared.messaging import get_rabbitmq_client
            import uuid

            rabbitmq_client = get_rabbitmq_client()
            if rabbitmq_client:
                sales_event = {
                    "event_id": str(uuid.uuid4()),
                    "event_type": "sales.transaction.completed",
                    "timestamp": datetime.utcnow().isoformat(),
                    "tenant_id": tenant_id,
                    "data": {
                        "transaction_id": transaction_data.get('transaction_id'),
                        "pos_system": pos_system,
                        "total_amount": transaction_data.get('total_amount', 0),
                        "items": transaction_data.get('items', []),
                        "payment_method": transaction_data.get('payment_method'),
                        "transaction_date": transaction_data.get('transaction_date'),
                        "customer_id": transaction_data.get('customer_id')
                    }
                }

                await rabbitmq_client.publish_event(
                    exchange_name="sales.events",
                    routing_key="sales.transaction.completed",
                    event_data=sales_event
                )

                logger.info(
                    "Published sales event",
                    event_id=sales_event["event_id"],
                    transaction_id=transaction_data.get('transaction_id')
                )

            return True

        except Exception as e:
            logger.error(
                "Error handling sale completed",
                error=str(e),
                pos_system=pos_system,
                exc_info=True
            )
            return False

    async def _handle_sale_refunded(
        self,
        tenant_id: str,
        pos_system: str,
        payload: Dict[str, Any]
    ) -> bool:
        """
        Handle refunded sale transaction

        Updates:
        - Inventory quantities (increase stock)
        - Sales analytics (negative transaction)

        Args:
            tenant_id: Tenant ID
            pos_system: POS system name
            payload: Refund data from POS system

        Returns:
            bool: True if handled successfully
        """
        try:
            # Extract refund data based on POS system format
            refund_data = self._parse_refund_data(pos_system, payload)

            if not refund_data:
                logger.warning("Failed to parse refund data", pos_system=pos_system)
                return False

            # Update inventory via inventory service client
            from shared.clients.inventory_client import InventoryServiceClient
            from shared.config.base import get_settings

            config = get_settings()
            inventory_client = InventoryServiceClient(config, "pos")

            for item in refund_data.get('items', []):
                product_id = item.get('product_id')
                quantity = item.get('quantity', 0)
                unit_of_measure = item.get('unit_of_measure', 'units')

                if not product_id or quantity <= 0:
                    continue

                # Increase inventory stock (return to stock)
                try:
                    await inventory_client.adjust_stock(
                        tenant_id=tenant_id,
                        product_id=product_id,
                        quantity=quantity,  # Positive for refund
                        unit_of_measure=unit_of_measure,
                        reason=f"POS refund - {pos_system}",
                        reference_id=refund_data.get('refund_id')
                    )
                    logger.info(
                        "Inventory updated for refund",
                        product_id=product_id,
                        quantity=quantity,
                        pos_system=pos_system
                    )
                except Exception as inv_error:
                    logger.error(
                        "Failed to update inventory for refund",
                        product_id=product_id,
                        error=str(inv_error)
                    )

            # Publish refund event to sales service
            from shared.messaging import get_rabbitmq_client
            import uuid

            rabbitmq_client = get_rabbitmq_client()
            if rabbitmq_client:
                refund_event = {
                    "event_id": str(uuid.uuid4()),
                    "event_type": "sales.transaction.refunded",
                    "timestamp": datetime.utcnow().isoformat(),
                    "tenant_id": tenant_id,
                    "data": {
                        "refund_id": refund_data.get('refund_id'),
                        "original_transaction_id": refund_data.get('original_transaction_id'),
                        "pos_system": pos_system,
                        "refund_amount": refund_data.get('refund_amount', 0),
                        "items": refund_data.get('items', []),
                        "refund_date": refund_data.get('refund_date')
                    }
                }

                await rabbitmq_client.publish_event(
                    exchange_name="sales.events",
                    routing_key="sales.transaction.refunded",
                    event_data=refund_event
                )

                logger.info(
                    "Published refund event",
                    event_id=refund_event["event_id"],
                    refund_id=refund_data.get('refund_id')
                )

            return True

        except Exception as e:
            logger.error(
                "Error handling sale refunded",
                error=str(e),
                pos_system=pos_system,
                exc_info=True
            )
            return False

    async def _handle_inventory_updated(
        self,
        tenant_id: str,
        pos_system: str,
        payload: Dict[str, Any]
    ) -> bool:
        """
        Handle inventory update from POS system

        Syncs inventory levels from the POS to our system.

        Args:
            tenant_id: Tenant ID
            pos_system: POS system name
            payload: Inventory data from POS system

        Returns:
            bool: True if handled successfully
        """
        try:
            # Extract inventory data
            inventory_data = self._parse_inventory_data(pos_system, payload)

            if not inventory_data:
                logger.warning("Failed to parse inventory data", pos_system=pos_system)
                return False

            # Update inventory via inventory service client
            from shared.clients.inventory_client import InventoryServiceClient
            from shared.config.base import get_settings

            config = get_settings()
            inventory_client = InventoryServiceClient(config, "pos")

            for item in inventory_data.get('items', []):
                product_id = item.get('product_id')
                new_quantity = item.get('quantity', 0)

                if not product_id:
                    continue

                # Sync inventory level
                try:
                    await inventory_client.sync_stock_level(
                        tenant_id=tenant_id,
                        product_id=product_id,
                        quantity=new_quantity,
                        source=f"POS sync - {pos_system}"
                    )
                    logger.info(
                        "Inventory synced from POS",
                        product_id=product_id,
                        new_quantity=new_quantity,
                        pos_system=pos_system
                    )
                except Exception as inv_error:
                    logger.error(
                        "Failed to sync inventory",
                        product_id=product_id,
                        error=str(inv_error)
                    )

            return True

        except Exception as e:
            logger.error(
                "Error handling inventory updated",
                error=str(e),
                pos_system=pos_system,
                exc_info=True
            )
            return False

    def _parse_sale_data(self, pos_system: str, payload: Dict[str, Any]) -> Optional[Dict[str, Any]]:
        """
        Parse sale data from various POS system formats

        Args:
            pos_system: POS system name
            payload: Raw payload from POS webhook

        Returns:
            Normalized transaction data
        """
        try:
            if pos_system.lower() == 'square':
                return self._parse_square_sale(payload)
            elif pos_system.lower() == 'shopify':
                return self._parse_shopify_sale(payload)
            elif pos_system.lower() == 'toast':
                return self._parse_toast_sale(payload)
            else:
                # Generic parser for custom POS systems
                return self._parse_generic_sale(payload)

        except Exception as e:
            logger.error("Error parsing sale data", pos_system=pos_system, error=str(e))
            return None

    def _parse_square_sale(self, payload: Dict[str, Any]) -> Dict[str, Any]:
        """Parse Square POS sale format"""
        payment = payload.get('payment', {})
        order = payment.get('order', {})
        line_items = order.get('line_items', [])

        items = []
        for item in line_items:
            items.append({
                'product_id': item.get('catalog_object_id'),
                'product_name': item.get('name'),
                'quantity': float(item.get('quantity', 1)),
                # Square money amounts are integer minor units (cents); convert to currency units
                'unit_price': float(item.get('base_price_money', {}).get('amount', 0)) / 100,
                'unit_of_measure': 'units'
            })

        return {
            'transaction_id': payment.get('id'),
            'total_amount': float(payment.get('amount_money', {}).get('amount', 0)) / 100,
            'items': items,
            'payment_method': payment.get('card_details', {}).get('card', {}).get('card_brand', 'unknown'),
            'transaction_date': payment.get('created_at'),
            'customer_id': payment.get('customer_id')
        }
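
    # Example (illustrative): given a trimmed Square payment payload like
    #   {"payment": {"id": "PAY1",
    #                "amount_money": {"amount": 850, "currency": "USD"},
    #                "order": {"line_items": [
    #                    {"catalog_object_id": "SKU1", "name": "Croissant",
    #                     "quantity": "2",
    #                     "base_price_money": {"amount": 425}}]}}}
    # the parser above returns total_amount 8.50 and one item priced 4.25.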

    def _parse_shopify_sale(self, payload: Dict[str, Any]) -> Dict[str, Any]:
        """Parse Shopify POS sale format"""
        line_items = payload.get('line_items', [])

        items = []
        for item in line_items:
            items.append({
                'product_id': str(item.get('product_id')),
                'product_name': item.get('title'),
                'quantity': float(item.get('quantity', 1)),
                'unit_price': float(item.get('price', 0)),
                'unit_of_measure': 'units'
            })

        return {
            'transaction_id': str(payload.get('id')),
            'total_amount': float(payload.get('total_price', 0)),
            'items': items,
            # Guard against an explicitly empty gateway list
            'payment_method': (payload.get('payment_gateway_names') or ['unknown'])[0],
            'transaction_date': payload.get('created_at'),
            'customer_id': str(payload.get('customer', {}).get('id')) if payload.get('customer') else None
        }

    def _parse_toast_sale(self, payload: Dict[str, Any]) -> Dict[str, Any]:
        """Parse Toast POS sale format"""
        selections = payload.get('selections', [])

        items = []
        for item in selections:
            items.append({
                'product_id': item.get('guid'),
                'product_name': item.get('displayName'),
                'quantity': float(item.get('quantity', 1)),
                'unit_price': float(item.get('preDiscountPrice', 0)),
                'unit_of_measure': 'units'
            })

        return {
            'transaction_id': payload.get('guid'),
            'total_amount': float(payload.get('totalAmount', 0)),
            'items': items,
            # Guard against an explicitly empty payments list
            'payment_method': (payload.get('payments') or [{}])[0].get('type', 'unknown'),
            'transaction_date': payload.get('closedDate'),
            'customer_id': payload.get('customer', {}).get('guid')
        }

    def _parse_generic_sale(self, payload: Dict[str, Any]) -> Dict[str, Any]:
        """Parse generic/custom POS sale format"""
        items = []
        for item in payload.get('items', []):
            items.append({
                'product_id': item.get('product_id') or item.get('id'),
                'product_name': item.get('name') or item.get('description'),
                'quantity': float(item.get('quantity', 1)),
                'unit_price': float(item.get('price', 0)),
                'unit_of_measure': item.get('unit_of_measure', 'units')
            })

        return {
            'transaction_id': payload.get('transaction_id') or payload.get('id'),
            'total_amount': float(payload.get('total', 0)),
            'items': items,
            'payment_method': payload.get('payment_method', 'unknown'),
            'transaction_date': payload.get('timestamp') or payload.get('created_at'),
            'customer_id': payload.get('customer_id')
        }
|
||||
|
||||
def _parse_refund_data(self, pos_system: str, payload: Dict[str, Any]) -> Optional[Dict[str, Any]]:
|
||||
"""Parse refund data from various POS systems"""
|
||||
# Similar parsing logic as sales, but for refunds
|
||||
# Simplified for now - would follow same pattern as _parse_sale_data
|
||||
return {
|
||||
'refund_id': payload.get('refund_id') or payload.get('id'),
|
||||
'original_transaction_id': payload.get('original_transaction_id'),
|
||||
'refund_amount': float(payload.get('amount', 0)),
|
||||
'items': payload.get('items', []),
|
||||
'refund_date': payload.get('refund_date') or payload.get('created_at')
|
||||
}
|
||||
|
||||
def _parse_inventory_data(self, pos_system: str, payload: Dict[str, Any]) -> Optional[Dict[str, Any]]:
|
||||
"""Parse inventory data from various POS systems"""
|
||||
return {
|
||||
'items': payload.get('items', [])
|
||||
}
|
||||
|
||||
|
||||
# Factory function for creating consumer instance
|
||||
def create_pos_event_consumer(db_session: AsyncSession) -> POSEventConsumer:
|
||||
"""Create POS event consumer instance"""
|
||||
return POSEventConsumer(db_session)
|
||||
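The parsers above normalize each provider's payload into the same flat sale dict. A minimal sketch of that normalization for the generic format, assuming a consumer built via `create_pos_event_consumer` (the payload values are invented for illustration):

```python
# Hypothetical custom-POS payload; all values are illustrative
generic_payload = {
    "id": "txn-123",
    "total": 7.50,
    "payment_method": "cash",
    "timestamp": "2024-01-18T09:30:00Z",
    "items": [
        {"id": "sku-croissant", "name": "Croissant", "quantity": 3, "price": 2.50},
    ],
}

# consumer = create_pos_event_consumer(db_session)  # db_session comes from the service's DI
normalized = consumer._parse_generic_sale(generic_payload)
# -> {'transaction_id': 'txn-123', 'total_amount': 7.5, 'payment_method': 'cash',
#     'transaction_date': '2024-01-18T09:30:00Z', 'customer_id': None,
#     'items': [{'product_id': 'sku-croissant', 'product_name': 'Croissant',
#                'quantity': 3.0, 'unit_price': 2.5, 'unit_of_measure': 'units'}]}
```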
1
services/pos/app/core/__init__.py
Normal file
@@ -0,0 +1 @@
# Core configuration and utilities
192
services/pos/app/core/config.py
Normal file
@@ -0,0 +1,192 @@
# services/pos/app/core/config.py
"""
POS Integration Service Configuration
"""

import os
from typing import List, Optional
from pydantic import Field
from shared.config.base import BaseServiceSettings


class Settings(BaseServiceSettings):
    """POS Integration service settings extending base configuration"""

    # Override service-specific settings
    SERVICE_NAME: str = "pos-service"
    VERSION: str = "1.0.0"
    APP_NAME: str = "Bakery POS Integration Service"
    DESCRIPTION: str = "Integration service for external POS systems (Square, Toast, Lightspeed)"

    # API Configuration
    API_V1_STR: str = "/api/v1"

    # Database configuration (secure approach - build from components)
    @property
    def DATABASE_URL(self) -> str:
        """Build database URL from secure components"""
        # Try complete URL first (for backward compatibility)
        complete_url = os.getenv("POS_DATABASE_URL")
        if complete_url:
            return complete_url

        # Build from components (secure approach)
        user = os.getenv("POS_DB_USER", "pos_user")
        password = os.getenv("POS_DB_PASSWORD", "pos_pass123")
        host = os.getenv("POS_DB_HOST", "localhost")
        port = os.getenv("POS_DB_PORT", "5432")
        name = os.getenv("POS_DB_NAME", "pos_db")

        return f"postgresql+asyncpg://{user}:{password}@{host}:{port}/{name}"

    # POS-specific Redis database
    REDIS_DB: int = Field(default=5, env="POS_REDIS_DB")

    # ================================================================
    # POS PROVIDER CONFIGURATIONS
    # ================================================================

    # Square POS Configuration
    SQUARE_APPLICATION_ID: Optional[str] = Field(default=None, env="SQUARE_APPLICATION_ID")
    SQUARE_ACCESS_TOKEN: Optional[str] = Field(default=None, env="SQUARE_ACCESS_TOKEN")
    SQUARE_WEBHOOK_SIGNATURE_KEY: Optional[str] = Field(default=None, env="SQUARE_WEBHOOK_SIGNATURE_KEY")
    SQUARE_ENVIRONMENT: str = Field(default="sandbox", env="SQUARE_ENVIRONMENT")  # sandbox or production
    SQUARE_BASE_URL: str = "https://connect.squareup.com"
    SQUARE_SANDBOX_URL: str = "https://connect.squareupsandbox.com"

    @property
    def SQUARE_API_URL(self) -> str:
        return self.SQUARE_SANDBOX_URL if self.SQUARE_ENVIRONMENT == "sandbox" else self.SQUARE_BASE_URL

    # Toast POS Configuration
    TOAST_CLIENT_ID: Optional[str] = Field(default=None, env="TOAST_CLIENT_ID")
    TOAST_CLIENT_SECRET: Optional[str] = Field(default=None, env="TOAST_CLIENT_SECRET")
    TOAST_WEBHOOK_SECRET: Optional[str] = Field(default=None, env="TOAST_WEBHOOK_SECRET")
    TOAST_ENVIRONMENT: str = Field(default="sandbox", env="TOAST_ENVIRONMENT")  # sandbox or production
    TOAST_BASE_URL: str = "https://ws-api.toasttab.com"
    TOAST_SANDBOX_URL: str = "https://ws-sandbox-api.toasttab.com"

    @property
    def TOAST_API_URL(self) -> str:
        return self.TOAST_SANDBOX_URL if self.TOAST_ENVIRONMENT == "sandbox" else self.TOAST_BASE_URL

    # Lightspeed POS Configuration
    LIGHTSPEED_CLIENT_ID: Optional[str] = Field(default=None, env="LIGHTSPEED_CLIENT_ID")
    LIGHTSPEED_CLIENT_SECRET: Optional[str] = Field(default=None, env="LIGHTSPEED_CLIENT_SECRET")
    LIGHTSPEED_WEBHOOK_SECRET: Optional[str] = Field(default=None, env="LIGHTSPEED_WEBHOOK_SECRET")
    LIGHTSPEED_CLUSTER_ID: Optional[str] = Field(default=None, env="LIGHTSPEED_CLUSTER_ID")
    LIGHTSPEED_BASE_URL: str = "https://api-{cluster}.lightspeedhq.com"

    def get_lightspeed_api_url(self, cluster_id: Optional[str] = None) -> str:
        cluster = cluster_id or self.LIGHTSPEED_CLUSTER_ID or "us1"
        return self.LIGHTSPEED_BASE_URL.format(cluster=cluster)

    # ================================================================
    # WEBHOOK CONFIGURATION
    # ================================================================

    # Webhook Base Configuration
    WEBHOOK_BASE_URL: str = Field(default="https://your-domain.com", env="WEBHOOK_BASE_URL")
    WEBHOOK_SECRET: str = Field(default="your-webhook-secret", env="WEBHOOK_SECRET")
    WEBHOOK_TIMEOUT_SECONDS: int = Field(default=30, env="WEBHOOK_TIMEOUT_SECONDS")

    # Webhook Rate Limiting
    WEBHOOK_RATE_LIMIT_PER_MINUTE: int = Field(default=1000, env="WEBHOOK_RATE_LIMIT_PER_MINUTE")
    WEBHOOK_BURST_LIMIT: int = Field(default=100, env="WEBHOOK_BURST_LIMIT")

    # Webhook Retry Configuration
    WEBHOOK_MAX_RETRIES: int = Field(default=3, env="WEBHOOK_MAX_RETRIES")
    WEBHOOK_RETRY_DELAY_SECONDS: int = Field(default=5, env="WEBHOOK_RETRY_DELAY_SECONDS")

    # ================================================================
    # SYNC CONFIGURATION
    # ================================================================

    # Data Synchronization Settings
    SYNC_ENABLED: bool = Field(default=True, env="POS_SYNC_ENABLED")
    SYNC_INTERVAL_SECONDS: int = Field(default=300, env="POS_SYNC_INTERVAL_SECONDS")  # 5 minutes
    SYNC_BATCH_SIZE: int = Field(default=100, env="POS_SYNC_BATCH_SIZE")
    SYNC_MAX_RETRY_ATTEMPTS: int = Field(default=3, env="POS_SYNC_MAX_RETRY_ATTEMPTS")
    SYNC_RETRY_DELAY_SECONDS: int = Field(default=60, env="POS_SYNC_RETRY_DELAY_SECONDS")

    # Historical Data Sync
    HISTORICAL_SYNC_DAYS: int = Field(default=30, env="POS_HISTORICAL_SYNC_DAYS")
    INITIAL_SYNC_BATCH_SIZE: int = Field(default=50, env="POS_INITIAL_SYNC_BATCH_SIZE")

    # ================================================================
    # SECURITY & ENCRYPTION
    # ================================================================

    # API Credential Encryption
    ENCRYPTION_KEY: Optional[str] = Field(default=None, env="POS_ENCRYPTION_KEY")
    CREDENTIALS_ENCRYPTION_ENABLED: bool = Field(default=True, env="POS_CREDENTIALS_ENCRYPTION_ENABLED")

    # API Rate Limiting
    API_RATE_LIMIT_PER_MINUTE: int = Field(default=60, env="POS_API_RATE_LIMIT_PER_MINUTE")
    API_BURST_LIMIT: int = Field(default=10, env="POS_API_BURST_LIMIT")

    # ================================================================
    # CACHING CONFIGURATION
    # ================================================================

    # POS Data Cache TTL
    POS_CONFIG_CACHE_TTL: int = Field(default=3600, env="POS_CONFIG_CACHE_TTL")  # 1 hour
    POS_TRANSACTION_CACHE_TTL: int = Field(default=300, env="POS_TRANSACTION_CACHE_TTL")  # 5 minutes
    POS_PRODUCT_CACHE_TTL: int = Field(default=1800, env="POS_PRODUCT_CACHE_TTL")  # 30 minutes

    # ================================================================
    # SUPPORTED POS SYSTEMS
    # ================================================================

    SUPPORTED_POS_SYSTEMS: List[str] = ["square", "toast", "lightspeed"]

    # Default POS system for new tenants
    DEFAULT_POS_SYSTEM: str = Field(default="square", env="DEFAULT_POS_SYSTEM")

    # ================================================================
    # INTER-SERVICE COMMUNICATION
    # ================================================================

    # Override service URLs
    SALES_SERVICE_URL: str = Field(
        default="http://sales-service:8000",
        env="SALES_SERVICE_URL"
    )

    INVENTORY_SERVICE_URL: str = Field(
        default="http://inventory-service:8000",
        env="INVENTORY_SERVICE_URL"
    )

    # ================================================================
    # BUSINESS RULES
    # ================================================================

    # Transaction Processing
    MIN_TRANSACTION_AMOUNT: float = Field(default=0.01, env="POS_MIN_TRANSACTION_AMOUNT")
    MAX_TRANSACTION_AMOUNT: float = Field(default=10000.0, env="POS_MAX_TRANSACTION_AMOUNT")

    # Duplicate Detection Window (in minutes)
    DUPLICATE_DETECTION_WINDOW: int = Field(default=5, env="POS_DUPLICATE_DETECTION_WINDOW")

    # Data Retention
    TRANSACTION_RETENTION_DAYS: int = Field(default=1095, env="POS_TRANSACTION_RETENTION_DAYS")  # 3 years
    WEBHOOK_LOG_RETENTION_DAYS: int = Field(default=30, env="POS_WEBHOOK_LOG_RETENTION_DAYS")
    SYNC_LOG_RETENTION_DAYS: int = Field(default=90, env="POS_SYNC_LOG_RETENTION_DAYS")

    # ================================================================
    # MONITORING & ALERTING
    # ================================================================

    # Health Check Configuration
    POS_HEALTH_CHECK_ENABLED: bool = Field(default=True, env="POS_HEALTH_CHECK_ENABLED")
    POS_HEALTH_CHECK_INTERVAL: int = Field(default=60, env="POS_HEALTH_CHECK_INTERVAL")  # seconds

    # Alert Thresholds
    WEBHOOK_FAILURE_THRESHOLD: int = Field(default=5, env="POS_WEBHOOK_FAILURE_THRESHOLD")
    SYNC_FAILURE_THRESHOLD: int = Field(default=3, env="POS_SYNC_FAILURE_THRESHOLD")
    API_ERROR_THRESHOLD: int = Field(default=10, env="POS_API_ERROR_THRESHOLD")


# Global settings instance
settings = Settings()
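A quick sketch of how the component-based DATABASE_URL resolution behaves (the env var names are the ones read above; values are illustrative and POS_DATABASE_URL is assumed unset):

```python
import os

# Illustrative values only; in deployment these come from secret management
os.environ["POS_DB_USER"] = "pos_user"
os.environ["POS_DB_PASSWORD"] = "s3cret"
os.environ["POS_DB_HOST"] = "postgres"
os.environ["POS_DB_NAME"] = "pos_db"

from app.core.config import settings

# Components are used only when POS_DATABASE_URL is not set
assert settings.DATABASE_URL == "postgresql+asyncpg://pos_user:s3cret@postgres:5432/pos_db"
```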
85
services/pos/app/core/database.py
Normal file
@@ -0,0 +1,85 @@
# services/pos/app/core/database.py
"""
POS Integration Service Database Configuration using shared database manager
"""

import structlog
from contextlib import asynccontextmanager

from app.core.config import settings
from shared.database.base import DatabaseManager, Base

logger = structlog.get_logger()

# Create database manager instance
database_manager = DatabaseManager(
    database_url=settings.DATABASE_URL,
    service_name="pos-service",
    pool_size=settings.DB_POOL_SIZE,
    max_overflow=settings.DB_MAX_OVERFLOW,
    pool_recycle=settings.DB_POOL_RECYCLE,
    echo=settings.DB_ECHO
)


async def get_db():
    """
    Database dependency for FastAPI - using shared database manager
    """
    async for session in database_manager.get_db():
        yield session


async def init_db():
    """Initialize database tables using shared database manager"""
    try:
        logger.info("Initializing POS Integration Service database...")

        # Import all models to ensure they're registered
        from app.models import pos_config, pos_transaction, pos_webhook, pos_sync  # noqa: F401

        # Create all tables using database manager
        await database_manager.create_tables(Base.metadata)

        logger.info("POS Integration Service database initialized successfully")

    except Exception as e:
        logger.error("Failed to initialize database", error=str(e))
        raise


async def close_db():
    """Close database connections using shared database manager"""
    try:
        await database_manager.close_connections()
        logger.info("Database connections closed")
    except Exception as e:
        logger.error("Error closing database connections", error=str(e))


@asynccontextmanager
async def get_db_transaction():
    """
    Context manager for database transactions using shared database manager
    """
    async with database_manager.get_session() as session:
        try:
            async with session.begin():
                yield session
        except Exception as e:
            logger.error("Transaction error", error=str(e))
            raise


@asynccontextmanager
async def get_background_session():
    """
    Context manager for background tasks using shared database manager
    """
    async with database_manager.get_background_session() as session:
        yield session


async def health_check():
    """Database health check using shared database manager"""
    return await database_manager.health_check()
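For reference, a minimal sketch of a route consuming the get_db dependency (the endpoint path is hypothetical, not part of this commit):

```python
from fastapi import APIRouter, Depends
from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession

from app.core.database import get_db

router = APIRouter()


@router.get("/ping-db")  # hypothetical endpoint for illustration
async def ping_db(db: AsyncSession = Depends(get_db)):
    # Each request borrows a session from the shared manager and returns it afterwards
    result = await db.execute(text("SELECT 1"))
    return {"db_ok": result.scalar() == 1}
```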
1
services/pos/app/integrations/__init__.py
Normal file
@@ -0,0 +1 @@
# POS Integration providers
365
services/pos/app/integrations/base_pos_client.py
Normal file
@@ -0,0 +1,365 @@
# services/pos/app/integrations/base_pos_client.py
"""
Base POS Client
Abstract base class for all POS system integrations
"""

from abc import ABC, abstractmethod
from typing import Dict, List, Optional, Any, Tuple
from datetime import datetime
from dataclasses import dataclass

import structlog

logger = structlog.get_logger()


@dataclass
class POSCredentials:
    """POS system credentials"""
    pos_system: str
    environment: str
    api_key: Optional[str] = None
    api_secret: Optional[str] = None
    access_token: Optional[str] = None
    application_id: Optional[str] = None
    merchant_id: Optional[str] = None
    location_id: Optional[str] = None
    webhook_secret: Optional[str] = None
    additional_params: Optional[Dict[str, Any]] = None


@dataclass
class POSTransaction:
    """Standardized POS transaction"""
    external_id: str
    transaction_type: str
    status: str
    total_amount: float
    subtotal: float
    tax_amount: float
    tip_amount: float
    discount_amount: float
    currency: str
    transaction_date: datetime
    # Required fields must precede defaulted ones in a dataclass,
    # so items and raw_data are declared before the optional metadata.
    items: List['POSTransactionItem']
    raw_data: Dict[str, Any]
    payment_method: Optional[str] = None
    payment_status: Optional[str] = None
    location_id: Optional[str] = None
    location_name: Optional[str] = None
    staff_id: Optional[str] = None
    staff_name: Optional[str] = None
    customer_id: Optional[str] = None
    customer_email: Optional[str] = None
    order_type: Optional[str] = None
    table_number: Optional[str] = None
    receipt_number: Optional[str] = None
    external_order_id: Optional[str] = None


@dataclass
class POSTransactionItem:
    """Standardized POS transaction item"""
    external_id: Optional[str]
    sku: Optional[str]
    name: str
    category: Optional[str]
    quantity: float
    unit_price: float
    total_price: float
    discount_amount: float
    tax_amount: float
    modifiers: Optional[Dict[str, Any]] = None
    raw_data: Optional[Dict[str, Any]] = None


@dataclass
class POSProduct:
    """Standardized POS product"""
    external_id: str
    name: str
    sku: Optional[str]
    category: Optional[str]
    subcategory: Optional[str]
    price: float
    description: Optional[str]
    is_active: bool
    raw_data: Dict[str, Any]


@dataclass
class SyncResult:
    """Result of a sync operation"""
    success: bool
    records_processed: int
    records_created: int
    records_updated: int
    records_skipped: int
    records_failed: int
    errors: List[str]
    warnings: List[str]
    duration_seconds: float
    api_calls_made: int


class POSClientError(Exception):
    """Base exception for POS client errors"""
    pass


class POSAuthenticationError(POSClientError):
    """Authentication failed"""
    pass


class POSRateLimitError(POSClientError):
    """Rate limit exceeded"""
    pass


class POSConnectionError(POSClientError):
    """Connection to POS system failed"""
    pass


class BasePOSClient(ABC):
    """
    Abstract base class for POS system integrations

    Provides common interface for all POS providers:
    - Square, Toast, Lightspeed, etc.
    """

    def __init__(self, credentials: POSCredentials):
        self.credentials = credentials
        self.pos_system = credentials.pos_system
        self.logger = logger.bind(pos_system=self.pos_system)

    @abstractmethod
    async def test_connection(self) -> Tuple[bool, str]:
        """
        Test connection to POS system

        Returns:
            Tuple of (success: bool, message: str)
        """
        pass

    @abstractmethod
    async def get_transactions(
        self,
        start_date: datetime,
        end_date: datetime,
        location_id: Optional[str] = None,
        limit: int = 100,
        cursor: Optional[str] = None
    ) -> Tuple[List[POSTransaction], Optional[str]]:
        """
        Get transactions from POS system

        Args:
            start_date: Start date for transaction query
            end_date: End date for transaction query
            location_id: Optional location filter
            limit: Maximum number of records to return
            cursor: Pagination cursor for next page

        Returns:
            Tuple of (transactions: List[POSTransaction], next_cursor: Optional[str])
        """
        pass

    @abstractmethod
    async def get_transaction(self, transaction_id: str) -> Optional[POSTransaction]:
        """
        Get a specific transaction by ID

        Args:
            transaction_id: External transaction ID

        Returns:
            POSTransaction if found, None otherwise
        """
        pass

    @abstractmethod
    async def get_products(
        self,
        location_id: Optional[str] = None,
        limit: int = 100,
        cursor: Optional[str] = None
    ) -> Tuple[List[POSProduct], Optional[str]]:
        """
        Get products/menu items from POS system

        Args:
            location_id: Optional location filter
            limit: Maximum number of records to return
            cursor: Pagination cursor for next page

        Returns:
            Tuple of (products: List[POSProduct], next_cursor: Optional[str])
        """
        pass

    @abstractmethod
    def verify_webhook_signature(self, payload: bytes, signature: str) -> bool:
        """
        Verify webhook signature

        Args:
            payload: Raw webhook payload
            signature: Signature from webhook headers

        Returns:
            True if signature is valid
        """
        pass

    @abstractmethod
    def parse_webhook_payload(self, payload: Dict[str, Any]) -> Optional[POSTransaction]:
        """
        Parse webhook payload into standardized transaction

        Args:
            payload: Webhook payload

        Returns:
            POSTransaction if parseable, None otherwise
        """
        pass

    @abstractmethod
    def get_webhook_events(self) -> List[str]:
        """
        Get list of supported webhook events

        Returns:
            List of supported event types
        """
        pass

    @abstractmethod
    def get_rate_limits(self) -> Dict[str, Any]:
        """
        Get rate limit information

        Returns:
            Dictionary with rate limit details
        """
        pass

    # Common utility methods

    def get_pos_system(self) -> str:
        """Get POS system identifier"""
        return self.pos_system

    def get_environment(self) -> str:
        """Get environment (sandbox/production)"""
        return self.credentials.environment

    def is_production(self) -> bool:
        """Check if running in production environment"""
        return self.credentials.environment.lower() == "production"

    def log_api_call(self, method: str, endpoint: str, status_code: int, duration_ms: int):
        """Log API call for monitoring"""
        self.logger.info(
            "POS API call",
            method=method,
            endpoint=endpoint,
            status_code=status_code,
            duration_ms=duration_ms,
            environment=self.get_environment()
        )

    def log_error(self, error: Exception, context: str):
        """Log error with context"""
        self.logger.error(
            f"POS client error: {context}",
            error=str(error),
            error_type=type(error).__name__,
            pos_system=self.pos_system
        )

    async def sync_transactions(
        self,
        start_date: datetime,
        end_date: datetime,
        location_id: Optional[str] = None,
        batch_size: int = 100
    ) -> SyncResult:
        """
        Sync transactions from POS system with error handling and batching

        Args:
            start_date: Start date for sync
            end_date: End date for sync
            location_id: Optional location filter
            batch_size: Number of records per batch

        Returns:
            SyncResult with operation details
        """
        start_time = datetime.utcnow()
        result = SyncResult(
            success=False,
            records_processed=0,
            records_created=0,
            records_updated=0,
            records_skipped=0,
            records_failed=0,
            errors=[],
            warnings=[],
            duration_seconds=0,
            api_calls_made=0
        )

        try:
            cursor = None
            while True:
                try:
                    transactions, next_cursor = await self.get_transactions(
                        start_date=start_date,
                        end_date=end_date,
                        location_id=location_id,
                        limit=batch_size,
                        cursor=cursor
                    )

                    result.api_calls_made += 1
                    result.records_processed += len(transactions)

                    if not transactions:
                        break

                    # Processing the transactions is implemented by the service layer
                    self.logger.info(
                        "Synced transaction batch",
                        batch_size=len(transactions),
                        total_processed=result.records_processed
                    )

                    cursor = next_cursor
                    if not cursor:
                        break

                except Exception as e:
                    result.errors.append(f"Batch sync error: {str(e)}")
                    # Exact failure count is unknown here, so count the whole batch
                    result.records_failed += batch_size
                    self.log_error(e, "Transaction sync batch")
                    break

            result.success = len(result.errors) == 0

        except Exception as e:
            result.errors.append(f"Sync operation failed: {str(e)}")
            self.log_error(e, "Transaction sync operation")

        finally:
            end_time = datetime.utcnow()
            result.duration_seconds = (end_time - start_time).total_seconds()

        return result
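A usage sketch for driving the batched sync loop through a concrete client (the Square client defined next in this commit; the token and date range are illustrative):

```python
import asyncio
from datetime import datetime, timedelta

from app.integrations.base_pos_client import POSCredentials
from app.integrations.square_client import SquarePOSClient


async def demo_sync():
    credentials = POSCredentials(
        pos_system="square",
        environment="sandbox",
        access_token="sandbox-token",  # illustrative value
    )
    client = SquarePOSClient(credentials)

    # Pull the last 24 hours in batches of 100; pagination cursors are handled internally
    result = await client.sync_transactions(
        start_date=datetime.utcnow() - timedelta(days=1),
        end_date=datetime.utcnow(),
        batch_size=100,
    )
    print(result.records_processed, result.api_calls_made, result.errors)


asyncio.run(demo_sync())
```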
463
services/pos/app/integrations/square_client.py
Normal file
@@ -0,0 +1,463 @@
# services/pos/app/integrations/square_client.py
"""
Square POS Client
Integration with Square Point of Sale API
"""

import hashlib
import hmac
from typing import Dict, List, Optional, Any, Tuple
from datetime import datetime

import httpx
import structlog

from .base_pos_client import (
    BasePOSClient,
    POSCredentials,
    POSTransaction,
    POSTransactionItem,
    POSProduct,
    POSClientError,
    POSAuthenticationError,
    POSRateLimitError,
    POSConnectionError
)

logger = structlog.get_logger()


class SquarePOSClient(BasePOSClient):
    """Square POS API client implementation"""

    def __init__(self, credentials: POSCredentials):
        super().__init__(credentials)

        self.base_url = self._get_base_url()
        self.application_id = credentials.application_id
        self.access_token = credentials.access_token
        self.webhook_secret = credentials.webhook_secret
        self.location_id = credentials.location_id

        if not self.access_token:
            raise POSAuthenticationError("Square access token is required")

    def _get_base_url(self) -> str:
        """Get Square API base URL based on environment"""
        if self.credentials.environment.lower() == "production":
            return "https://connect.squareup.com"
        else:
            return "https://connect.squareupsandbox.com"

    def _get_headers(self) -> Dict[str, str]:
        """Get headers for Square API requests"""
        return {
            "Authorization": f"Bearer {self.access_token}",
            "Content-Type": "application/json",
            "Accept": "application/json",
            # Pin the Square API version on every request
            "Square-Version": "2024-01-18",
        }

    async def _make_request(
        self,
        method: str,
        endpoint: str,
        data: Optional[Dict] = None,
        params: Optional[Dict] = None
    ) -> Dict[str, Any]:
        """Make HTTP request to Square API with error handling"""
        url = f"{self.base_url}{endpoint}"
        headers = self._get_headers()

        start_time = datetime.utcnow()

        try:
            async with httpx.AsyncClient(timeout=30.0) as client:
                response = await client.request(
                    method=method,
                    url=url,
                    headers=headers,
                    json=data,
                    params=params
                )

                duration_ms = int((datetime.utcnow() - start_time).total_seconds() * 1000)
                self.log_api_call(method, endpoint, response.status_code, duration_ms)

                if response.status_code == 401:
                    raise POSAuthenticationError("Invalid Square access token")
                elif response.status_code == 429:
                    raise POSRateLimitError("Square API rate limit exceeded")
                elif response.status_code >= 400:
                    error_text = response.text
                    raise POSClientError(f"Square API error {response.status_code}: {error_text}")

                return response.json()

        except httpx.TimeoutException:
            raise POSConnectionError("Timeout connecting to Square API")
        except httpx.ConnectError:
            raise POSConnectionError("Failed to connect to Square API")

    async def test_connection(self) -> Tuple[bool, str]:
        """Test connection to Square API"""
        try:
            # Try to get location info
            response = await self._make_request("GET", "/v2/locations")

            locations = response.get("locations", [])
            if locations:
                return True, f"Connected successfully. Found {len(locations)} location(s)."
            else:
                return False, "Connected but no locations found"

        except POSAuthenticationError:
            return False, "Authentication failed - invalid access token"
        except POSRateLimitError:
            return False, "Rate limit exceeded"
        except POSConnectionError as e:
            return False, f"Connection failed: {str(e)}"
        except Exception as e:
            return False, f"Test failed: {str(e)}"

    async def get_transactions(
        self,
        start_date: datetime,
        end_date: datetime,
        location_id: Optional[str] = None,
        limit: int = 100,
        cursor: Optional[str] = None
    ) -> Tuple[List[POSTransaction], Optional[str]]:
        """Get transactions from Square API"""

        # Use provided location_id or fall back to configured one
        target_location = location_id or self.location_id
        if not target_location:
            # Get first available location
            locations_response = await self._make_request("GET", "/v2/locations")
            locations = locations_response.get("locations", [])
            if not locations:
                return [], None
            target_location = locations[0]["id"]

        # Build query parameters
        query = {
            "location_ids": [target_location],
            "begin_time": start_date.isoformat() + "Z",
            "end_time": end_date.isoformat() + "Z",
            "limit": min(limit, 200),  # Square max is 200
        }

        if cursor:
            query["cursor"] = cursor

        try:
            response = await self._make_request("POST", "/v2/orders/search", data={"query": query})

            orders = response.get("orders", [])
            transactions = []

            for order in orders:
                transaction = self._parse_square_order(order)
                if transaction:
                    transactions.append(transaction)

            next_cursor = response.get("cursor")
            return transactions, next_cursor

        except Exception as e:
            self.log_error(e, "Getting transactions")
            raise

    async def get_transaction(self, transaction_id: str) -> Optional[POSTransaction]:
        """Get specific transaction by ID"""
        try:
            response = await self._make_request("GET", f"/v2/orders/{transaction_id}")
            order = response.get("order")

            if order:
                return self._parse_square_order(order)
            return None

        except Exception as e:
            self.log_error(e, f"Getting transaction {transaction_id}")
            return None

    def _parse_square_order(self, order: Dict[str, Any]) -> Optional[POSTransaction]:
        """Parse Square order into standardized transaction"""
        try:
            # Extract basic transaction info
            external_id = order.get("id", "")
            state = order.get("state", "")

            # Map Square states to our standard states
            status_map = {
                "COMPLETED": "completed",
                "CANCELED": "voided",
                "DRAFT": "pending",
                "OPEN": "pending"
            }
            status = status_map.get(state, "pending")

            # Parse amounts (Square uses smallest currency unit, e.g., cents)
            total_money = order.get("total_money", {})
            total_amount = float(total_money.get("amount", 0)) / 100.0

            base_price_money = order.get("base_price_money", {})
            subtotal = float(base_price_money.get("amount", 0)) / 100.0

            total_tax_money = order.get("total_tax_money", {})
            tax_amount = float(total_tax_money.get("amount", 0)) / 100.0

            total_tip_money = order.get("total_tip_money", {})
            tip_amount = float(total_tip_money.get("amount", 0)) / 100.0

            total_discount_money = order.get("total_discount_money", {})
            discount_amount = float(total_discount_money.get("amount", 0)) / 100.0

            currency = total_money.get("currency", "USD")

            # Parse timestamps
            created_at = order.get("created_at")
            transaction_date = datetime.fromisoformat(created_at.replace("Z", "+00:00")) if created_at else datetime.utcnow()

            # Parse location info
            location_id = order.get("location_id")

            # Parse line items
            items = []
            line_items = order.get("line_items", [])

            for line_item in line_items:
                item = self._parse_square_line_item(line_item)
                if item:
                    items.append(item)

            # Parse payments for payment method
            payment_method = None
            tenders = order.get("tenders", [])
            if tenders:
                payment_method = tenders[0].get("type", "").lower()

            # Create transaction
            transaction = POSTransaction(
                external_id=external_id,
                transaction_type="sale",  # Square orders are typically sales
                status=status,
                total_amount=total_amount,
                subtotal=subtotal,
                tax_amount=tax_amount,
                tip_amount=tip_amount,
                discount_amount=discount_amount,
                currency=currency,
                transaction_date=transaction_date,
                payment_method=payment_method,
                payment_status="paid" if status == "completed" else "pending",
                location_id=location_id,
                items=items,
                raw_data=order
            )

            return transaction

        except Exception as e:
            self.log_error(e, f"Parsing Square order {order.get('id', 'unknown')}")
            return None

    def _parse_square_line_item(self, line_item: Dict[str, Any]) -> Optional[POSTransactionItem]:
        """Parse Square line item into standardized transaction item"""
        try:
            name = line_item.get("name", "Unknown Item")
            quantity = float(line_item.get("quantity", "1"))

            # Parse pricing
            item_total_money = line_item.get("item_total_money", {})
            total_price = float(item_total_money.get("amount", 0)) / 100.0

            unit_price = total_price / quantity if quantity > 0 else 0

            # Parse variations for SKU
            variation = line_item.get("catalog_object_id")
            sku = variation if variation else None

            # Parse category from item data
            item_data = line_item.get("item_data", {})
            category = item_data.get("category_name")

            # Parse modifiers
            modifiers_data = line_item.get("modifiers", [])
            modifiers = {}
            for modifier in modifiers_data:
                mod_name = modifier.get("name", "")
                mod_price = float(modifier.get("total_price_money", {}).get("amount", 0)) / 100.0
                modifiers[mod_name] = mod_price

            item = POSTransactionItem(
                external_id=line_item.get("uid"),
                sku=sku,
                name=name,
                category=category,
                quantity=quantity,
                unit_price=unit_price,
                total_price=total_price,
                discount_amount=0,  # Square handles discounts at order level
                tax_amount=0,  # Square handles taxes at order level
                modifiers=modifiers if modifiers else None,
                raw_data=line_item
            )

            return item

        except Exception as e:
            self.log_error(e, f"Parsing Square line item {line_item.get('uid', 'unknown')}")
            return None

    async def get_products(
        self,
        location_id: Optional[str] = None,
        limit: int = 100,
        cursor: Optional[str] = None
    ) -> Tuple[List[POSProduct], Optional[str]]:
        """Get products from Square Catalog API"""

        query_params = {
            "types": "ITEM",
            "limit": min(limit, 1000)  # Square catalog max
        }

        if cursor:
            query_params["cursor"] = cursor

        try:
            response = await self._make_request("GET", "/v2/catalog/list", params=query_params)

            objects = response.get("objects", [])
            products = []

            for obj in objects:
                product = self._parse_square_catalog_item(obj)
                if product:
                    products.append(product)

            next_cursor = response.get("cursor")
            return products, next_cursor

        except Exception as e:
            self.log_error(e, "Getting products")
            raise

    def _parse_square_catalog_item(self, catalog_object: Dict[str, Any]) -> Optional[POSProduct]:
        """Parse Square catalog item into standardized product"""
        try:
            item_data = catalog_object.get("item_data", {})

            external_id = catalog_object.get("id", "")
            name = item_data.get("name", "Unknown Product")
            description = item_data.get("description")
            category = item_data.get("category_name")
            is_active = not catalog_object.get("is_deleted", False)

            # Get price from first variation
            variations = item_data.get("variations", [])
            price = 0.0
            sku = None

            if variations:
                first_variation = variations[0]
                variation_data = first_variation.get("item_variation_data", {})
                price_money = variation_data.get("price_money", {})
                price = float(price_money.get("amount", 0)) / 100.0
                sku = variation_data.get("sku")

            product = POSProduct(
                external_id=external_id,
                name=name,
                sku=sku,
                category=category,
                subcategory=None,
                price=price,
                description=description,
                is_active=is_active,
                raw_data=catalog_object
            )

            return product

        except Exception as e:
            self.log_error(e, f"Parsing Square catalog item {catalog_object.get('id', 'unknown')}")
            return None

    def verify_webhook_signature(self, payload: bytes, signature: str) -> bool:
        """Verify Square webhook signature"""
        if not self.webhook_secret:
            self.logger.warning("No webhook secret configured for signature verification")
            return True  # Allow webhooks without verification if no secret

        try:
            # Square uses HMAC-SHA256
            expected_signature = hmac.new(
                self.webhook_secret.encode('utf-8'),
                payload,
                hashlib.sha256
            ).hexdigest()

            # Remove any prefix from signature
            clean_signature = signature.replace("sha256=", "")

            return hmac.compare_digest(expected_signature, clean_signature)

        except Exception as e:
            self.log_error(e, "Webhook signature verification")
            return False

    def parse_webhook_payload(self, payload: Dict[str, Any]) -> Optional[POSTransaction]:
        """Parse Square webhook payload"""
        try:
            event_type = payload.get("type")

            # Handle different Square webhook events
            if event_type in ["order.created", "order.updated", "order.fulfilled"]:
                order_data = payload.get("data", {}).get("object", {}).get("order")
                if order_data:
                    return self._parse_square_order(order_data)

            elif event_type in ["payment.created", "payment.updated"]:
                # For payment events, we might need to fetch the full order
                payment_data = payload.get("data", {}).get("object", {}).get("payment", {})
                order_id = payment_data.get("order_id")

                if order_id:
                    # Note: This would require an async call, so this is a simplified version
                    self.logger.info("Payment webhook received", order_id=order_id, event_type=event_type)

            return None

        except Exception as e:
            self.log_error(e, "Parsing webhook payload")
            return None

    def get_webhook_events(self) -> List[str]:
        """Get list of supported Square webhook events"""
        return [
            "order.created",
            "order.updated",
            "order.fulfilled",
            "payment.created",
            "payment.updated",
            "inventory.count.updated"
        ]

    def get_rate_limits(self) -> Dict[str, Any]:
        """Get Square API rate limit information"""
        return {
            "requests_per_second": 100,
            "daily_limit": 50000,
            "burst_limit": 200,
            "webhook_limit": 1000
        }
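A sketch of how a webhook endpoint would gate on verify_webhook_signature before trusting a payload; the route, the header name, and the pre-built client are assumptions for illustration:

```python
from fastapi import APIRouter, Header, HTTPException, Request

router = APIRouter()


@router.post("/webhooks/square")  # hypothetical route
async def square_webhook(request: Request, x_square_hmacsha256_signature: str = Header("")):
    raw_body = await request.body()

    # 'client' is assumed to be a SquarePOSClient built from the tenant's stored credentials
    if not client.verify_webhook_signature(raw_body, x_square_hmacsha256_signature):
        raise HTTPException(status_code=401, detail="Invalid webhook signature")

    transaction = client.parse_webhook_payload(await request.json())
    return {"accepted": transaction is not None}
```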
217
services/pos/app/jobs/sync_pos_to_sales.py
Normal file
@@ -0,0 +1,217 @@
"""
Background Job: Sync POS Transactions to Sales Service

This job runs periodically to sync unsynced POS transactions to the sales service,
which automatically decreases inventory stock levels.

Schedule: Every 5 minutes (configurable)
"""

import asyncio
from datetime import datetime
from typing import Dict, Any
import structlog

from app.services.pos_transaction_service import POSTransactionService
from app.repositories.pos_config_repository import POSConfigRepository
from app.core.database import get_db_transaction

logger = structlog.get_logger()


class POSToSalesSyncJob:
    """Background job for syncing POS transactions to sales service"""

    def __init__(self):
        self.transaction_service = POSTransactionService()
        self.batch_size = 50  # Process 50 transactions per batch
        self.max_retries = 3  # Max retry attempts for failed syncs

    async def run(self):
        """
        Main job execution method

        This method:
        1. Finds all tenants with active POS configurations
        2. For each tenant, syncs unsynced transactions
        3. Logs results and errors
        """
        start_time = datetime.utcnow()
        logger.info("Starting POS to Sales sync job")

        try:
            # Get all tenants with active POS configurations
            tenants_to_sync = await self._get_active_tenants()

            if not tenants_to_sync:
                logger.info("No active tenants found for sync")
                return {
                    "success": True,
                    "tenants_processed": 0,
                    "total_synced": 0,
                    "total_failed": 0
                }

            total_synced = 0
            total_failed = 0
            results = []

            for tenant_id in tenants_to_sync:
                try:
                    result = await self.transaction_service.sync_unsynced_transactions(
                        tenant_id=tenant_id,
                        limit=self.batch_size
                    )

                    synced = result.get("synced", 0)
                    failed = result.get("failed", 0)

                    total_synced += synced
                    total_failed += failed

                    results.append({
                        "tenant_id": str(tenant_id),
                        "synced": synced,
                        "failed": failed
                    })

                    logger.info("Tenant sync completed",
                                tenant_id=str(tenant_id),
                                synced=synced,
                                failed=failed)

                except Exception as e:
                    logger.error("Failed to sync tenant",
                                 tenant_id=str(tenant_id),
                                 error=str(e))
                    results.append({
                        "tenant_id": str(tenant_id),
                        "error": str(e)
                    })

            duration = (datetime.utcnow() - start_time).total_seconds()

            logger.info("POS to Sales sync job completed",
                        duration_seconds=duration,
                        tenants_processed=len(tenants_to_sync),
                        total_synced=total_synced,
                        total_failed=total_failed)

            return {
                "success": True,
                "tenants_processed": len(tenants_to_sync),
                "total_synced": total_synced,
                "total_failed": total_failed,
                "duration_seconds": duration,
                "results": results
            }

        except Exception as e:
            duration = (datetime.utcnow() - start_time).total_seconds()
            logger.error("POS to Sales sync job failed",
                         error=str(e),
                         duration_seconds=duration,
                         exc_info=True)

            return {
                "success": False,
                "error": str(e),
                "duration_seconds": duration
            }

    async def _get_active_tenants(self):
        """Get list of tenant IDs with active POS configurations"""
        try:
            async with get_db_transaction() as db:
                repository = POSConfigRepository(db)

                # Get all active POS configurations
                configs = await repository.get_all_active_configs()

                # Extract unique tenant IDs
                tenant_ids = list(set(config.tenant_id for config in configs))

                logger.info("Found tenants with active POS configs",
                            count=len(tenant_ids))

                return tenant_ids

        except Exception as e:
            logger.error("Failed to get active tenants", error=str(e))
            return []

    async def sync_specific_tenant(self, tenant_id: str) -> Dict[str, Any]:
        """
        Sync transactions for a specific tenant (for manual triggering)

        Args:
            tenant_id: Tenant UUID as string

        Returns:
            Sync result dictionary
        """
        try:
            from uuid import UUID
            tenant_uuid = UUID(tenant_id)

            result = await self.transaction_service.sync_unsynced_transactions(
                tenant_id=tenant_uuid,
                limit=self.batch_size
            )

            logger.info("Manual tenant sync completed",
                        tenant_id=tenant_id,
                        synced=result.get("synced"),
                        failed=result.get("failed"))

            return result

        except Exception as e:
            logger.error("Failed to sync specific tenant",
                         tenant_id=tenant_id,
                         error=str(e))
            return {
                "success": False,
                "error": str(e)
            }


# Singleton instance for use in schedulers
pos_to_sales_sync_job = POSToSalesSyncJob()


async def run_pos_to_sales_sync():
    """
    Entry point for scheduler

    Usage with APScheduler:
    ```python
    from apscheduler.schedulers.asyncio import AsyncIOScheduler
    from app.jobs.sync_pos_to_sales import run_pos_to_sales_sync

    scheduler = AsyncIOScheduler()
    scheduler.add_job(
        run_pos_to_sales_sync,
        'interval',
        minutes=5,
        id='pos_to_sales_sync'
    )
    scheduler.start()
    ```

    Usage with Celery:
    ```python
    import asyncio

    from celery import Celery
    from app.jobs.sync_pos_to_sales import run_pos_to_sales_sync

    celery_app = Celery("pos")

    @celery_app.task
    def sync_pos_transactions():
        asyncio.run(run_pos_to_sales_sync())
    ```
    """
    return await pos_to_sales_sync_job.run()


if __name__ == "__main__":
    # For testing: Run sync manually
    asyncio.run(run_pos_to_sales_sync())
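For ad-hoc runs outside the scheduler, the singleton also exposes a per-tenant entry point; a small sketch (the tenant ID is a placeholder):

```python
import asyncio

from app.jobs.sync_pos_to_sales import pos_to_sales_sync_job

# Placeholder UUID; substitute a real tenant ID
result = asyncio.run(
    pos_to_sales_sync_job.sync_specific_tenant("00000000-0000-0000-0000-000000000000")
)
print(result)
```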
218
services/pos/app/main.py
Normal file
@@ -0,0 +1,218 @@
"""
POS Integration Service
Handles integration with external POS systems (Square, Toast, Lightspeed)
"""

import time
from fastapi import FastAPI, Request
from sqlalchemy import text
from app.core.config import settings
from app.api.configurations import router as configurations_router
from app.api.transactions import router as transactions_router
from app.api.pos_operations import router as pos_operations_router
from app.api.analytics import router as analytics_router
from app.api.audit import router as audit_router
# from app.api.internal_demo import router as internal_demo_router  # REMOVED: Replaced by script-based seed data loading
from app.core.database import database_manager
from shared.service_base import StandardFastAPIService


class POSService(StandardFastAPIService):
    """POS Integration Service with standardized setup"""

    expected_migration_version = "e9976ec9fe9e"

    def __init__(self):
        # Initialize scheduler reference
        self.pos_scheduler = None

        # Define expected database tables for health checks
        pos_expected_tables = [
            'pos_configurations', 'pos_transactions', 'pos_transaction_items',
            'pos_webhook_logs', 'pos_sync_logs'
        ]

        # Define custom metrics for POS service
        pos_custom_metrics = {
            "pos_webhooks_received_total": {
                "type": "counter",
                "description": "Total POS webhooks received",
                "labels": ["provider", "event_type"]
            },
            "pos_sync_jobs_total": {
                "type": "counter",
                "description": "Total POS sync jobs",
                "labels": ["provider", "status"]
            },
            "pos_transactions_synced_total": {
                "type": "counter",
                "description": "Total transactions synced",
                "labels": ["provider"]
            },
            "pos_webhook_processing_duration_seconds": {
                "type": "histogram",
                "description": "Time spent processing webhooks"
            },
            "pos_sync_duration_seconds": {
                "type": "histogram",
                "description": "Time spent syncing data"
            }
        }

        super().__init__(
            service_name="pos-service",
            app_name="POS Integration Service",
            description="Handles integration with external POS systems",
            version="1.0.0",
            cors_origins=settings.CORS_ORIGINS,
            api_prefix="",  # Empty because RouteBuilder already includes /api/v1
            database_manager=database_manager,
            expected_tables=pos_expected_tables,
            custom_metrics=pos_custom_metrics
        )

    async def verify_migrations(self):
        """Verify database schema matches the latest migrations."""
        try:
            async with self.database_manager.get_session() as session:
                result = await session.execute(text("SELECT version_num FROM alembic_version"))
                version = result.scalar()
                if version != self.expected_migration_version:
                    self.logger.error(f"Migration version mismatch: expected {self.expected_migration_version}, got {version}")
                    raise RuntimeError(f"Migration version mismatch: expected {self.expected_migration_version}, got {version}")
                self.logger.info(f"Migration verification successful: {version}")
        except Exception as e:
            self.logger.error(f"Migration verification failed: {e}")
            raise

    async def on_startup(self, app: FastAPI):
        """Custom startup logic for POS service"""
        # Verify migrations first
        await self.verify_migrations()

        # Call parent startup
        await super().on_startup(app)

        # Start background scheduler for POS-to-Sales sync with leader election
        try:
            from app.scheduler import POSScheduler
            self.pos_scheduler = POSScheduler(
                redis_url=settings.REDIS_URL,  # Pass Redis URL for leader election
                sync_interval_minutes=settings.SYNC_INTERVAL_SECONDS // 60 if settings.SYNC_INTERVAL_SECONDS >= 60 else 5
            )
            await self.pos_scheduler.start()
            self.logger.info("POS scheduler started successfully with leader election")

            # Store scheduler in app state for status checks
            app.state.pos_scheduler = self.pos_scheduler
        except Exception as e:
            self.logger.error(f"Failed to start POS scheduler: {e}", exc_info=True)
            # Don't fail startup if scheduler fails

        # Custom startup completed
        self.logger.info("POS Integration Service started successfully")

    async def on_shutdown(self, app: FastAPI):
        """Custom shutdown logic for POS service"""
        # Shutdown POS scheduler
        try:
            if self.pos_scheduler:
                await self.pos_scheduler.stop()
                self.logger.info("POS scheduler stopped successfully")
        except Exception as e:
            self.logger.error(f"Failed to stop POS scheduler: {e}", exc_info=True)

        # Database cleanup is handled by the base class

    def get_service_features(self):
        """Return POS-specific features"""
        return [
            "pos_integration",
            "square_support",
            "toast_support",
            "lightspeed_support",
            "webhook_handling",
            "transaction_sync",
            "real_time_updates"
        ]

    def setup_custom_middleware(self):
        """Setup custom middleware for POS service"""
        # Middleware for request logging and timing
        @self.app.middleware("http")
        async def log_requests(request: Request, call_next):
            start_time = time.time()

            # Log request
            self.logger.info(
                "Incoming request",
                method=request.method,
                url=str(request.url),
                client_ip=request.client.host if request.client else None
            )

            response = await call_next(request)

            # Log response
            process_time = time.time() - start_time
            self.logger.info(
                "Request completed",
                method=request.method,
                url=str(request.url),
                status_code=response.status_code,
                process_time=f"{process_time:.4f}s"
            )

            response.headers["X-Process-Time"] = str(process_time)
            return response

    def setup_custom_endpoints(self):
        """Setup custom endpoints for POS service"""
        @self.app.get("/")
        async def root():
            """Root endpoint"""
            return {
                "service": "POS Integration Service",
                "version": "1.0.0",
                "status": "running",
                "supported_pos_systems": ["square", "toast", "lightspeed"]
            }


# Create service instance
service = POSService()

# Create FastAPI app with standardized setup
app = service.create_app(
    docs_url="/docs" if settings.ENVIRONMENT != "production" else None,
    redoc_url="/redoc" if settings.ENVIRONMENT != "production" else None
)

# Setup standard endpoints
service.setup_standard_endpoints()

# Setup custom middleware
service.setup_custom_middleware()

# Setup custom endpoints
service.setup_custom_endpoints()

# Include routers
# IMPORTANT: Register audit router FIRST to avoid route matching conflicts
service.add_router(audit_router, tags=["audit-logs"])
service.add_router(configurations_router, tags=["pos-configurations"])
service.add_router(transactions_router, tags=["pos-transactions"])
service.add_router(pos_operations_router, tags=["pos-operations"])
service.add_router(analytics_router, tags=["pos-analytics"])
# service.add_router(internal_demo_router, tags=["internal-demo"])  # REMOVED: Replaced by script-based seed data loading


if __name__ == "__main__":
    import uvicorn
    uvicorn.run(
        "main:app",
        host="0.0.0.0",
        port=8000,
        reload=True
    )
24
services/pos/app/models/__init__.py
Normal file
@@ -0,0 +1,24 @@
"""
Database models for POS Integration Service
"""

# Import AuditLog model for this service
from shared.security import create_audit_log_model
from shared.database.base import Base

# Create audit log model for this service
AuditLog = create_audit_log_model(Base)

from .pos_config import POSConfiguration
from .pos_transaction import POSTransaction, POSTransactionItem
from .pos_webhook import POSWebhookLog
from .pos_sync import POSSyncLog

__all__ = [
    "POSConfiguration",
    "POSTransaction",
    "POSTransactionItem",
    "POSWebhookLog",
    "POSSyncLog",
    "AuditLog"
]
83
services/pos/app/models/pos_config.py
Normal file
83
services/pos/app/models/pos_config.py
Normal file
@@ -0,0 +1,83 @@
# services/pos/app/models/pos_config.py
"""
POS Configuration Model
Stores POS system configurations for each tenant
"""

from sqlalchemy import Column, String, DateTime, Boolean, Text, JSON, Index
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.sql import func
import uuid

from shared.database.base import Base


class POSConfiguration(Base):
    """
    POS system configuration for tenants
    Stores encrypted credentials and settings for each POS provider
    """
    __tablename__ = "pos_configurations"

    # Primary identifiers
    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4, index=True)
    tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)

    # POS Provider Information
    pos_system = Column(String(50), nullable=False)  # square, toast, lightspeed
    provider_name = Column(String(100), nullable=False)  # Display name for the provider

    # Configuration Status
    is_active = Column(Boolean, default=True, nullable=False)
    is_connected = Column(Boolean, default=False, nullable=False)

    # Authentication & Credentials (encrypted)
    encrypted_credentials = Column(Text, nullable=True)  # JSON with encrypted API keys/tokens
    webhook_url = Column(String(500), nullable=True)
    webhook_secret = Column(String(255), nullable=True)

    # Provider-specific Settings
    environment = Column(String(20), default="sandbox", nullable=False)  # sandbox, production
    location_id = Column(String(100), nullable=True)  # For multi-location setups
    merchant_id = Column(String(100), nullable=True)  # Provider merchant ID

    # Sync Configuration
    sync_enabled = Column(Boolean, default=True, nullable=False)
    sync_interval_minutes = Column(String(10), default="5", nullable=False)
    auto_sync_products = Column(Boolean, default=True, nullable=False)
    auto_sync_transactions = Column(Boolean, default=True, nullable=False)

    # Last Sync Information
    last_sync_at = Column(DateTime(timezone=True), nullable=True)
    last_successful_sync_at = Column(DateTime(timezone=True), nullable=True)
    last_sync_status = Column(String(50), nullable=True)  # success, failed, partial
    last_sync_message = Column(Text, nullable=True)

    # Provider-specific Configuration (JSON)
    provider_settings = Column(JSON, nullable=True)

    # Connection Health
    last_health_check_at = Column(DateTime(timezone=True), nullable=True)
    health_status = Column(String(50), default="unknown", nullable=False)  # healthy, unhealthy, unknown
    health_message = Column(Text, nullable=True)

    # Timestamps
    created_at = Column(DateTime(timezone=True), server_default=func.now(), nullable=False)
    updated_at = Column(DateTime(timezone=True), server_default=func.now(), onupdate=func.now(), nullable=False)

    # Metadata
    created_by = Column(UUID(as_uuid=True), nullable=True)
    notes = Column(Text, nullable=True)

    # Indexes for performance
    __table_args__ = (
        Index('idx_pos_config_tenant_pos_system', 'tenant_id', 'pos_system'),
        Index('idx_pos_config_active', 'is_active'),
        Index('idx_pos_config_connected', 'is_connected'),
        Index('idx_pos_config_sync_enabled', 'sync_enabled'),
        Index('idx_pos_config_health_status', 'health_status'),
        Index('idx_pos_config_created_at', 'created_at'),
    )

    def __repr__(self):
        return f"<POSConfiguration(id={self.id}, tenant_id={self.tenant_id}, pos_system='{self.pos_system}', is_active={self.is_active})>"
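For orientation, here is a minimal sketch of how a tenant's configuration row might be created with this model. The `async_session_factory` helper and the calling context are assumptions, not names from this codebase; adapt them to however `shared.database` exposes sessions.

```python
# Hypothetical usage sketch: register a Square sandbox configuration for a
# tenant. `async_session_factory` is an assumed helper, not a real export.
import uuid

from app.models.pos_config import POSConfiguration


async def register_square_config(async_session_factory, tenant_id: uuid.UUID):
    async with async_session_factory() as session:
        config = POSConfiguration(
            tenant_id=tenant_id,
            pos_system="square",
            provider_name="Square (Sandbox)",
            environment="sandbox",
            sync_interval_minutes="5",  # note: the column stores a string, not an int
        )
        session.add(config)
        await session.commit()
        return config.id
```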
126
services/pos/app/models/pos_sync.py
Normal file
@@ -0,0 +1,126 @@
# services/pos/app/models/pos_sync.py
"""
POS Sync Log Model
Tracks synchronization operations with POS systems
"""

from sqlalchemy import Column, String, DateTime, Integer, Text, JSON, Index, Numeric
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.sql import func
import uuid

from shared.database.base import Base


class POSSyncLog(Base):
    """
    Log of synchronization operations with POS systems
    """
    __tablename__ = "pos_sync_logs"

    # Primary identifiers
    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4, index=True)
    tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)
    pos_config_id = Column(UUID(as_uuid=True), nullable=False, index=True)

    # Sync Operation Details
    sync_type = Column(String(50), nullable=False, index=True)  # full, incremental, manual, webhook_triggered
    sync_direction = Column(String(20), nullable=False)  # inbound, outbound, bidirectional
    data_type = Column(String(50), nullable=False, index=True)  # transactions, products, customers, orders

    # POS Provider Information
    pos_system = Column(String(50), nullable=False, index=True)  # square, toast, lightspeed

    # Sync Status
    status = Column(String(50), nullable=False, default="started", index=True)  # started, in_progress, completed, failed, cancelled

    # Timing Information
    started_at = Column(DateTime(timezone=True), nullable=False, index=True)
    completed_at = Column(DateTime(timezone=True), nullable=True)
    duration_seconds = Column(Numeric(10, 3), nullable=True)

    # Date Range for Sync
    sync_from_date = Column(DateTime(timezone=True), nullable=True)
    sync_to_date = Column(DateTime(timezone=True), nullable=True)

    # Statistics
    records_requested = Column(Integer, default=0, nullable=False)
    records_processed = Column(Integer, default=0, nullable=False)
    records_created = Column(Integer, default=0, nullable=False)
    records_updated = Column(Integer, default=0, nullable=False)
    records_skipped = Column(Integer, default=0, nullable=False)
    records_failed = Column(Integer, default=0, nullable=False)

    # API Usage Statistics
    api_calls_made = Column(Integer, default=0, nullable=False)
    api_rate_limit_hits = Column(Integer, default=0, nullable=False)
    total_api_time_ms = Column(Integer, default=0, nullable=False)

    # Error Information
    error_message = Column(Text, nullable=True)
    error_code = Column(String(100), nullable=True)
    error_details = Column(JSON, nullable=True)

    # Retry Information
    retry_attempt = Column(Integer, default=0, nullable=False)
    max_retries = Column(Integer, default=3, nullable=False)
    parent_sync_id = Column(UUID(as_uuid=True), nullable=True)  # Reference to original sync for retries

    # Configuration Snapshot
    sync_configuration = Column(JSON, nullable=True)  # Settings used for this sync

    # Progress Tracking
    current_page = Column(Integer, nullable=True)
    total_pages = Column(Integer, nullable=True)
    current_batch = Column(Integer, nullable=True)
    total_batches = Column(Integer, nullable=True)
    progress_percentage = Column(Numeric(5, 2), nullable=True)

    # Data Quality
    validation_errors = Column(JSON, nullable=True)  # Array of validation issues
    data_quality_score = Column(Numeric(5, 2), nullable=True)  # 0-100 score

    # Performance Metrics
    memory_usage_mb = Column(Numeric(10, 2), nullable=True)
    cpu_usage_percentage = Column(Numeric(5, 2), nullable=True)
    network_bytes_received = Column(Integer, nullable=True)
    network_bytes_sent = Column(Integer, nullable=True)

    # Business Impact
    revenue_synced = Column(Numeric(12, 2), nullable=True)  # Total monetary value synced
    transactions_synced = Column(Integer, default=0, nullable=False)

    # Trigger Information
    triggered_by = Column(String(50), nullable=True)  # system, user, webhook, schedule
    triggered_by_user_id = Column(UUID(as_uuid=True), nullable=True)
    trigger_details = Column(JSON, nullable=True)

    # External References
    external_batch_id = Column(String(255), nullable=True)  # POS system's batch/job ID
    webhook_log_id = Column(UUID(as_uuid=True), nullable=True)  # If triggered by webhook

    # Timestamps
    created_at = Column(DateTime(timezone=True), server_default=func.now(), nullable=False)
    updated_at = Column(DateTime(timezone=True), server_default=func.now(), onupdate=func.now(), nullable=False)

    # Metadata
    notes = Column(Text, nullable=True)
    tags = Column(JSON, nullable=True)  # Array of tags for categorization

    # Indexes for performance
    __table_args__ = (
        Index('idx_sync_log_tenant_started', 'tenant_id', 'started_at'),
        Index('idx_sync_log_pos_system_type', 'pos_system', 'sync_type'),
        Index('idx_sync_log_status', 'status'),
        Index('idx_sync_log_data_type', 'data_type'),
        Index('idx_sync_log_trigger', 'triggered_by'),
        Index('idx_sync_log_completed', 'completed_at'),
        Index('idx_sync_log_duration', 'duration_seconds'),
        Index('idx_sync_log_retry', 'retry_attempt'),
        Index('idx_sync_log_parent', 'parent_sync_id'),
        Index('idx_sync_log_webhook', 'webhook_log_id'),
        Index('idx_sync_log_external_batch', 'external_batch_id'),
    )

    def __repr__(self):
        return f"<POSSyncLog(id={self.id}, pos_system='{self.pos_system}', type='{self.sync_type}', status='{self.status}')>"
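A sync job would typically bracket its work with one of these log rows. The sketch below shows one plausible lifecycle, assuming an open async session and a `do_sync` coroutine returning simple counters; neither exists under these names in the service.

```python
# Illustrative sketch of recording a sync run with POSSyncLog.
# `session`, `config`, and `do_sync` are assumed to be provided by the caller.
from datetime import datetime, timezone

from app.models.pos_sync import POSSyncLog


async def run_logged_sync(session, config, do_sync):
    log = POSSyncLog(
        tenant_id=config.tenant_id,
        pos_config_id=config.id,
        pos_system=config.pos_system,
        sync_type="incremental",
        sync_direction="inbound",
        data_type="transactions",
        status="in_progress",
        started_at=datetime.now(timezone.utc),
        triggered_by="schedule",
    )
    session.add(log)
    await session.flush()  # assign the log row an id before the work begins

    try:
        stats = await do_sync()  # assumed to return a dict of counters
        log.status = "completed"
        log.records_processed = stats.get("processed", 0)
    except Exception as exc:
        log.status = "failed"
        log.error_message = str(exc)
        raise
    finally:
        log.completed_at = datetime.now(timezone.utc)
        log.duration_seconds = (log.completed_at - log.started_at).total_seconds()
        await session.commit()
```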
174
services/pos/app/models/pos_transaction.py
Normal file
@@ -0,0 +1,174 @@
# services/pos/app/models/pos_transaction.py
"""
POS Transaction Models
Stores transaction data from POS systems
"""

from sqlalchemy import Column, String, DateTime, Boolean, Numeric, Integer, Text, JSON, Index, ForeignKey
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.sql import func
from sqlalchemy.orm import relationship
import uuid

from shared.database.base import Base


class POSTransaction(Base):
    """
    Main transaction record from POS systems
    """
    __tablename__ = "pos_transactions"

    # Primary identifiers
    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4, index=True)
    tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)
    pos_config_id = Column(UUID(as_uuid=True), ForeignKey("pos_configurations.id"), nullable=False, index=True)

    # POS Provider Information
    pos_system = Column(String(50), nullable=False, index=True)  # square, toast, lightspeed
    external_transaction_id = Column(String(255), nullable=False, index=True)  # POS system's transaction ID
    external_order_id = Column(String(255), nullable=True, index=True)  # POS system's order ID

    # Transaction Details
    transaction_type = Column(String(50), nullable=False)  # sale, refund, void, exchange
    status = Column(String(50), nullable=False)  # completed, pending, failed, refunded, voided

    # Financial Information
    subtotal = Column(Numeric(10, 2), nullable=False)
    tax_amount = Column(Numeric(10, 2), default=0, nullable=False)
    tip_amount = Column(Numeric(10, 2), default=0, nullable=False)
    discount_amount = Column(Numeric(10, 2), default=0, nullable=False)
    total_amount = Column(Numeric(10, 2), nullable=False)
    currency = Column(String(3), default="EUR", nullable=False)

    # Payment Information
    payment_method = Column(String(50), nullable=True)  # card, cash, digital_wallet, etc.
    payment_status = Column(String(50), nullable=True)  # paid, pending, failed

    # Transaction Timing
    transaction_date = Column(DateTime(timezone=True), nullable=False, index=True)
    pos_created_at = Column(DateTime(timezone=True), nullable=False)  # Original POS timestamp
    pos_updated_at = Column(DateTime(timezone=True), nullable=True)  # Last update in POS

    # Location & Staff
    location_id = Column(String(100), nullable=True)
    location_name = Column(String(255), nullable=True)
    staff_id = Column(String(100), nullable=True)
    staff_name = Column(String(255), nullable=True)

    # Customer Information
    customer_id = Column(String(100), nullable=True)
    customer_email = Column(String(255), nullable=True)
    customer_phone = Column(String(50), nullable=True)

    # Order Context
    order_type = Column(String(50), nullable=True)  # dine_in, takeout, delivery, pickup
    table_number = Column(String(20), nullable=True)
    receipt_number = Column(String(100), nullable=True)

    # Sync Status
    is_synced_to_sales = Column(Boolean, default=False, nullable=False, index=True)
    sales_record_id = Column(UUID(as_uuid=True), nullable=True, index=True)  # Reference to sales service
    sync_attempted_at = Column(DateTime(timezone=True), nullable=True)
    sync_completed_at = Column(DateTime(timezone=True), nullable=True)
    sync_error = Column(Text, nullable=True)
    sync_retry_count = Column(Integer, default=0, nullable=False)

    # Raw Data
    raw_data = Column(JSON, nullable=True)  # Complete raw response from POS

    # Processing Status
    is_processed = Column(Boolean, default=False, nullable=False)
    processing_error = Column(Text, nullable=True)

    # Duplicate Detection
    is_duplicate = Column(Boolean, default=False, nullable=False)
    duplicate_of = Column(UUID(as_uuid=True), nullable=True)

    # Timestamps
    created_at = Column(DateTime(timezone=True), server_default=func.now(), nullable=False)
    updated_at = Column(DateTime(timezone=True), server_default=func.now(), onupdate=func.now(), nullable=False)

    # Relationships
    items = relationship("POSTransactionItem", back_populates="transaction", cascade="all, delete-orphan")

    # Indexes for performance
    __table_args__ = (
        Index('idx_pos_transaction_tenant_date', 'tenant_id', 'transaction_date'),
        Index('idx_pos_transaction_external_id', 'pos_system', 'external_transaction_id'),
        Index('idx_pos_transaction_sync_status', 'is_synced_to_sales'),
        Index('idx_pos_transaction_status', 'status'),
        Index('idx_pos_transaction_type', 'transaction_type'),
        Index('idx_pos_transaction_processed', 'is_processed'),
        Index('idx_pos_transaction_duplicate', 'is_duplicate'),
        Index('idx_pos_transaction_location', 'location_id'),
        Index('idx_pos_transaction_customer', 'customer_id'),
    )

    def __repr__(self):
        return f"<POSTransaction(id={self.id}, external_id='{self.external_transaction_id}', pos_system='{self.pos_system}', total={self.total_amount})>"


class POSTransactionItem(Base):
    """
    Individual items within a POS transaction
    """
    __tablename__ = "pos_transaction_items"

    # Primary identifiers
    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4, index=True)
    transaction_id = Column(UUID(as_uuid=True), ForeignKey("pos_transactions.id"), nullable=False, index=True)
    tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)

    # POS Item Information
    external_item_id = Column(String(255), nullable=True)  # POS system's item ID
    sku = Column(String(100), nullable=True, index=True)

    # Product Details
    product_name = Column(String(255), nullable=False)
    product_category = Column(String(100), nullable=True, index=True)
    product_subcategory = Column(String(100), nullable=True)

    # Quantity & Pricing
    quantity = Column(Numeric(10, 3), nullable=False)
    unit_price = Column(Numeric(10, 2), nullable=False)
    total_price = Column(Numeric(10, 2), nullable=False)

    # Discounts & Taxes
    discount_amount = Column(Numeric(10, 2), default=0, nullable=False)
    tax_amount = Column(Numeric(10, 2), default=0, nullable=False)

    # Modifiers (e.g., extra shot, no foam for coffee)
    modifiers = Column(JSON, nullable=True)

    # Inventory Mapping
    inventory_product_id = Column(UUID(as_uuid=True), nullable=True, index=True)  # Mapped to inventory service
    is_mapped_to_inventory = Column(Boolean, default=False, nullable=False)

    # Sync Status
    is_synced_to_sales = Column(Boolean, default=False, nullable=False)
    sync_error = Column(Text, nullable=True)

    # Raw Data
    raw_data = Column(JSON, nullable=True)

    # Timestamps
    created_at = Column(DateTime(timezone=True), server_default=func.now(), nullable=False)
    updated_at = Column(DateTime(timezone=True), server_default=func.now(), onupdate=func.now(), nullable=False)

    # Relationships
    transaction = relationship("POSTransaction", back_populates="items")

    # Indexes for performance
    __table_args__ = (
        Index('idx_pos_item_transaction', 'transaction_id'),
        Index('idx_pos_item_product', 'product_name'),
        Index('idx_pos_item_category', 'product_category'),
        Index('idx_pos_item_sku', 'sku'),
        Index('idx_pos_item_inventory', 'inventory_product_id'),
        Index('idx_pos_item_sync', 'is_synced_to_sales'),
        Index('idx_pos_item_mapped', 'is_mapped_to_inventory'),
    )

    def __repr__(self):
        return f"<POSTransactionItem(id={self.id}, product='{self.product_name}', quantity={self.quantity}, price={self.total_price})>"
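Because `POSTransaction.items` is configured with `cascade="all, delete-orphan"`, a transaction and its line items can be persisted with a single `session.add()`. A minimal sketch, assuming `session`, `tenant_id`, and `pos_config_id` are already in scope; the values shown are illustrative.

```python
from datetime import datetime, timezone
from decimal import Decimal

from app.models.pos_transaction import POSTransaction, POSTransactionItem

now = datetime.now(timezone.utc)
tx = POSTransaction(
    tenant_id=tenant_id,          # assumed in scope
    pos_config_id=pos_config_id,  # assumed in scope
    pos_system="square",
    external_transaction_id="sq_txn_123",  # illustrative ID
    transaction_type="sale",
    status="completed",
    subtotal=Decimal("6.50"),
    tax_amount=Decimal("0.65"),
    total_amount=Decimal("7.15"),
    transaction_date=now,
    pos_created_at=now,
    items=[
        POSTransactionItem(
            tenant_id=tenant_id,
            product_name="Croissant",
            quantity=Decimal("2"),
            unit_price=Decimal("3.25"),
            total_price=Decimal("6.50"),
        )
    ],
)
session.add(tx)  # one add covers the items via the cascade
```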
109
services/pos/app/models/pos_webhook.py
Normal file
@@ -0,0 +1,109 @@
# services/pos/app/models/pos_webhook.py
"""
POS Webhook Log Model
Tracks webhook events from POS systems
"""

from sqlalchemy import Column, String, DateTime, Boolean, Integer, Text, JSON, Index
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.sql import func
import uuid

from shared.database.base import Base


class POSWebhookLog(Base):
    """
    Log of webhook events received from POS systems
    """
    __tablename__ = "pos_webhook_logs"

    # Primary identifiers
    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4, index=True)
    tenant_id = Column(UUID(as_uuid=True), nullable=True, index=True)  # May be null until parsed

    # POS Provider Information
    pos_system = Column(String(50), nullable=False, index=True)  # square, toast, lightspeed
    webhook_type = Column(String(100), nullable=False, index=True)  # payment.created, order.updated, etc.

    # Request Information
    method = Column(String(10), nullable=False)  # POST, PUT, etc.
    url_path = Column(String(500), nullable=False)
    query_params = Column(JSON, nullable=True)
    headers = Column(JSON, nullable=True)

    # Payload
    raw_payload = Column(Text, nullable=False)  # Raw webhook payload
    payload_size = Column(Integer, nullable=False, default=0)
    content_type = Column(String(100), nullable=True)

    # Security
    signature = Column(String(500), nullable=True)  # Webhook signature for verification
    is_signature_valid = Column(Boolean, nullable=True)  # null = not checked, true/false = verified
    source_ip = Column(String(45), nullable=True)  # IPv4 or IPv6

    # Processing Status
    status = Column(String(50), nullable=False, default="received", index=True)  # received, processing, processed, failed
    processing_started_at = Column(DateTime(timezone=True), nullable=True)
    processing_completed_at = Column(DateTime(timezone=True), nullable=True)
    processing_duration_ms = Column(Integer, nullable=True)

    # Error Handling
    error_message = Column(Text, nullable=True)
    error_code = Column(String(50), nullable=True)
    retry_count = Column(Integer, default=0, nullable=False)
    max_retries = Column(Integer, default=3, nullable=False)

    # Response Information
    response_status_code = Column(Integer, nullable=True)
    response_body = Column(Text, nullable=True)
    response_sent_at = Column(DateTime(timezone=True), nullable=True)

    # Event Metadata
    event_id = Column(String(255), nullable=True, index=True)  # POS system's event ID
    event_timestamp = Column(DateTime(timezone=True), nullable=True)  # When the event occurred in the POS
    sequence_number = Column(Integer, nullable=True)  # For ordered events

    # Business Data References
    transaction_id = Column(String(255), nullable=True, index=True)  # Referenced transaction
    order_id = Column(String(255), nullable=True, index=True)  # Referenced order
    customer_id = Column(String(255), nullable=True)  # Referenced customer

    # Internal References
    created_transaction_id = Column(UUID(as_uuid=True), nullable=True)  # Created POSTransaction record
    updated_transaction_id = Column(UUID(as_uuid=True), nullable=True)  # Updated POSTransaction record

    # Duplicate Detection
    is_duplicate = Column(Boolean, default=False, nullable=False, index=True)
    duplicate_of = Column(UUID(as_uuid=True), nullable=True)

    # Processing Priority
    priority = Column(String(20), default="normal", nullable=False)  # low, normal, high, urgent

    # Debugging Information
    user_agent = Column(String(500), nullable=True)
    forwarded_for = Column(String(200), nullable=True)  # X-Forwarded-For header
    request_id = Column(String(100), nullable=True)  # For request tracing

    # Timestamps
    received_at = Column(DateTime(timezone=True), server_default=func.now(), nullable=False, index=True)
    created_at = Column(DateTime(timezone=True), server_default=func.now(), nullable=False)
    updated_at = Column(DateTime(timezone=True), server_default=func.now(), onupdate=func.now(), nullable=False)

    # Indexes for performance
    __table_args__ = (
        Index('idx_webhook_pos_system_type', 'pos_system', 'webhook_type'),
        Index('idx_webhook_status', 'status'),
        Index('idx_webhook_event_id', 'event_id'),
        Index('idx_webhook_received_at', 'received_at'),
        Index('idx_webhook_tenant_received', 'tenant_id', 'received_at'),
        Index('idx_webhook_transaction_id', 'transaction_id'),
        Index('idx_webhook_order_id', 'order_id'),
        Index('idx_webhook_duplicate', 'is_duplicate'),
        Index('idx_webhook_priority', 'priority'),
        Index('idx_webhook_retry', 'retry_count'),
        Index('idx_webhook_signature_valid', 'is_signature_valid'),
    )

    def __repr__(self):
        return f"<POSWebhookLog(id={self.id}, pos_system='{self.pos_system}', type='{self.webhook_type}', status='{self.status}')>"
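The `signature` / `is_signature_valid` pair above implies HMAC-style verification at ingest time. The sketch below shows the generic HMAC-SHA256 pattern using only the standard library; the exact signing scheme differs per provider (Square, for instance, signs the notification URL together with the body), so treat this as a template rather than the service's actual verifier.

```python
import hashlib
import hmac


def verify_webhook_signature(raw_payload: bytes, signature: str, secret: str) -> bool:
    """Generic HMAC-SHA256 check; per-provider schemes vary in what gets signed."""
    expected = hmac.new(secret.encode(), raw_payload, hashlib.sha256).hexdigest()
    # compare_digest avoids timing side channels on the comparison
    return hmac.compare_digest(expected, signature)


# In a webhook handler the result would be recorded for auditing, e.g.:
#   log.signature = signature
#   log.is_signature_valid = verify_webhook_signature(body, signature, config.webhook_secret)
```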
119
services/pos/app/repositories/pos_config_repository.py
Normal file
@@ -0,0 +1,119 @@
"""
POS Configuration Repository using Repository Pattern
"""

from typing import List, Optional
from uuid import UUID
from sqlalchemy import select, and_, or_
from sqlalchemy.ext.asyncio import AsyncSession
import structlog

from app.models.pos_config import POSConfiguration
from shared.database.repository import BaseRepository

logger = structlog.get_logger()


class POSConfigurationRepository(BaseRepository[POSConfiguration, dict, dict]):
    """Repository for POS configuration operations"""

    def __init__(self, session: AsyncSession):
        super().__init__(POSConfiguration, session)

    async def get_configurations_by_tenant(
        self,
        tenant_id: UUID,
        pos_system: Optional[str] = None,
        is_active: Optional[bool] = None,
        skip: int = 0,
        limit: int = 100
    ) -> List[POSConfiguration]:
        """Get POS configurations for a specific tenant with optional filters"""
        try:
            query = select(self.model).where(self.model.tenant_id == tenant_id)

            # Apply filters
            conditions = []
            if pos_system:
                conditions.append(self.model.pos_system == pos_system)
            if is_active is not None:
                conditions.append(self.model.is_active == is_active)

            if conditions:
                query = query.where(and_(*conditions))

            query = query.offset(skip).limit(limit).order_by(self.model.created_at.desc())

            result = await self.session.execute(query)
            return result.scalars().all()

        except Exception as e:
            logger.error("Failed to get configurations by tenant", error=str(e), tenant_id=tenant_id)
            raise

    async def count_configurations_by_tenant(
        self,
        tenant_id: UUID,
        pos_system: Optional[str] = None,
        is_active: Optional[bool] = None
    ) -> int:
        """Count POS configurations for a specific tenant with optional filters"""
        try:
            from sqlalchemy import func

            query = select(func.count(self.model.id)).where(self.model.tenant_id == tenant_id)

            # Apply filters
            conditions = []
            if pos_system:
                conditions.append(self.model.pos_system == pos_system)
            if is_active is not None:
                conditions.append(self.model.is_active == is_active)

            if conditions:
                query = query.where(and_(*conditions))

            result = await self.session.execute(query)
            return result.scalar() or 0

        except Exception as e:
            logger.error("Failed to count configurations by tenant", error=str(e), tenant_id=tenant_id)
            raise

    async def get_by_pos_identifier(
        self,
        pos_system: str,
        identifier: str
    ) -> Optional[POSConfiguration]:
        """
        Get POS configuration by POS-specific identifier

        Args:
            pos_system: POS system name (square, toast, lightspeed)
            identifier: merchant_id, location_id, or other POS-specific ID

        Returns:
            POSConfiguration if found, None otherwise
        """
        try:
            query = select(self.model).where(
                and_(
                    self.model.pos_system == pos_system,
                    or_(
                        self.model.merchant_id == identifier,
                        self.model.location_id == identifier
                    ),
                    self.model.is_active == True
                )
            ).order_by(self.model.created_at.desc())

            result = await self.session.execute(query)
            return result.scalars().first()

        except Exception as e:
            logger.error("Failed to get config by POS identifier",
                         error=str(e),
                         pos_system=pos_system,
                         identifier=identifier)
            raise
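A typical caller of `get_by_pos_identifier` is the webhook ingest path, which must map a provider-side merchant or location ID back to a tenant configuration before the event can be attributed. A usage sketch, assuming an open `AsyncSession`:

```python
from app.repositories.pos_config_repository import POSConfigurationRepository


async def resolve_config(session, merchant_id: str):
    repo = POSConfigurationRepository(session)
    config = await repo.get_by_pos_identifier("square", merchant_id)
    if config is None:
        # Unknown merchant: the webhook can still be logged, just not attributed yet
        return None
    return config
```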
113
services/pos/app/repositories/pos_transaction_item_repository.py
Normal file
@@ -0,0 +1,113 @@
"""
POS Transaction Item Repository using Repository Pattern
"""

from typing import List
from uuid import UUID
from sqlalchemy import select, and_
from sqlalchemy.ext.asyncio import AsyncSession
import structlog

from app.models.pos_transaction import POSTransactionItem
from shared.database.repository import BaseRepository

logger = structlog.get_logger()


class POSTransactionItemRepository(BaseRepository[POSTransactionItem, dict, dict]):
    """Repository for POS transaction item operations"""

    def __init__(self, session: AsyncSession):
        super().__init__(POSTransactionItem, session)

    async def get_items_by_transaction(
        self,
        transaction_id: UUID
    ) -> List[POSTransactionItem]:
        """Get all items for a transaction"""
        try:
            query = select(POSTransactionItem).where(
                POSTransactionItem.transaction_id == transaction_id
            ).order_by(POSTransactionItem.created_at)

            result = await self.session.execute(query)
            return result.scalars().all()

        except Exception as e:
            logger.error("Failed to get transaction items",
                         transaction_id=str(transaction_id),
                         error=str(e))
            raise

    async def get_items_by_product(
        self,
        tenant_id: UUID,
        product_name: str,
        skip: int = 0,
        limit: int = 100
    ) -> List[POSTransactionItem]:
        """Get all transaction items for a specific product (case-insensitive contains-match)"""
        try:
            query = select(POSTransactionItem).where(
                and_(
                    POSTransactionItem.tenant_id == tenant_id,
                    POSTransactionItem.product_name.ilike(f"%{product_name}%")
                )
            ).order_by(POSTransactionItem.created_at.desc()).offset(skip).limit(limit)

            result = await self.session.execute(query)
            return result.scalars().all()

        except Exception as e:
            logger.error("Failed to get items by product",
                         product_name=product_name,
                         error=str(e))
            raise

    async def get_items_by_sku(
        self,
        tenant_id: UUID,
        sku: str
    ) -> List[POSTransactionItem]:
        """Get all transaction items for a specific SKU"""
        try:
            query = select(POSTransactionItem).where(
                and_(
                    POSTransactionItem.tenant_id == tenant_id,
                    POSTransactionItem.sku == sku
                )
            ).order_by(POSTransactionItem.created_at.desc())

            result = await self.session.execute(query)
            return result.scalars().all()

        except Exception as e:
            logger.error("Failed to get items by SKU",
                         sku=sku,
                         error=str(e))
            raise

    async def get_items_by_category(
        self,
        tenant_id: UUID,
        category: str,
        skip: int = 0,
        limit: int = 100
    ) -> List[POSTransactionItem]:
        """Get all transaction items for a specific category"""
        try:
            query = select(POSTransactionItem).where(
                and_(
                    POSTransactionItem.tenant_id == tenant_id,
                    POSTransactionItem.product_category == category
                )
            ).order_by(POSTransactionItem.created_at.desc()).offset(skip).limit(limit)

            result = await self.session.execute(query)
            return result.scalars().all()

        except Exception as e:
            logger.error("Failed to get items by category",
                         category=category,
                         error=str(e))
            raise
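Note that `get_items_by_product` uses a case-insensitive contains-match (`ILIKE '%...%'`), so a query for "croissant" also returns "Almond Croissant". A brief usage sketch, assuming an open session:

```python
from app.repositories.pos_transaction_item_repository import POSTransactionItemRepository


async def find_croissant_sales(session, tenant_id):
    # "croissant" matches "Croissant", "Almond Croissant", etc.
    repo = POSTransactionItemRepository(session)
    return await repo.get_items_by_product(tenant_id, "croissant", limit=20)
```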
362
services/pos/app/repositories/pos_transaction_repository.py
Normal file
@@ -0,0 +1,362 @@
"""
POS Transaction Repository using Repository Pattern
"""

from typing import List, Optional, Dict, Any
from uuid import UUID
from datetime import datetime, date, timedelta
from sqlalchemy import select, func, and_, desc
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.orm import selectinload
import structlog

from app.models.pos_transaction import POSTransaction
from shared.database.repository import BaseRepository

logger = structlog.get_logger()


class POSTransactionRepository(BaseRepository[POSTransaction, dict, dict]):
    """Repository for POS transaction operations"""

    def __init__(self, session: AsyncSession):
        super().__init__(POSTransaction, session)

    async def get_transactions_by_tenant(
        self,
        tenant_id: UUID,
        pos_system: Optional[str] = None,
        start_date: Optional[datetime] = None,
        end_date: Optional[datetime] = None,
        status: Optional[str] = None,
        is_synced: Optional[bool] = None,
        skip: int = 0,
        limit: int = 50
    ) -> List[POSTransaction]:
        """Get POS transactions for a specific tenant with optional filters"""
        try:
            query = select(self.model).options(
                selectinload(POSTransaction.items)
            ).where(self.model.tenant_id == tenant_id)

            # Apply filters
            conditions = []
            if pos_system:
                conditions.append(self.model.pos_system == pos_system)
            if status:
                conditions.append(self.model.status == status)
            if is_synced is not None:
                conditions.append(self.model.is_synced_to_sales == is_synced)
            if start_date:
                conditions.append(self.model.transaction_date >= start_date)
            if end_date:
                conditions.append(self.model.transaction_date <= end_date)

            if conditions:
                query = query.where(and_(*conditions))

            query = query.order_by(desc(self.model.transaction_date)).offset(skip).limit(limit)

            result = await self.session.execute(query)
            return result.scalars().all()

        except Exception as e:
            logger.error("Failed to get transactions by tenant", error=str(e), tenant_id=tenant_id)
            raise

    async def count_transactions_by_tenant(
        self,
        tenant_id: UUID,
        pos_system: Optional[str] = None,
        start_date: Optional[datetime] = None,
        end_date: Optional[datetime] = None,
        status: Optional[str] = None,
        is_synced: Optional[bool] = None
    ) -> int:
        """Count POS transactions for a specific tenant with optional filters"""
        try:
            query = select(func.count(self.model.id)).where(self.model.tenant_id == tenant_id)

            # Apply filters
            conditions = []
            if pos_system:
                conditions.append(self.model.pos_system == pos_system)
            if status:
                conditions.append(self.model.status == status)
            if is_synced is not None:
                conditions.append(self.model.is_synced_to_sales == is_synced)
            if start_date:
                conditions.append(self.model.transaction_date >= start_date)
            if end_date:
                conditions.append(self.model.transaction_date <= end_date)

            if conditions:
                query = query.where(and_(*conditions))

            result = await self.session.execute(query)
            return result.scalar() or 0

        except Exception as e:
            logger.error("Failed to count transactions by tenant", error=str(e), tenant_id=tenant_id)
            raise

    async def get_transaction_with_items(
        self,
        transaction_id: UUID,
        tenant_id: UUID
    ) -> Optional[POSTransaction]:
        """Get a transaction with all its items"""
        try:
            query = select(POSTransaction).options(
                selectinload(POSTransaction.items)
            ).where(
                and_(
                    POSTransaction.id == transaction_id,
                    POSTransaction.tenant_id == tenant_id
                )
            )
            result = await self.session.execute(query)
            return result.scalar_one_or_none()
        except Exception as e:
            logger.error("Failed to get transaction with items",
                         transaction_id=str(transaction_id),
                         error=str(e))
            raise

    async def get_transactions_by_pos_config(
        self,
        pos_config_id: UUID,
        skip: int = 0,
        limit: int = 50
    ) -> List[POSTransaction]:
        """Get transactions for a specific POS configuration"""
        try:
            query = select(POSTransaction).options(
                selectinload(POSTransaction.items)
            ).where(
                POSTransaction.pos_config_id == pos_config_id
            ).order_by(desc(POSTransaction.transaction_date)).offset(skip).limit(limit)

            result = await self.session.execute(query)
            return result.scalars().all()
        except Exception as e:
            logger.error("Failed to get transactions by pos config",
                         pos_config_id=str(pos_config_id),
                         error=str(e))
            raise

    async def get_transactions_by_date_range(
        self,
        tenant_id: UUID,
        start_date: date,
        end_date: date,
        skip: int = 0,
        limit: int = 100
    ) -> List[POSTransaction]:
        """Get transactions within a date range (inclusive of both endpoints)"""
        try:
            start_datetime = datetime.combine(start_date, datetime.min.time())
            end_datetime = datetime.combine(end_date, datetime.max.time())

            query = select(POSTransaction).options(
                selectinload(POSTransaction.items)
            ).where(
                and_(
                    POSTransaction.tenant_id == tenant_id,
                    POSTransaction.transaction_date >= start_datetime,
                    POSTransaction.transaction_date <= end_datetime
                )
            ).order_by(desc(POSTransaction.transaction_date)).offset(skip).limit(limit)

            result = await self.session.execute(query)
            return result.scalars().all()
        except Exception as e:
            logger.error("Failed to get transactions by date range",
                         start_date=str(start_date),
                         end_date=str(end_date),
                         error=str(e))
            raise

    async def get_dashboard_metrics(
        self,
        tenant_id: UUID
    ) -> Dict[str, Any]:
        """Get dashboard metrics for transactions"""
        try:
            # Period boundaries: today, start of the current week, start of the month
            today = datetime.now().date()
            today_start = datetime.combine(today, datetime.min.time())
            today_end = datetime.combine(today, datetime.max.time())

            week_start = today - timedelta(days=today.weekday())
            week_start_datetime = datetime.combine(week_start, datetime.min.time())

            month_start = today.replace(day=1)
            month_start_datetime = datetime.combine(month_start, datetime.min.time())

            # Transaction counts by period
            transactions_today = await self.session.execute(
                select(func.count()).select_from(POSTransaction).where(
                    and_(
                        POSTransaction.tenant_id == tenant_id,
                        POSTransaction.transaction_date >= today_start,
                        POSTransaction.transaction_date <= today_end,
                        POSTransaction.status == "completed"
                    )
                )
            )

            transactions_week = await self.session.execute(
                select(func.count()).select_from(POSTransaction).where(
                    and_(
                        POSTransaction.tenant_id == tenant_id,
                        POSTransaction.transaction_date >= week_start_datetime,
                        POSTransaction.status == "completed"
                    )
                )
            )

            transactions_month = await self.session.execute(
                select(func.count()).select_from(POSTransaction).where(
                    and_(
                        POSTransaction.tenant_id == tenant_id,
                        POSTransaction.transaction_date >= month_start_datetime,
                        POSTransaction.status == "completed"
                    )
                )
            )

            # Revenue by period
            revenue_today = await self.session.execute(
                select(func.coalesce(func.sum(POSTransaction.total_amount), 0)).where(
                    and_(
                        POSTransaction.tenant_id == tenant_id,
                        POSTransaction.transaction_date >= today_start,
                        POSTransaction.transaction_date <= today_end,
                        POSTransaction.status == "completed"
                    )
                )
            )

            revenue_week = await self.session.execute(
                select(func.coalesce(func.sum(POSTransaction.total_amount), 0)).where(
                    and_(
                        POSTransaction.tenant_id == tenant_id,
                        POSTransaction.transaction_date >= week_start_datetime,
                        POSTransaction.status == "completed"
                    )
                )
            )

            revenue_month = await self.session.execute(
                select(func.coalesce(func.sum(POSTransaction.total_amount), 0)).where(
                    and_(
                        POSTransaction.tenant_id == tenant_id,
                        POSTransaction.transaction_date >= month_start_datetime,
                        POSTransaction.status == "completed"
                    )
                )
            )

            # Status breakdown
            status_counts = await self.session.execute(
                select(POSTransaction.status, func.count()).select_from(POSTransaction).where(
                    POSTransaction.tenant_id == tenant_id
                ).group_by(POSTransaction.status)
            )
            status_breakdown = {status: count for status, count in status_counts.fetchall()}

            # Payment method breakdown
            payment_counts = await self.session.execute(
                select(POSTransaction.payment_method, func.count()).select_from(POSTransaction).where(
                    and_(
                        POSTransaction.tenant_id == tenant_id,
                        POSTransaction.status == "completed"
                    )
                ).group_by(POSTransaction.payment_method)
            )
            payment_breakdown = {method: count for method, count in payment_counts.fetchall()}

            # Average transaction value
            avg_transaction_value = await self.session.execute(
                select(func.coalesce(func.avg(POSTransaction.total_amount), 0)).where(
                    and_(
                        POSTransaction.tenant_id == tenant_id,
                        POSTransaction.status == "completed"
                    )
                )
            )

            return {
                "total_transactions_today": transactions_today.scalar(),
                "total_transactions_this_week": transactions_week.scalar(),
                "total_transactions_this_month": transactions_month.scalar(),
                "revenue_today": float(revenue_today.scalar()),
                "revenue_this_week": float(revenue_week.scalar()),
                "revenue_this_month": float(revenue_month.scalar()),
                "status_breakdown": status_breakdown,
                "payment_method_breakdown": payment_breakdown,
                "average_transaction_value": float(avg_transaction_value.scalar())
            }
        except Exception as e:
            logger.error("Failed to get dashboard metrics", error=str(e), tenant_id=tenant_id)
            raise

    async def get_sync_status_summary(
        self,
        tenant_id: UUID
    ) -> Dict[str, Any]:
        """Get sync status summary for transactions"""
        try:
            # Count synced vs unsynced
            synced_count = await self.session.execute(
                select(func.count()).select_from(POSTransaction).where(
                    and_(
                        POSTransaction.tenant_id == tenant_id,
                        POSTransaction.is_synced_to_sales == True
                    )
                )
            )

            pending_count = await self.session.execute(
                select(func.count()).select_from(POSTransaction).where(
                    and_(
                        POSTransaction.tenant_id == tenant_id,
                        POSTransaction.is_synced_to_sales == False,
                        POSTransaction.sync_error.is_(None)
                    )
                )
            )

            failed_count = await self.session.execute(
                select(func.count()).select_from(POSTransaction).where(
                    and_(
                        POSTransaction.tenant_id == tenant_id,
                        POSTransaction.is_synced_to_sales == False,
                        POSTransaction.sync_error.isnot(None)
                    )
                )
            )

            # Get the last sync time
            last_sync = await self.session.execute(
                select(func.max(POSTransaction.sync_completed_at)).where(
                    and_(
                        POSTransaction.tenant_id == tenant_id,
                        POSTransaction.is_synced_to_sales == True
                    )
                )
            )

            return {
                "synced": synced_count.scalar(),
                "pending": pending_count.scalar(),
                "failed": failed_count.scalar(),
                "last_sync_at": last_sync.scalar()
            }
        except Exception as e:
            logger.error("Failed to get sync status summary", error=str(e), tenant_id=tenant_id)
            raise
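One plausible consumer of `get_dashboard_metrics` is a small read-only endpoint. The route path and the `get_session` dependency below are illustrative names, not the service's actual wiring:

```python
from uuid import UUID

from fastapi import APIRouter, Depends

from app.repositories.pos_transaction_repository import POSTransactionRepository

router = APIRouter()


@router.get("/dashboard/{tenant_id}")
async def dashboard(tenant_id: UUID, session=Depends(get_session)):  # get_session is assumed
    # Delegates all aggregation to the repository; returns plain JSON-safe values
    repo = POSTransactionRepository(session)
    return await repo.get_dashboard_metrics(tenant_id)
```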
357
services/pos/app/scheduler.py
Normal file
@@ -0,0 +1,357 @@
"""
Background Task Scheduler for POS Service

Sets up periodic background jobs for:
- Syncing POS transactions to the sales service
- Other maintenance tasks as needed

Uses Redis-based leader election to ensure only one pod runs scheduled tasks
when running with multiple replicas.

Usage in main.py:
```python
from app.scheduler import POSScheduler

# On startup
scheduler = POSScheduler(redis_url=settings.REDIS_URL)
await scheduler.start()

# On shutdown
await scheduler.stop()
```
"""

import structlog
from apscheduler.schedulers.asyncio import AsyncIOScheduler
from apscheduler.triggers.interval import IntervalTrigger
from datetime import datetime
from typing import Optional

logger = structlog.get_logger()


class POSScheduler:
    """
    POS scheduler service that manages background sync jobs.

    Uses Redis-based leader election to ensure only one pod runs
    scheduled jobs in a multi-replica deployment.
    """

    def __init__(self, redis_url: Optional[str] = None, sync_interval_minutes: int = 5):
        """
        Initialize the POS scheduler.

        Args:
            redis_url: Redis connection URL for leader election
            sync_interval_minutes: Interval for the POS-to-sales sync job
        """
        self.scheduler = None
        self.sync_interval_minutes = sync_interval_minutes

        # Leader election
        self._redis_url = redis_url
        self._leader_election = None
        self._redis_client = None
        self._scheduler_started = False

    async def start(self):
        """Start the POS scheduler with leader election"""
        if self._redis_url:
            await self._start_with_leader_election()
        else:
            # Fall back to standalone mode (for local development or single-pod deployments)
            logger.warning("Redis URL not provided, starting POS scheduler in standalone mode")
            await self._start_standalone()

    async def _start_with_leader_election(self):
        """Start with Redis-based leader election for horizontal scaling"""
        import redis.asyncio as redis
        from shared.leader_election import LeaderElectionService

        try:
            # Create Redis connection
            self._redis_client = redis.from_url(self._redis_url, decode_responses=False)
            await self._redis_client.ping()

            # Create scheduler (but don't start it yet)
            self.scheduler = AsyncIOScheduler()

            # Create leader election
            self._leader_election = LeaderElectionService(
                self._redis_client,
                service_name="pos-scheduler"
            )

            # Start leader election with callbacks
            await self._leader_election.start(
                on_become_leader=self._on_become_leader,
                on_lose_leader=self._on_lose_leader
            )

            logger.info("POS scheduler started with leader election",
                        is_leader=self._leader_election.is_leader,
                        instance_id=self._leader_election.instance_id)

        except Exception as e:
            logger.error("Failed to start with leader election, falling back to standalone",
                         error=str(e))
            await self._start_standalone()

    async def _on_become_leader(self):
        """Called when this instance becomes the leader"""
        logger.info("POS scheduler became leader, starting scheduled jobs")
        await self._start_scheduler()

    async def _on_lose_leader(self):
        """Called when this instance loses leadership"""
        logger.warning("POS scheduler lost leadership, stopping scheduled jobs")
        await self._stop_scheduler()

    async def _start_scheduler(self):
        """Start the APScheduler with POS jobs"""
        if self._scheduler_started:
            logger.warning("POS scheduler already started")
            return

        try:
            # Import sync job
            from app.jobs.sync_pos_to_sales import run_pos_to_sales_sync

            # Job 1: Sync POS transactions to the sales service
            self.scheduler.add_job(
                run_pos_to_sales_sync,
                trigger=IntervalTrigger(minutes=self.sync_interval_minutes),
                id='pos_to_sales_sync',
                name='Sync POS Transactions to Sales',
                replace_existing=True,
                max_instances=1,        # Prevent concurrent runs
                coalesce=True,          # Combine multiple missed runs into one
                misfire_grace_time=60   # Allow 60 seconds grace for missed runs
            )

            # Start scheduler
            if not self.scheduler.running:
                self.scheduler.start()
                self._scheduler_started = True
                logger.info("POS scheduler jobs started",
                            sync_interval_minutes=self.sync_interval_minutes,
                            job_count=len(self.scheduler.get_jobs()),
                            next_run=self.scheduler.get_jobs()[0].next_run_time if self.scheduler.get_jobs() else None)

        except Exception as e:
            logger.error("Failed to start POS scheduler", error=str(e))

    async def _stop_scheduler(self):
        """Stop the APScheduler"""
        if not self._scheduler_started:
            return

        try:
            if self.scheduler and self.scheduler.running:
                self.scheduler.shutdown(wait=False)
                self._scheduler_started = False
                logger.info("POS scheduler jobs stopped")

        except Exception as e:
            logger.error("Failed to stop POS scheduler", error=str(e))

    async def _start_standalone(self):
        """Start scheduler without leader election (fallback mode)"""
        logger.warning("Starting POS scheduler in standalone mode (no leader election)")

        self.scheduler = AsyncIOScheduler()

        try:
            # Import sync job
            from app.jobs.sync_pos_to_sales import run_pos_to_sales_sync

            self.scheduler.add_job(
                run_pos_to_sales_sync,
                trigger=IntervalTrigger(minutes=self.sync_interval_minutes),
                id='pos_to_sales_sync',
                name='Sync POS Transactions to Sales',
                replace_existing=True,
                max_instances=1,
                coalesce=True,
                misfire_grace_time=60
            )

            if not self.scheduler.running:
                self.scheduler.start()
                self._scheduler_started = True
                logger.info("POS scheduler started (standalone mode)",
                            sync_interval_minutes=self.sync_interval_minutes,
                            next_run=self.scheduler.get_jobs()[0].next_run_time if self.scheduler.get_jobs() else None)

        except Exception as e:
            logger.error("Failed to start POS scheduler in standalone mode", error=str(e))

    async def stop(self):
        """Stop the POS scheduler and leader election"""
        # Stop leader election
        if self._leader_election:
            await self._leader_election.stop()

        # Stop scheduler
        await self._stop_scheduler()

        # Close Redis
        if self._redis_client:
            await self._redis_client.close()

        logger.info("POS scheduler stopped")

    @property
    def is_leader(self) -> bool:
        """Check if this instance is the leader"""
        return self._leader_election.is_leader if self._leader_election else True

    def get_leader_status(self) -> dict:
        """Get leader election status"""
        if self._leader_election:
            return self._leader_election.get_status()
        return {"is_leader": True, "mode": "standalone"}

    def get_scheduler_status(self) -> dict:
        """
        Get current scheduler status

        Returns:
            Dict with scheduler info and job statuses
        """
        if self.scheduler is None or not self._scheduler_started:
            return {
                "running": False,
                "is_leader": self.is_leader,
                "jobs": []
            }

        jobs = []
        for job in self.scheduler.get_jobs():
            jobs.append({
                "id": job.id,
                "name": job.name,
                "next_run": job.next_run_time.isoformat() if job.next_run_time else None,
                "trigger": str(job.trigger)
            })

        return {
            "running": True,
            "is_leader": self.is_leader,
            "jobs": jobs,
            "state": self.scheduler.state
        }

    def trigger_job_now(self, job_id: str) -> bool:
        """
        Manually trigger a scheduled job immediately

        Args:
            job_id: Job identifier (e.g., 'pos_to_sales_sync')

        Returns:
            True if the job was triggered, False otherwise
        """
        if self.scheduler is None or not self._scheduler_started:
            logger.error("Cannot trigger job, scheduler not running")
            return False

        if not self.is_leader:
            logger.warning("Cannot trigger job, this instance is not the leader")
            return False

        try:
            job = self.scheduler.get_job(job_id)
            if job:
                self.scheduler.modify_job(job_id, next_run_time=datetime.now())
                logger.info("Job triggered manually", job_id=job_id)
                return True
            else:
                logger.warning("Job not found", job_id=job_id)
                return False

        except Exception as e:
            logger.error("Failed to trigger job", job_id=job_id, error=str(e))
            return False


# ================================================================
# Legacy compatibility functions (deprecated - use POSScheduler class)
# ================================================================

# Global scheduler instance for backward compatibility
_scheduler_instance: Optional[POSScheduler] = None


def start_scheduler():
    """
    DEPRECATED: Use the POSScheduler class directly for better leader election support.

    Initialize and start the background scheduler (legacy function).
    """
    global _scheduler_instance

    if _scheduler_instance is not None:
        logger.warning("Scheduler already running")
        return

    logger.warning("Using deprecated start_scheduler function. "
                   "Consider migrating to the POSScheduler class for leader election support.")

    try:
        _scheduler_instance = POSScheduler()
        # Note: this is a synchronous fallback with no leader election;
        # it must be called from within a running event loop.
        import asyncio
        asyncio.create_task(_scheduler_instance._start_standalone())

    except Exception as e:
        logger.error("Failed to start scheduler", error=str(e), exc_info=True)
        _scheduler_instance = None


def shutdown_scheduler():
    """
    DEPRECATED: Use the POSScheduler class directly.

    Gracefully shut down the scheduler (legacy function).
    """
    global _scheduler_instance

    if _scheduler_instance is None:
        logger.warning("Scheduler not running")
        return

    try:
        import asyncio
        asyncio.create_task(_scheduler_instance.stop())
        _scheduler_instance = None

    except Exception as e:
        logger.error("Failed to shutdown scheduler", error=str(e), exc_info=True)


def get_scheduler_status():
    """
    DEPRECATED: Use the POSScheduler class directly.

    Get current scheduler status (legacy function).
    """
    if _scheduler_instance is None:
        return {
            "running": False,
            "jobs": []
        }
    return _scheduler_instance.get_scheduler_status()


def trigger_job_now(job_id: str):
    """
    DEPRECATED: Use the POSScheduler class directly.

    Manually trigger a scheduled job immediately (legacy function).
    """
    if _scheduler_instance is None:
        logger.error("Cannot trigger job, scheduler not running")
        return False
    return _scheduler_instance.trigger_job_now(job_id)
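Expanding on the usage note in the module docstring, the scheduler fits naturally into a FastAPI lifespan handler. A hedged sketch; `settings.REDIS_URL` is assumed to exist in this service's configuration, and the `app.state` attribute name is illustrative:

```python
from contextlib import asynccontextmanager

from fastapi import FastAPI

from app.scheduler import POSScheduler


@asynccontextmanager
async def lifespan(app: FastAPI):
    # Start the scheduler on app startup; only the elected leader pod
    # will actually run the jobs when REDIS_URL is set.
    scheduler = POSScheduler(redis_url=settings.REDIS_URL, sync_interval_minutes=5)
    await scheduler.start()
    app.state.pos_scheduler = scheduler  # e.g. for a /scheduler/status endpoint
    try:
        yield
    finally:
        await scheduler.stop()
```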
95
services/pos/app/schemas/pos_config.py
Normal file
@@ -0,0 +1,95 @@
"""
Pydantic schemas for POS configuration API requests and responses
"""

from typing import Optional, List, Dict, Any
from datetime import datetime
from pydantic import BaseModel, Field
from enum import Enum


class POSProvider(str, Enum):
    """POS provider types"""
    SQUARE = "square"
    TOAST = "toast"
    LIGHTSPEED = "lightspeed"


class POSConfigurationBase(BaseModel):
    """Base schema for POS configurations"""

    class Config:
        from_attributes = True
        use_enum_values = True
        json_encoders = {
            datetime: lambda v: v.isoformat() if v else None
        }


class POSConfigurationResponse(POSConfigurationBase):
    """Schema for POS configuration API responses"""
    id: str
    tenant_id: str
    pos_system: POSProvider
    provider_name: str
    is_active: bool
    is_connected: bool
    webhook_url: Optional[str] = None
    webhook_secret: Optional[str] = None
    environment: str = "sandbox"
    location_id: Optional[str] = None
    merchant_id: Optional[str] = None
    sync_enabled: bool = True
    sync_interval_minutes: str = "5"
    auto_sync_products: bool = True
    auto_sync_transactions: bool = True
    last_sync_at: Optional[datetime] = None
    last_successful_sync_at: Optional[datetime] = None
    last_sync_status: Optional[str] = None
    last_sync_message: Optional[str] = None
    provider_settings: Optional[Dict[str, Any]] = None
    last_health_check_at: Optional[datetime] = None
    health_status: str = "unknown"
    health_message: Optional[str] = None
    created_at: datetime
    updated_at: datetime
    notes: Optional[str] = None

    @classmethod
    def from_orm(cls, obj):
        """Convert ORM object to schema with proper UUID handling"""
        return cls(
            id=str(obj.id),
            tenant_id=str(obj.tenant_id),
            pos_system=obj.pos_system,
            provider_name=obj.provider_name,
            is_active=obj.is_active,
            is_connected=obj.is_connected,
            webhook_url=obj.webhook_url,
            webhook_secret=obj.webhook_secret,
            environment=obj.environment,
            location_id=obj.location_id,
            merchant_id=obj.merchant_id,
            sync_enabled=obj.sync_enabled,
            sync_interval_minutes=obj.sync_interval_minutes,
            auto_sync_products=obj.auto_sync_products,
            auto_sync_transactions=obj.auto_sync_transactions,
            last_sync_at=obj.last_sync_at,
            last_successful_sync_at=obj.last_successful_sync_at,
            last_sync_status=obj.last_sync_status,
            last_sync_message=obj.last_sync_message,
            provider_settings=obj.provider_settings,
            last_health_check_at=obj.last_health_check_at,
            health_status=obj.health_status,
            health_message=obj.health_message,
            created_at=obj.created_at,
            updated_at=obj.updated_at,
            notes=obj.notes
        )


class POSConfigurationListResponse(BaseModel):
    """Schema for POS configuration list API response"""
    configurations: List[POSConfigurationResponse]
    total: int
    supported_systems: List[str] = ["square", "toast", "lightspeed"]
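

# Illustrative sketch (assumption, not in the original file): assembling the
# list response from ORM rows via the from_orm override above.
def build_config_list_response(rows) -> POSConfigurationListResponse:
    configurations = [POSConfigurationResponse.from_orm(row) for row in rows]
    return POSConfigurationListResponse(
        configurations=configurations,
        total=len(configurations)
    )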
248
services/pos/app/schemas/pos_transaction.py
Normal file
@@ -0,0 +1,248 @@
"""
Pydantic schemas for POS transaction API requests and responses
"""

from typing import Optional, List, Dict, Any
from datetime import datetime
from decimal import Decimal
from pydantic import BaseModel, Field
from enum import Enum


class TransactionType(str, Enum):
    """Transaction type enumeration"""
    SALE = "sale"
    REFUND = "refund"
    VOID = "void"
    EXCHANGE = "exchange"


class TransactionStatus(str, Enum):
    """Transaction status enumeration"""
    COMPLETED = "completed"
    PENDING = "pending"
    FAILED = "failed"
    REFUNDED = "refunded"
    VOIDED = "voided"


class PaymentMethod(str, Enum):
    """Payment method enumeration"""
    CARD = "card"
    CASH = "cash"
    DIGITAL_WALLET = "digital_wallet"
    OTHER = "other"


class OrderType(str, Enum):
    """Order type enumeration"""
    DINE_IN = "dine_in"
    TAKEOUT = "takeout"
    DELIVERY = "delivery"
    PICKUP = "pickup"


class POSTransactionItemResponse(BaseModel):
    """Schema for POS transaction item response"""
    id: str
    transaction_id: str
    tenant_id: str
    external_item_id: Optional[str] = None
    sku: Optional[str] = None
    product_name: str
    product_category: Optional[str] = None
    product_subcategory: Optional[str] = None
    quantity: Decimal
    unit_price: Decimal
    total_price: Decimal
    discount_amount: Decimal = Decimal("0")
    tax_amount: Decimal = Decimal("0")
    modifiers: Optional[Dict[str, Any]] = None
    inventory_product_id: Optional[str] = None
    is_mapped_to_inventory: bool = False
    is_synced_to_sales: bool = False
    created_at: datetime
    updated_at: datetime

    class Config:
        from_attributes = True
        use_enum_values = True
        json_encoders = {
            datetime: lambda v: v.isoformat() if v else None,
            Decimal: lambda v: float(v) if v else 0.0
        }

    @classmethod
    def from_orm(cls, obj):
        """Convert ORM object to schema with proper UUID and Decimal handling"""
        return cls(
            id=str(obj.id),
            transaction_id=str(obj.transaction_id),
            tenant_id=str(obj.tenant_id),
            external_item_id=obj.external_item_id,
            sku=obj.sku,
            product_name=obj.product_name,
            product_category=obj.product_category,
            product_subcategory=obj.product_subcategory,
            quantity=obj.quantity,
            unit_price=obj.unit_price,
            total_price=obj.total_price,
            discount_amount=obj.discount_amount,
            tax_amount=obj.tax_amount,
            modifiers=obj.modifiers,
            inventory_product_id=str(obj.inventory_product_id) if obj.inventory_product_id else None,
            is_mapped_to_inventory=obj.is_mapped_to_inventory,
            is_synced_to_sales=obj.is_synced_to_sales,
            created_at=obj.created_at,
            updated_at=obj.updated_at
        )


class POSTransactionResponse(BaseModel):
    """Schema for POS transaction response"""
    id: str
    tenant_id: str
    pos_config_id: str
    pos_system: str
    external_transaction_id: str
    external_order_id: Optional[str] = None
    transaction_type: TransactionType
    status: TransactionStatus
    subtotal: Decimal
    tax_amount: Decimal
    tip_amount: Decimal
    discount_amount: Decimal
    total_amount: Decimal
    currency: str = "EUR"
    payment_method: Optional[PaymentMethod] = None
    payment_status: Optional[str] = None
    transaction_date: datetime
    pos_created_at: datetime
    pos_updated_at: Optional[datetime] = None
    location_id: Optional[str] = None
    location_name: Optional[str] = None
    staff_id: Optional[str] = None
    staff_name: Optional[str] = None
    customer_id: Optional[str] = None
    customer_email: Optional[str] = None
    customer_phone: Optional[str] = None
    order_type: Optional[OrderType] = None
    table_number: Optional[str] = None
    receipt_number: Optional[str] = None
    is_synced_to_sales: bool = False
    sales_record_id: Optional[str] = None
    sync_attempted_at: Optional[datetime] = None
    sync_completed_at: Optional[datetime] = None
    sync_error: Optional[str] = None
    sync_retry_count: int = 0
    is_processed: bool = False
    is_duplicate: bool = False
    created_at: datetime
    updated_at: datetime
    items: List[POSTransactionItemResponse] = []

    class Config:
        from_attributes = True
        use_enum_values = True
        json_encoders = {
            datetime: lambda v: v.isoformat() if v else None,
            Decimal: lambda v: float(v) if v else 0.0
        }

    @classmethod
    def from_orm(cls, obj):
        """Convert ORM object to schema with proper UUID and Decimal handling"""
        return cls(
            id=str(obj.id),
            tenant_id=str(obj.tenant_id),
            pos_config_id=str(obj.pos_config_id),
            pos_system=obj.pos_system,
            external_transaction_id=obj.external_transaction_id,
            external_order_id=obj.external_order_id,
            transaction_type=obj.transaction_type,
            status=obj.status,
            subtotal=obj.subtotal,
            tax_amount=obj.tax_amount,
            tip_amount=obj.tip_amount,
            discount_amount=obj.discount_amount,
            total_amount=obj.total_amount,
            currency=obj.currency,
            payment_method=obj.payment_method,
            payment_status=obj.payment_status,
            transaction_date=obj.transaction_date,
            pos_created_at=obj.pos_created_at,
            pos_updated_at=obj.pos_updated_at,
            location_id=obj.location_id,
            location_name=obj.location_name,
            staff_id=obj.staff_id,
            staff_name=obj.staff_name,
            customer_id=obj.customer_id,
            customer_email=obj.customer_email,
            customer_phone=obj.customer_phone,
            order_type=obj.order_type,
            table_number=obj.table_number,
            receipt_number=obj.receipt_number,
            is_synced_to_sales=obj.is_synced_to_sales,
            sales_record_id=str(obj.sales_record_id) if obj.sales_record_id else None,
            sync_attempted_at=obj.sync_attempted_at,
            sync_completed_at=obj.sync_completed_at,
            sync_error=obj.sync_error,
            sync_retry_count=obj.sync_retry_count,
            is_processed=obj.is_processed,
            is_duplicate=obj.is_duplicate,
            created_at=obj.created_at,
            updated_at=obj.updated_at,
            items=[POSTransactionItemResponse.from_orm(item) for item in obj.items] if hasattr(obj, 'items') and obj.items else []
        )


class POSTransactionSummary(BaseModel):
    """Summary information for a transaction (lightweight)"""
    id: str
    external_transaction_id: str
    transaction_date: datetime
    total_amount: Decimal
    status: TransactionStatus
    payment_method: Optional[PaymentMethod] = None
    is_synced_to_sales: bool
    item_count: int = 0

    class Config:
        from_attributes = True
        use_enum_values = True
        json_encoders = {
            datetime: lambda v: v.isoformat() if v else None,
            Decimal: lambda v: float(v) if v else 0.0
        }


class POSTransactionListResponse(BaseModel):
    """Schema for paginated transaction list response"""
    transactions: List[POSTransactionResponse]
    total: int
    has_more: bool = False
    summary: Optional[Dict[str, Any]] = None

    class Config:
        from_attributes = True


class POSTransactionDashboardSummary(BaseModel):
    """Dashboard summary for POS transactions"""
    total_transactions_today: int = 0
    total_transactions_this_week: int = 0
    total_transactions_this_month: int = 0
    revenue_today: Decimal = Decimal("0")
    revenue_this_week: Decimal = Decimal("0")
    revenue_this_month: Decimal = Decimal("0")
    average_transaction_value: Decimal = Decimal("0")
    status_breakdown: Dict[str, int] = {}
    payment_method_breakdown: Dict[str, int] = {}
    sync_status: Dict[str, Any] = {}

    class Config:
        from_attributes = True
        json_encoders = {
            Decimal: lambda v: float(v) if v else 0.0,
            datetime: lambda v: v.isoformat() if v else None
        }
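

# Illustrative sketch (assumption, not in the original file): deriving the
# lightweight summary from a full transaction response.
def to_transaction_summary(txn: POSTransactionResponse) -> POSTransactionSummary:
    return POSTransactionSummary(
        id=txn.id,
        external_transaction_id=txn.external_transaction_id,
        transaction_date=txn.transaction_date,
        total_amount=txn.total_amount,
        status=txn.status,
        payment_method=txn.payment_method,
        is_synced_to_sales=txn.is_synced_to_sales,
        item_count=len(txn.items)
    )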
1
services/pos/app/services/__init__.py
Normal file
@@ -0,0 +1 @@
# POS Services
76
services/pos/app/services/pos_config_service.py
Normal file
@@ -0,0 +1,76 @@
"""
POS Configuration Service - Business Logic Layer
"""

from typing import List, Optional
from uuid import UUID
import structlog

from app.repositories.pos_config_repository import POSConfigurationRepository
from app.schemas.pos_config import POSConfigurationResponse
from app.core.database import get_db_transaction

logger = structlog.get_logger()


class POSConfigurationService:
    """Service layer for POS configuration operations"""

    def __init__(self):
        pass

    async def get_configurations_by_tenant(
        self,
        tenant_id: UUID,
        pos_system: Optional[str] = None,
        is_active: Optional[bool] = None,
        skip: int = 0,
        limit: int = 100
    ) -> List[POSConfigurationResponse]:
        """Get POS configurations for a tenant with filtering"""
        try:
            async with get_db_transaction() as db:
                repository = POSConfigurationRepository(db)

                configurations = await repository.get_configurations_by_tenant(
                    tenant_id=tenant_id,
                    pos_system=pos_system,
                    is_active=is_active,
                    skip=skip,
                    limit=limit
                )

                # Convert to response schemas using from_orm
                responses = []
                for config in configurations:
                    response = POSConfigurationResponse.from_orm(config)
                    responses.append(response)

                return responses

        except Exception as e:
            logger.error("Failed to get configurations by tenant", error=str(e), tenant_id=tenant_id)
            raise

    async def count_configurations_by_tenant(
        self,
        tenant_id: UUID,
        pos_system: Optional[str] = None,
        is_active: Optional[bool] = None
    ) -> int:
        """Count POS configurations for a tenant with filtering"""
        try:
            async with get_db_transaction() as db:
                repository = POSConfigurationRepository(db)

                count = await repository.count_configurations_by_tenant(
                    tenant_id=tenant_id,
                    pos_system=pos_system,
                    is_active=is_active
                )

                return count

        except Exception as e:
            logger.error("Failed to count configurations by tenant", error=str(e), tenant_id=tenant_id)
            raise
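

# Illustrative usage sketch (assumption, not in the original file): pairing
# the list and count calls to build a paginated payload, as an API route might.
async def example_list_configurations(tenant_id: UUID) -> dict:
    service = POSConfigurationService()
    configurations = await service.get_configurations_by_tenant(tenant_id, limit=20)
    total = await service.count_configurations_by_tenant(tenant_id)
    return {"configurations": configurations, "total": total}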
473
services/pos/app/services/pos_integration_service.py
Normal file
@@ -0,0 +1,473 @@
# services/pos/app/services/pos_integration_service.py
"""
POS Integration Service
Handles real-time sync and webhook processing for POS systems
"""

import asyncio
import json
from typing import Dict, List, Optional, Any
from datetime import datetime, timedelta
from uuid import UUID

import structlog
import httpx
from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession

from app.core.config import settings
from app.core.database import get_db_transaction
from app.models.pos_config import POSConfiguration
from app.models.pos_transaction import POSTransaction, POSTransactionItem
from app.models.pos_webhook import POSWebhookLog
from app.models.pos_sync import POSSyncLog
from app.integrations.base_pos_client import (
    POSCredentials,
    BasePOSClient,
    POSTransaction as ClientPOSTransaction,
    SyncResult
)
from app.integrations.square_client import SquarePOSClient

logger = structlog.get_logger()


class POSIntegrationService:
    """
    Main service for POS integrations
    Handles webhook processing, real-time sync, and data transformation
    """

    def __init__(self):
        self.supported_clients = {
            "square": SquarePOSClient,
            # "toast": ToastPOSClient,  # To be implemented
            # "lightspeed": LightspeedPOSClient,  # To be implemented
        }

    def _create_pos_client(self, config: POSConfiguration) -> BasePOSClient:
        """Create POS client from configuration"""

        if config.pos_system not in self.supported_clients:
            raise ValueError(f"Unsupported POS system: {config.pos_system}")

        # Decrypt credentials (simplified - in production use proper encryption)
        credentials_data = json.loads(config.encrypted_credentials or "{}")

        credentials = POSCredentials(
            pos_system=config.pos_system,
            environment=config.environment,
            api_key=credentials_data.get("api_key"),
            api_secret=credentials_data.get("api_secret"),
            access_token=credentials_data.get("access_token"),
            application_id=credentials_data.get("application_id"),
            merchant_id=config.merchant_id,
            location_id=config.location_id,
            webhook_secret=config.webhook_secret
        )

        client_class = self.supported_clients[config.pos_system]
        return client_class(credentials)

    async def test_connection(self, config: POSConfiguration) -> Dict[str, Any]:
        """Test connection to POS system"""
        try:
            client = self._create_pos_client(config)
            success, message = await client.test_connection()

            # Update health status in database
            async with get_db_transaction() as session:
                config.health_status = "healthy" if success else "unhealthy"
                config.health_message = message
                config.last_health_check_at = datetime.utcnow()
                config.is_connected = success

                session.add(config)
                await session.commit()

            return {
                "success": success,
                "message": message,
                "tested_at": datetime.utcnow().isoformat()
            }

        except Exception as e:
            logger.error("Connection test failed", error=str(e), config_id=config.id)

            # Update health status
            async with get_db_transaction() as session:
                config.health_status = "unhealthy"
                config.health_message = f"Test failed: {str(e)}"
                config.last_health_check_at = datetime.utcnow()
                config.is_connected = False

                session.add(config)
                await session.commit()

            return {
                "success": False,
                "message": f"Connection test failed: {str(e)}",
                "tested_at": datetime.utcnow().isoformat()
            }

    async def process_webhook(
        self,
        pos_system: str,
        payload: bytes,
        headers: Dict[str, str],
        query_params: Dict[str, str],
        method: str,
        url_path: str,
        source_ip: str
    ) -> Dict[str, Any]:
        """Process incoming webhook from POS system"""

        webhook_log = None

        try:
            # Parse payload
            raw_payload = payload.decode('utf-8')
            payload_data = json.loads(raw_payload) if raw_payload else {}

            # Extract webhook type and event info
            webhook_type = self._extract_webhook_type(pos_system, payload_data)
            event_id = self._extract_event_id(pos_system, payload_data)

            # Create webhook log
            async with get_db_transaction() as session:
                webhook_log = POSWebhookLog(
                    pos_system=pos_system,
                    webhook_type=webhook_type or "unknown",
                    method=method,
                    url_path=url_path,
                    query_params=query_params,
                    headers=headers,
                    raw_payload=raw_payload,
                    payload_size=len(payload),
                    content_type=headers.get("content-type"),
                    signature=headers.get("x-square-signature") or headers.get("x-toast-signature"),
                    source_ip=source_ip,
                    status="received",
                    event_id=event_id,
                    priority="normal"
                )

                session.add(webhook_log)
                await session.commit()
                await session.refresh(webhook_log)

            # Find relevant POS configuration
            config = await self._find_pos_config_for_webhook(pos_system, payload_data)

            if not config:
                logger.warning("No POS configuration found for webhook", pos_system=pos_system)
                await self._update_webhook_status(webhook_log.id, "failed", "No configuration found")
                return {"status": "error", "message": "No configuration found"}

            # Update webhook log with tenant info
            async with get_db_transaction() as session:
                webhook_log.tenant_id = config.tenant_id
                session.add(webhook_log)
                await session.commit()

            # Verify webhook signature
            if config.webhook_secret:
                client = self._create_pos_client(config)
                signature = webhook_log.signature or ""
                is_valid = client.verify_webhook_signature(payload, signature)

                async with get_db_transaction() as session:
                    webhook_log.is_signature_valid = is_valid
                    session.add(webhook_log)
                    await session.commit()

                if not is_valid:
                    logger.warning("Invalid webhook signature", config_id=config.id)
                    await self._update_webhook_status(webhook_log.id, "failed", "Invalid signature")
                    return {"status": "error", "message": "Invalid signature"}

            # Process webhook payload
            await self._update_webhook_status(webhook_log.id, "processing")

            result = await self._process_webhook_payload(config, payload_data, webhook_log)

            if result["success"]:
                await self._update_webhook_status(webhook_log.id, "processed", result.get("message"))
                return {"status": "success", "message": result.get("message", "Processed successfully")}
            else:
                await self._update_webhook_status(webhook_log.id, "failed", result.get("error"))
                return {"status": "error", "message": result.get("error", "Processing failed")}

        except Exception as e:
            logger.error("Webhook processing failed", error=str(e), pos_system=pos_system)

            if webhook_log:
                await self._update_webhook_status(webhook_log.id, "failed", f"Processing error: {str(e)}")

            return {"status": "error", "message": "Processing failed"}

    async def _process_webhook_payload(
        self,
        config: POSConfiguration,
        payload_data: Dict[str, Any],
        webhook_log: POSWebhookLog
    ) -> Dict[str, Any]:
        """Process webhook payload and extract transaction data"""

        try:
            client = self._create_pos_client(config)

            # Parse webhook into transaction
            client_transaction = client.parse_webhook_payload(payload_data)

            if not client_transaction:
                return {"success": False, "error": "No transaction data in webhook"}

            # Convert to database model and save
            transaction = await self._save_pos_transaction(
                config,
                client_transaction,
                webhook_log.id
            )

            if transaction:
                # Queue for sync to sales service
                await self._queue_sales_sync(transaction)

                return {
                    "success": True,
                    "message": f"Transaction {transaction.external_transaction_id} processed",
                    "transaction_id": str(transaction.id)
                }
            else:
                return {"success": False, "error": "Failed to save transaction"}

        except Exception as e:
            logger.error("Webhook payload processing failed", error=str(e), config_id=config.id)
            return {"success": False, "error": str(e)}

    async def _save_pos_transaction(
        self,
        config: POSConfiguration,
        client_transaction: ClientPOSTransaction,
        webhook_log_id: Optional[UUID] = None
    ) -> Optional[POSTransaction]:
        """Save POS transaction to database"""

        try:
            async with get_db_transaction() as session:
                # Check for duplicate (raw SQL must be wrapped in text() for
                # SQLAlchemy 1.4+ async sessions)
                existing = await session.execute(
                    text("SELECT id FROM pos_transactions WHERE external_transaction_id = :ext_id AND pos_config_id = :config_id"),
                    {
                        "ext_id": client_transaction.external_id,
                        "config_id": config.id
                    }
                )

                if existing.first():
                    logger.info("Duplicate transaction detected",
                                external_id=client_transaction.external_id)
                    return None

                # Create transaction record
                transaction = POSTransaction(
                    tenant_id=config.tenant_id,
                    pos_config_id=config.id,
                    pos_system=config.pos_system,
                    external_transaction_id=client_transaction.external_id,
                    external_order_id=client_transaction.external_order_id,
                    transaction_type=client_transaction.transaction_type,
                    status=client_transaction.status,
                    subtotal=client_transaction.subtotal,
                    tax_amount=client_transaction.tax_amount,
                    tip_amount=client_transaction.tip_amount,
                    discount_amount=client_transaction.discount_amount,
                    total_amount=client_transaction.total_amount,
                    currency=client_transaction.currency,
                    payment_method=client_transaction.payment_method,
                    payment_status=client_transaction.payment_status,
                    transaction_date=client_transaction.transaction_date,
                    pos_created_at=client_transaction.transaction_date,
                    location_id=client_transaction.location_id,
                    location_name=client_transaction.location_name,
                    staff_id=client_transaction.staff_id,
                    staff_name=client_transaction.staff_name,
                    customer_id=client_transaction.customer_id,
                    customer_email=client_transaction.customer_email,
                    order_type=client_transaction.order_type,
                    table_number=client_transaction.table_number,
                    receipt_number=client_transaction.receipt_number,
                    raw_data=client_transaction.raw_data,
                    is_processed=True
                )

                session.add(transaction)
                await session.flush()  # Get the ID

                # Create transaction items
                for client_item in client_transaction.items:
                    item = POSTransactionItem(
                        transaction_id=transaction.id,
                        tenant_id=config.tenant_id,
                        external_item_id=client_item.external_id,
                        sku=client_item.sku,
                        product_name=client_item.name,
                        product_category=client_item.category,
                        quantity=client_item.quantity,
                        unit_price=client_item.unit_price,
                        total_price=client_item.total_price,
                        discount_amount=client_item.discount_amount,
                        tax_amount=client_item.tax_amount,
                        modifiers=client_item.modifiers,
                        raw_data=client_item.raw_data
                    )
                    session.add(item)

                await session.commit()
                await session.refresh(transaction)

                logger.info("Transaction saved",
                            transaction_id=transaction.id,
                            external_id=client_transaction.external_id)

                return transaction

        except Exception as e:
            logger.error("Failed to save transaction", error=str(e))
            return None

    async def _queue_sales_sync(self, transaction: POSTransaction):
        """Queue transaction for sync to sales service"""
        try:
            # Send transaction data to sales service
            sales_data = {
                "product_name": f"POS Transaction {transaction.external_transaction_id}",
                "quantity_sold": 1,
                "unit_price": float(transaction.total_amount),
                "total_revenue": float(transaction.total_amount),
                "sale_date": transaction.transaction_date.isoformat(),
                "sales_channel": f"{transaction.pos_system}_pos",
                "location_id": transaction.location_id,
                "source": "pos_integration",
                "external_transaction_id": transaction.external_transaction_id,
                "payment_method": transaction.payment_method,
                "raw_pos_data": transaction.raw_data
            }

            async with httpx.AsyncClient() as client:
                response = await client.post(
                    f"{settings.SALES_SERVICE_URL}/api/v1/tenants/{transaction.tenant_id}/sales",
                    json=sales_data,
                    timeout=30.0
                )

                if response.status_code == 200:
                    # Update transaction as synced
                    async with get_db_transaction() as session:
                        transaction.is_synced_to_sales = True
                        transaction.sync_completed_at = datetime.utcnow()
                        session.add(transaction)
                        await session.commit()

                    logger.info("Transaction synced to sales service",
                                transaction_id=transaction.id)
                else:
                    logger.error("Failed to sync to sales service",
                                 status_code=response.status_code,
                                 transaction_id=transaction.id)

        except Exception as e:
            logger.error("Sales sync failed", error=str(e), transaction_id=transaction.id)
    def _extract_webhook_type(self, pos_system: str, payload: Dict[str, Any]) -> Optional[str]:
        """Extract webhook type from payload"""
        if pos_system == "square":
            return payload.get("type")
        elif pos_system == "toast":
            return payload.get("eventType")
        elif pos_system == "lightspeed":
            return payload.get("action")
        return None

    def _extract_event_id(self, pos_system: str, payload: Dict[str, Any]) -> Optional[str]:
        """Extract event ID from payload"""
        if pos_system == "square":
            return payload.get("event_id")
        elif pos_system == "toast":
            return payload.get("guid")
        elif pos_system == "lightspeed":
            return payload.get("id")
        return None

    async def _find_pos_config_for_webhook(
        self,
        pos_system: str,
        payload: Dict[str, Any]
    ) -> Optional[POSConfiguration]:
        """Find POS configuration that matches the webhook"""

        # Extract location ID or merchant ID from payload
        location_id = self._extract_location_id(pos_system, payload)
        merchant_id = self._extract_merchant_id(pos_system, payload)

        async with get_db_transaction() as session:
            query = """
                SELECT * FROM pos_configurations
                WHERE pos_system = :pos_system
                AND is_active = true
            """

            params = {"pos_system": pos_system}

            if location_id:
                query += " AND location_id = :location_id"
                params["location_id"] = location_id
            elif merchant_id:
                query += " AND merchant_id = :merchant_id"
                params["merchant_id"] = merchant_id

            query += " LIMIT 1"

            # Raw SQL must be wrapped in text() for SQLAlchemy 1.4+
            result = await session.execute(text(query), params)
            row = result.first()

            if row:
                return POSConfiguration(**row._asdict())
            return None

    def _extract_location_id(self, pos_system: str, payload: Dict[str, Any]) -> Optional[str]:
        """Extract location ID from webhook payload"""
        if pos_system == "square":
            # Square includes location_id in various places
            return (payload.get("data", {})
                    .get("object", {})
                    .get("order", {})
                    .get("location_id"))
        return None

    def _extract_merchant_id(self, pos_system: str, payload: Dict[str, Any]) -> Optional[str]:
        """Extract merchant ID from webhook payload"""
        if pos_system == "toast":
            return payload.get("restaurantGuid")
        return None

    async def _update_webhook_status(
        self,
        webhook_id: UUID,
        status: str,
        message: Optional[str] = None
    ):
        """Update webhook log status"""
        try:
            async with get_db_transaction() as session:
                webhook_log = await session.get(POSWebhookLog, webhook_id)
                if webhook_log:
                    webhook_log.status = status
                    webhook_log.processing_completed_at = datetime.utcnow()
                    if message:
                        webhook_log.error_message = message

                    session.add(webhook_log)
                    await session.commit()
        except Exception as e:
            logger.error("Failed to update webhook status", error=str(e), webhook_id=webhook_id)
234
services/pos/app/services/pos_sync_service.py
Normal file
@@ -0,0 +1,234 @@
"""
POS Sync Service - Business Logic Layer
Handles sync job creation, tracking, and metrics
"""

from typing import Optional, List, Dict, Any
from uuid import UUID, uuid4
from datetime import datetime, timedelta
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, func, and_, desc
import structlog

from app.models.pos_sync import POSSyncLog
from app.core.database import get_db_transaction

logger = structlog.get_logger()


class POSSyncService:
    """Service layer for POS sync operations"""

    def __init__(self, db: Optional[AsyncSession] = None):
        self.db = db

    async def create_sync_job(
        self,
        tenant_id: UUID,
        pos_config_id: UUID,
        pos_system: str,
        sync_type: str = "manual",
        data_types: Optional[List[str]] = None
    ) -> POSSyncLog:
        """
        Create a new sync job

        Args:
            tenant_id: Tenant UUID
            pos_config_id: POS configuration UUID
            pos_system: POS system name
            sync_type: Type of sync (manual, scheduled, incremental, full)
            data_types: List of data types to sync

        Returns:
            Created sync log
        """
        try:
            async with get_db_transaction() as db:
                sync_log = POSSyncLog(
                    tenant_id=tenant_id,
                    pos_config_id=pos_config_id,
                    pos_system=pos_system,
                    sync_type=sync_type,
                    sync_direction="inbound",
                    data_type=",".join(data_types) if data_types else "transactions",
                    status="started",
                    started_at=datetime.utcnow(),
                    triggered_by="user"
                )

                db.add(sync_log)
                await db.commit()
                await db.refresh(sync_log)

                logger.info("Sync job created",
                            sync_id=str(sync_log.id),
                            tenant_id=str(tenant_id),
                            pos_system=pos_system,
                            sync_type=sync_type)

                return sync_log

        except Exception as e:
            logger.error("Failed to create sync job", error=str(e))
            raise

    async def get_sync_by_id(self, sync_id: UUID) -> Optional[POSSyncLog]:
        """Get sync log by ID"""
        try:
            async with get_db_transaction() as db:
                return await db.get(POSSyncLog, sync_id)
        except Exception as e:
            logger.error("Failed to get sync log", error=str(e), sync_id=str(sync_id))
            raise

    async def update_sync_status(
        self,
        sync_id: UUID,
        status: str,
        error_message: Optional[str] = None,
        stats: Optional[Dict[str, int]] = None
    ) -> None:
        """Update sync job status"""
        try:
            async with get_db_transaction() as db:
                sync_log = await db.get(POSSyncLog, sync_id)

                if sync_log:
                    sync_log.status = status
                    sync_log.completed_at = datetime.utcnow()

                    if sync_log.started_at:
                        duration = (datetime.utcnow() - sync_log.started_at).total_seconds()
                        sync_log.duration_seconds = duration

                    if error_message:
                        sync_log.error_message = error_message

                    if stats:
                        sync_log.records_processed = stats.get("processed", 0)
                        sync_log.records_created = stats.get("created", 0)
                        sync_log.records_updated = stats.get("updated", 0)
                        sync_log.records_failed = stats.get("failed", 0)

                    await db.commit()

                    logger.info("Sync status updated",
                                sync_id=str(sync_id),
                                status=status)

        except Exception as e:
            logger.error("Failed to update sync status", error=str(e))
            raise

    async def get_sync_logs(
        self,
        tenant_id: UUID,
        config_id: Optional[UUID] = None,
        status: Optional[str] = None,
        sync_type: Optional[str] = None,
        limit: int = 50,
        offset: int = 0
    ) -> Dict[str, Any]:
        """
        Get sync logs with filtering

        Returns:
            Dict with logs and pagination info
        """
        try:
            async with get_db_transaction() as db:
                query = select(POSSyncLog).where(POSSyncLog.tenant_id == tenant_id)

                # Apply filters
                if config_id:
                    query = query.where(POSSyncLog.pos_config_id == config_id)
                if status:
                    query = query.where(POSSyncLog.status == status)
                if sync_type:
                    query = query.where(POSSyncLog.sync_type == sync_type)

                # Get total count
                count_query = select(func.count()).select_from(query.subquery())
                result = await db.execute(count_query)
                total = result.scalar() or 0

                # Get paginated results
                query = query.order_by(desc(POSSyncLog.started_at)).offset(offset).limit(limit)
                result = await db.execute(query)
                logs = result.scalars().all()

                return {
                    "logs": [self._sync_log_to_dict(log) for log in logs],
                    "total": total,
                    "has_more": offset + len(logs) < total
                }

        except Exception as e:
            logger.error("Failed to get sync logs", error=str(e))
            raise

    async def calculate_average_duration(
        self,
        tenant_id: UUID,
        pos_config_id: Optional[UUID] = None,
        days: int = 30
    ) -> float:
        """
        Calculate average sync duration for recent successful syncs

        Args:
            tenant_id: Tenant UUID
            pos_config_id: Optional POS config filter
            days: Number of days to look back

        Returns:
            Average duration in minutes
        """
        try:
            async with get_db_transaction() as db:
                cutoff_date = datetime.utcnow() - timedelta(days=days)

                query = select(func.avg(POSSyncLog.duration_seconds)).where(
                    and_(
                        POSSyncLog.tenant_id == tenant_id,
                        POSSyncLog.status == "completed",
                        POSSyncLog.started_at >= cutoff_date,
                        POSSyncLog.duration_seconds.isnot(None)
                    )
                )

                if pos_config_id:
                    query = query.where(POSSyncLog.pos_config_id == pos_config_id)

                result = await db.execute(query)
                avg_seconds = result.scalar()

                if avg_seconds:
                    return round(float(avg_seconds) / 60, 2)  # Convert to minutes
                else:
                    return 0.0

        except Exception as e:
            logger.error("Failed to calculate average duration", error=str(e))
            return 0.0

    def _sync_log_to_dict(self, sync_log: POSSyncLog) -> Dict[str, Any]:
        """Convert sync log to dictionary"""
        return {
            "id": str(sync_log.id),
            "tenant_id": str(sync_log.tenant_id),
            "pos_config_id": str(sync_log.pos_config_id),
            "pos_system": sync_log.pos_system,
            "sync_type": sync_log.sync_type,
            "data_type": sync_log.data_type,
            "status": sync_log.status,
            "started_at": sync_log.started_at.isoformat() if sync_log.started_at else None,
            "completed_at": sync_log.completed_at.isoformat() if sync_log.completed_at else None,
            "duration_seconds": float(sync_log.duration_seconds) if sync_log.duration_seconds else None,
            "records_processed": sync_log.records_processed,
            "records_created": sync_log.records_created,
            "records_updated": sync_log.records_updated,
            "records_failed": sync_log.records_failed,
            "error_message": sync_log.error_message
        }
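

# Illustrative usage sketch (assumption, not in the original file): the
# create -> run -> update lifecycle the methods above support.
async def example_sync_lifecycle(tenant_id: UUID, pos_config_id: UUID) -> None:
    service = POSSyncService()
    job = await service.create_sync_job(tenant_id, pos_config_id, pos_system="square")
    # ... run the actual sync here ...
    await service.update_sync_status(
        job.id,
        status="completed",
        stats={"processed": 120, "created": 115, "updated": 5, "failed": 0}
    )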
482
services/pos/app/services/pos_transaction_service.py
Normal file
@@ -0,0 +1,482 @@
"""
POS Transaction Service - Business Logic Layer
"""

from typing import List, Optional, Dict, Any
from uuid import UUID
from datetime import datetime
from decimal import Decimal
import structlog

from app.repositories.pos_transaction_repository import POSTransactionRepository
from app.repositories.pos_transaction_item_repository import POSTransactionItemRepository
from app.schemas.pos_transaction import (
    POSTransactionResponse,
    POSTransactionDashboardSummary
)
from app.core.database import get_db_transaction

logger = structlog.get_logger()


class POSTransactionService:
    """Service layer for POS transaction operations"""

    def __init__(self):
        pass

    async def get_transactions_by_tenant(
        self,
        tenant_id: UUID,
        pos_system: Optional[str] = None,
        start_date: Optional[datetime] = None,
        end_date: Optional[datetime] = None,
        status: Optional[str] = None,
        is_synced: Optional[bool] = None,
        skip: int = 0,
        limit: int = 50
    ) -> List[POSTransactionResponse]:
        """Get POS transactions for a tenant with filtering"""
        try:
            async with get_db_transaction() as db:
                repository = POSTransactionRepository(db)

                transactions = await repository.get_transactions_by_tenant(
                    tenant_id=tenant_id,
                    pos_system=pos_system,
                    start_date=start_date,
                    end_date=end_date,
                    status=status,
                    is_synced=is_synced,
                    skip=skip,
                    limit=limit
                )

                # Convert to response schemas
                responses = []
                for transaction in transactions:
                    response = POSTransactionResponse.from_orm(transaction)
                    responses.append(response)

                return responses

        except Exception as e:
            logger.error("Failed to get transactions by tenant", error=str(e), tenant_id=tenant_id)
            raise

    async def count_transactions_by_tenant(
        self,
        tenant_id: UUID,
        pos_system: Optional[str] = None,
        start_date: Optional[datetime] = None,
        end_date: Optional[datetime] = None,
        status: Optional[str] = None,
        is_synced: Optional[bool] = None
    ) -> int:
        """Count POS transactions for a tenant with filtering"""
        try:
            async with get_db_transaction() as db:
                repository = POSTransactionRepository(db)

                count = await repository.count_transactions_by_tenant(
                    tenant_id=tenant_id,
                    pos_system=pos_system,
                    start_date=start_date,
                    end_date=end_date,
                    status=status,
                    is_synced=is_synced
                )

                return count

        except Exception as e:
            logger.error("Failed to count transactions by tenant", error=str(e), tenant_id=tenant_id)
            raise

    async def get_transaction_with_items(
        self,
        transaction_id: UUID,
        tenant_id: UUID
    ) -> Optional[POSTransactionResponse]:
        """Get transaction with all its items"""
        try:
            async with get_db_transaction() as db:
                repository = POSTransactionRepository(db)

                transaction = await repository.get_transaction_with_items(
                    transaction_id=transaction_id,
                    tenant_id=tenant_id
                )

                if not transaction:
                    return None

                return POSTransactionResponse.from_orm(transaction)

        except Exception as e:
            logger.error("Failed to get transaction with items",
                         transaction_id=str(transaction_id),
                         error=str(e))
            raise

    async def get_dashboard_summary(
        self,
        tenant_id: UUID
    ) -> POSTransactionDashboardSummary:
        """Get dashboard summary for POS transactions"""
        try:
            async with get_db_transaction() as db:
                repository = POSTransactionRepository(db)

                # Get metrics from repository
                metrics = await repository.get_dashboard_metrics(tenant_id)

                # Get sync status
                sync_status = await repository.get_sync_status_summary(tenant_id)

                # Construct dashboard summary
                return POSTransactionDashboardSummary(
                    total_transactions_today=metrics["total_transactions_today"],
                    total_transactions_this_week=metrics["total_transactions_this_week"],
                    total_transactions_this_month=metrics["total_transactions_this_month"],
                    revenue_today=Decimal(str(metrics["revenue_today"])),
                    revenue_this_week=Decimal(str(metrics["revenue_this_week"])),
                    revenue_this_month=Decimal(str(metrics["revenue_this_month"])),
                    average_transaction_value=Decimal(str(metrics["average_transaction_value"])),
                    status_breakdown=metrics["status_breakdown"],
                    payment_method_breakdown=metrics["payment_method_breakdown"],
                    sync_status=sync_status
                )

        except Exception as e:
            logger.error("Failed to get dashboard summary", error=str(e), tenant_id=tenant_id)
            raise

    async def get_sync_metrics(
        self,
        tenant_id: UUID
    ) -> Dict[str, Any]:
        """Get sync metrics for transactions"""
        try:
            async with get_db_transaction() as db:
                repository = POSTransactionRepository(db)

                sync_status = await repository.get_sync_status_summary(tenant_id)

                # Calculate sync rate
                total = sync_status["synced"] + sync_status["pending"] + sync_status["failed"]
                sync_rate = (sync_status["synced"] / total * 100) if total > 0 else 0

                return {
                    "sync_status": sync_status,
                    "sync_rate_percentage": round(sync_rate, 2),
                    "total_transactions": total
                }

        except Exception as e:
            logger.error("Failed to get sync metrics", error=str(e), tenant_id=tenant_id)
            raise

    async def calculate_transaction_analytics(
        self,
        tenant_id: UUID,
        start_date: datetime,
        end_date: datetime
    ) -> Dict[str, Any]:
        """Calculate analytics for transactions within a date range"""
        try:
            async with get_db_transaction() as db:
                repository = POSTransactionRepository(db)

                transactions = await repository.get_transactions_by_date_range(
                    tenant_id=tenant_id,
                    start_date=start_date.date(),
                    end_date=end_date.date(),
                    skip=0,
                    limit=10000  # Large limit for analytics
                )

                # Calculate analytics
                total_revenue = Decimal("0")
                total_transactions = len(transactions)
                payment_methods = {}
                order_types = {}
                hourly_distribution = {}

                for transaction in transactions:
                    if transaction.status == "completed":
                        total_revenue += transaction.total_amount

                    # Payment method breakdown
                    pm = transaction.payment_method or "unknown"
                    payment_methods[pm] = payment_methods.get(pm, 0) + 1

                    # Order type breakdown
                    ot = transaction.order_type or "unknown"
                    order_types[ot] = order_types.get(ot, 0) + 1

                    # Hourly distribution
                    hour = transaction.transaction_date.hour
                    hourly_distribution[hour] = hourly_distribution.get(hour, 0) + 1

                avg_transaction_value = (total_revenue / total_transactions) if total_transactions > 0 else Decimal("0")

                return {
                    "period": {
                        "start_date": start_date.isoformat(),
                        "end_date": end_date.isoformat()
                    },
                    "total_revenue": float(total_revenue),
                    "total_transactions": total_transactions,
                    "average_transaction_value": float(avg_transaction_value),
                    "payment_methods": payment_methods,
                    "order_types": order_types,
                    "hourly_distribution": hourly_distribution
                }

        except Exception as e:
            logger.error("Failed to calculate transaction analytics", error=str(e), tenant_id=tenant_id)
            raise
    async def sync_transaction_to_sales(
        self,
        transaction_id: UUID,
        tenant_id: UUID
    ) -> Dict[str, Any]:
        """
        Sync a single POS transaction to the sales service

        Args:
            transaction_id: Transaction UUID
            tenant_id: Tenant UUID

        Returns:
            Dict with sync status and details
        """
        try:
            from shared.clients.sales_client import SalesServiceClient
            from app.core.config import settings

            async with get_db_transaction() as db:
                transaction_repo = POSTransactionRepository(db)
                items_repo = POSTransactionItemRepository(db)

                # Get transaction
                transaction = await transaction_repo.get_by_id(transaction_id)
                if not transaction or transaction.tenant_id != tenant_id:
                    return {
                        "success": False,
                        "error": "Transaction not found or unauthorized"
                    }

                # Check if already synced
                if transaction.is_synced_to_sales:
                    logger.info("Transaction already synced to sales",
                                transaction_id=transaction_id,
                                sales_record_id=transaction.sales_record_id)
                    return {
                        "success": True,
                        "already_synced": True,
                        "sales_record_id": str(transaction.sales_record_id)
                    }

                # Get transaction items
                items = await items_repo.get_by_transaction_id(transaction_id)

                # Initialize sales client
                sales_client = SalesServiceClient(settings, calling_service_name="pos")

                # Create sales records for each item
                sales_record_ids = []
                failed_items = []

                for item in items:
                    try:
                        sales_data = {
                            # Item fields follow the POSTransactionItem model
                            # (inventory_product_id / total_price).
                            "inventory_product_id": str(item.inventory_product_id) if item.inventory_product_id else None,
                            "product_name": item.product_name,
                            "product_category": "finished_product",
                            "quantity_sold": float(item.quantity),
                            "unit_price": float(item.unit_price),
                            "total_amount": float(item.total_price),
                            "sale_date": transaction.transaction_date.strftime("%Y-%m-%d"),
                            "sales_channel": "pos",
                            "source": f"pos_sync_{transaction.pos_system}",
                            "payment_method": transaction.payment_method or "unknown",
                            "notes": f"POS Transaction: {transaction.external_transaction_id or transaction_id}"
                        }

                        result = await sales_client.create_sales_record(
                            tenant_id=str(tenant_id),
                            sales_data=sales_data
                        )

                        if result and result.get("id"):
                            sales_record_ids.append(result["id"])
                            logger.info("Synced item to sales",
                                        transaction_id=transaction_id,
                                        item_id=item.id,
                                        sales_record_id=result["id"])
                        else:
                            failed_items.append({
                                "item_id": str(item.id),
                                "product_name": item.product_name,
                                "error": "No sales record ID returned"
                            })

                    except Exception as item_error:
                        logger.error("Failed to sync item to sales",
                                     error=str(item_error),
                                     transaction_id=transaction_id,
                                     item_id=item.id)
                        failed_items.append({
                            "item_id": str(item.id),
                            "product_name": item.product_name,
                            "error": str(item_error)
                        })

                # Update transaction sync status
                if sales_record_ids and len(failed_items) == 0:
                    # Full success
                    transaction.is_synced_to_sales = True
                    transaction.sales_record_id = UUID(sales_record_ids[0])  # Store first record ID
                    transaction.sync_completed_at = datetime.utcnow()
                    await db.commit()

                    logger.info("Transaction fully synced to sales",
                                transaction_id=transaction_id,
                                items_synced=len(sales_record_ids))

                    return {
                        "success": True,
                        "items_synced": len(sales_record_ids),
                        "sales_record_ids": sales_record_ids,
                        "failed_items": []
                    }

                elif sales_record_ids and len(failed_items) > 0:
                    # Partial success
                    transaction.sync_attempted_at = datetime.utcnow()
                    transaction.sync_error = f"Partial sync: {len(failed_items)} items failed"
                    transaction.sync_retry_count = (transaction.sync_retry_count or 0) + 1
                    await db.commit()

                    logger.warning("Transaction partially synced to sales",
                                   transaction_id=transaction_id,
                                   items_synced=len(sales_record_ids),
                                   items_failed=len(failed_items))

                    return {
                        "success": False,
                        "partial_success": True,
                        "items_synced": len(sales_record_ids),
                        "sales_record_ids": sales_record_ids,
                        "failed_items": failed_items
                    }

                else:
                    # Complete failure
                    transaction.sync_attempted_at = datetime.utcnow()
                    transaction.sync_error = "All items failed to sync"
                    transaction.sync_retry_count = (transaction.sync_retry_count or 0) + 1
                    await db.commit()

                    logger.error("Transaction sync failed completely",
                                 transaction_id=transaction_id,
                                 items_failed=len(failed_items))

                    return {
                        "success": False,
                        "items_synced": 0,
                        "failed_items": failed_items
                    }

        except Exception as e:
            logger.error("Failed to sync transaction to sales",
                         error=str(e),
                         transaction_id=transaction_id,
                         tenant_id=tenant_id)
            return {
                "success": False,
                "error": str(e)
            }

    async def sync_unsynced_transactions(
        self,
        tenant_id: UUID,
        limit: int = 50
    ) -> Dict[str, Any]:
        """
        Sync all unsynced transactions to the sales service

        Args:
            tenant_id: Tenant UUID
            limit: Maximum number of transactions to sync in one batch

        Returns:
            Dict with sync summary
        """
        try:
            async with get_db_transaction() as db:
                repository = POSTransactionRepository(db)

                # Get unsynced transactions
                unsynced_transactions = await repository.get_transactions_by_tenant(
                    tenant_id=tenant_id,
                    is_synced=False,
                    status="completed",  # Only sync completed transactions
                    limit=limit
                )

                if not unsynced_transactions:
                    logger.info("No unsynced transactions found", tenant_id=tenant_id)
                    return {
                        "success": True,
                        "total_transactions": 0,
                        "synced": 0,
                        "failed": 0
                    }

                synced_count = 0
                failed_count = 0
                results = []

                for transaction in unsynced_transactions:
                    result = await self.sync_transaction_to_sales(
                        transaction.id,
                        tenant_id
                    )

                    if result.get("success"):
                        synced_count += 1
                    else:
                        failed_count += 1

                    results.append({
                        "transaction_id": str(transaction.id),
                        "external_id": transaction.external_transaction_id,
                        "result": result
                    })

                logger.info("Batch sync completed",
                            tenant_id=tenant_id,
                            total=len(unsynced_transactions),
                            synced=synced_count,
                            failed=failed_count)

                return {
                    "success": True,
                    "total_transactions": len(unsynced_transactions),
                    "synced": synced_count,
                    "failed": failed_count,
                    "results": results
                }

        except Exception as e:
            logger.error("Failed to batch sync transactions",
                         error=str(e),
                         tenant_id=tenant_id)
            return {
                "success": False,
                "error": str(e)
            }
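

# Illustrative usage sketch (assumption, not in the original file): running a
# batch sync from an asyncio context.
async def example_batch_sync(tenant_id: UUID) -> None:
    service = POSTransactionService()
    summary = await service.sync_unsynced_transactions(tenant_id, limit=100)
    logger.info("POS batch sync finished",
                synced=summary.get("synced"),
                failed=summary.get("failed"))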
409
services/pos/app/services/pos_webhook_service.py
Normal file
@@ -0,0 +1,409 @@
"""
POS Webhook Service - Business Logic Layer
Handles webhook processing, signature verification, and logging
"""

from typing import Optional, Dict, Any, Tuple
from uuid import UUID
import structlog
import hashlib
import hmac
import base64
import json
from datetime import datetime

from sqlalchemy.ext.asyncio import AsyncSession
from app.models.pos_webhook import POSWebhookLog
from app.repositories.pos_config_repository import POSConfigurationRepository
from app.core.database import get_db_transaction

logger = structlog.get_logger()


class POSWebhookService:
    """Service layer for POS webhook operations"""

    def __init__(self, db: Optional[AsyncSession] = None):
        self.db = db

    async def verify_webhook_signature(
        self,
        pos_system: str,
        payload: str,
        signature: str,
        webhook_secret: str
    ) -> bool:
        """
        Verify webhook signature based on POS system

        Args:
            pos_system: POS system name (square, toast, lightspeed)
            payload: Raw webhook payload
            signature: Signature from webhook headers
            webhook_secret: Secret key from POS configuration

        Returns:
            True if signature is valid, False otherwise
        """
        try:
            if pos_system.lower() == "square":
                return self._verify_square_signature(payload, signature, webhook_secret)
            elif pos_system.lower() == "toast":
                return self._verify_toast_signature(payload, signature, webhook_secret)
            elif pos_system.lower() == "lightspeed":
                return self._verify_lightspeed_signature(payload, signature, webhook_secret)
            else:
                logger.warning("Unknown POS system for signature verification", pos_system=pos_system)
                return False

        except Exception as e:
            logger.error("Signature verification failed", error=str(e), pos_system=pos_system)
            return False

    def _verify_square_signature(self, payload: str, signature: str, secret: str) -> bool:
        """Verify Square webhook signature using HMAC-SHA256"""
        try:
            # Square combines URL + body for signature
            # Format: <notification_url> + <request_body>
            # For simplicity, we'll just verify the body
            expected_signature = hmac.new(
                secret.encode('utf-8'),
                payload.encode('utf-8'),
                hashlib.sha256
            ).digest()

            # Square sends base64-encoded signature
            expected_b64 = base64.b64encode(expected_signature).decode('utf-8')

            return hmac.compare_digest(signature, expected_b64)
        except Exception as e:
            logger.error("Square signature verification error", error=str(e))
            return False
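
    # Illustrative sketch (assumption based on the comment above, not part of
    # the original service): the full Square scheme signs notification URL +
    # request body, then base64-encodes the HMAC-SHA256 digest.
    def _verify_square_signature_full(self, notification_url: str, payload: str,
                                      signature: str, secret: str) -> bool:
        digest = hmac.new(
            secret.encode('utf-8'),
            (notification_url + payload).encode('utf-8'),
            hashlib.sha256
        ).digest()
        return hmac.compare_digest(signature, base64.b64encode(digest).decode('utf-8'))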
    def _verify_toast_signature(self, payload: str, signature: str, secret: str) -> bool:
        """Verify Toast webhook signature using HMAC-SHA256"""
        try:
            expected_signature = hmac.new(
                secret.encode('utf-8'),
                payload.encode('utf-8'),
                hashlib.sha256
            ).hexdigest()

            return hmac.compare_digest(signature, expected_signature)
        except Exception as e:
            logger.error("Toast signature verification error", error=str(e))
            return False

    def _verify_lightspeed_signature(self, payload: str, signature: str, secret: str) -> bool:
        """Verify Lightspeed webhook signature using HMAC-SHA256"""
        try:
            expected_signature = hmac.new(
                secret.encode('utf-8'),
                payload.encode('utf-8'),
                hashlib.sha256
            ).hexdigest()

            return hmac.compare_digest(signature.lower(), expected_signature.lower())
        except Exception as e:
            logger.error("Lightspeed signature verification error", error=str(e))
            return False

    async def extract_tenant_id_from_payload(
        self,
        pos_system: str,
        parsed_payload: Dict[str, Any]
    ) -> Optional[UUID]:
        """
        Extract tenant_id from webhook payload by matching POS system identifiers

        Args:
            pos_system: POS system name
            parsed_payload: Parsed JSON payload

        Returns:
            tenant_id if found, None otherwise
        """
        try:
            # Extract POS-specific identifiers
            pos_identifier = None

            if pos_system.lower() == "square":
                # Square uses merchant_id or location_id
                pos_identifier = (
                    parsed_payload.get("merchant_id") or
                    parsed_payload.get("data", {}).get("object", {}).get("merchant_id") or
                    parsed_payload.get("location_id")
                )
            elif pos_system.lower() == "toast":
                # Toast uses restaurantGuid
                pos_identifier = (
                    parsed_payload.get("restaurantGuid") or
                    parsed_payload.get("restaurant", {}).get("guid")
                )
            elif pos_system.lower() == "lightspeed":
                # Lightspeed uses accountID
                pos_identifier = (
                    parsed_payload.get("accountID") or
                    parsed_payload.get("account", {}).get("id")
                )

            if not pos_identifier:
                logger.warning("Could not extract POS identifier from payload", pos_system=pos_system)
                return None

            # Query database to find tenant_id by POS identifier
            async with get_db_transaction() as db:
                repository = POSConfigurationRepository(db)
                config = await repository.get_by_pos_identifier(pos_system, pos_identifier)

                if config:
                    return config.tenant_id
                else:
                    logger.warning("No tenant found for POS identifier",
                                   pos_system=pos_system,
                                   identifier=pos_identifier)
                    return None

        except Exception as e:
            logger.error("Failed to extract tenant_id", error=str(e), pos_system=pos_system)
            return None

    async def log_webhook(
        self,
        pos_system: str,
        webhook_type: str,
        method: str,
        url_path: str,
        query_params: Dict[str, Any],
        headers: Dict[str, str],
        raw_payload: str,
        payload_size: int,
        content_type: Optional[str],
        signature: Optional[str],
        is_signature_valid: Optional[bool],
        source_ip: Optional[str],
        event_id: Optional[str] = None,
        tenant_id: Optional[UUID] = None,
        transaction_id: Optional[str] = None,
        order_id: Optional[str] = None
    ) -> POSWebhookLog:
        """
        Create a webhook log entry in the database

        Returns:
            Created POSWebhookLog instance
        """
        try:
            async with get_db_transaction() as db:
                webhook_log = POSWebhookLog(
                    tenant_id=tenant_id,
                    pos_system=pos_system,
                    webhook_type=webhook_type,
                    method=method,
                    url_path=url_path,
                    query_params=query_params,
                    headers=headers,
                    raw_payload=raw_payload,
                    payload_size=payload_size,
                    content_type=content_type,
                    signature=signature,
                    is_signature_valid=is_signature_valid,
                    source_ip=source_ip,
|
||||
status="received",
|
||||
event_id=event_id,
|
||||
transaction_id=transaction_id,
|
||||
order_id=order_id,
|
||||
received_at=datetime.utcnow(),
|
||||
user_agent=headers.get("user-agent"),
|
||||
forwarded_for=headers.get("x-forwarded-for"),
|
||||
request_id=headers.get("x-request-id")
|
||||
)
|
||||
|
||||
db.add(webhook_log)
|
||||
await db.commit()
|
||||
await db.refresh(webhook_log)
|
||||
|
||||
logger.info("Webhook logged to database",
|
||||
webhook_log_id=str(webhook_log.id),
|
||||
pos_system=pos_system,
|
||||
webhook_type=webhook_type,
|
||||
tenant_id=str(tenant_id) if tenant_id else None)
|
||||
|
||||
return webhook_log
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to log webhook", error=str(e), pos_system=pos_system)
|
||||
raise
|
||||
|
||||
async def get_webhook_secret(
|
||||
self,
|
||||
pos_system: str,
|
||||
tenant_id: Optional[UUID] = None
|
||||
) -> Optional[str]:
|
||||
"""
|
||||
Get webhook secret for signature verification
|
||||
|
||||
Args:
|
||||
pos_system: POS system name
|
||||
tenant_id: Optional tenant_id if known
|
||||
|
||||
Returns:
|
||||
Webhook secret if found
|
||||
"""
|
||||
try:
|
||||
async with get_db_transaction() as db:
|
||||
repository = POSConfigurationRepository(db)
|
||||
|
||||
if tenant_id:
|
||||
# Get active config for tenant and POS system
|
||||
configs = await repository.get_configurations_by_tenant(
|
||||
tenant_id=tenant_id,
|
||||
pos_system=pos_system,
|
||||
is_active=True,
|
||||
skip=0,
|
||||
limit=1
|
||||
)
|
||||
|
||||
if configs:
|
||||
return configs[0].webhook_secret
|
||||
|
||||
return None
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to get webhook secret", error=str(e))
|
||||
return None
|
||||
|
||||
async def update_webhook_status(
|
||||
self,
|
||||
webhook_log_id: UUID,
|
||||
status: str,
|
||||
error_message: Optional[str] = None,
|
||||
processing_duration_ms: Optional[int] = None
|
||||
) -> None:
|
||||
"""Update webhook processing status"""
|
||||
try:
|
||||
async with get_db_transaction() as db:
|
||||
webhook_log = await db.get(POSWebhookLog, webhook_log_id)
|
||||
|
||||
if webhook_log:
|
||||
webhook_log.status = status
|
||||
webhook_log.processing_completed_at = datetime.utcnow()
|
||||
|
||||
if error_message:
|
||||
webhook_log.error_message = error_message
|
||||
webhook_log.retry_count += 1
|
||||
|
||||
if processing_duration_ms:
|
||||
webhook_log.processing_duration_ms = processing_duration_ms
|
||||
|
||||
await db.commit()
|
||||
|
||||
logger.info("Webhook status updated",
|
||||
webhook_log_id=str(webhook_log_id),
|
||||
status=status)
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to update webhook status", error=str(e))
|
||||
raise
|
||||
|
||||
async def check_duplicate_webhook(
|
||||
self,
|
||||
pos_system: str,
|
||||
event_id: str,
|
||||
tenant_id: Optional[UUID] = None
|
||||
) -> Tuple[bool, Optional[UUID]]:
|
||||
"""
|
||||
Check if webhook has already been processed
|
||||
|
||||
Returns:
|
||||
Tuple of (is_duplicate, original_webhook_id)
|
||||
"""
|
||||
try:
|
||||
async with get_db_transaction() as db:
|
||||
from sqlalchemy import select
|
||||
|
||||
query = select(POSWebhookLog).where(
|
||||
POSWebhookLog.pos_system == pos_system,
|
||||
POSWebhookLog.event_id == event_id,
|
||||
POSWebhookLog.status == "processed"
|
||||
)
|
||||
|
||||
if tenant_id:
|
||||
query = query.where(POSWebhookLog.tenant_id == tenant_id)
|
||||
|
||||
result = await db.execute(query)
|
||||
existing = result.scalar_one_or_none()
|
||||
|
||||
if existing:
|
||||
logger.info("Duplicate webhook detected",
|
||||
pos_system=pos_system,
|
||||
event_id=event_id,
|
||||
original_id=str(existing.id))
|
||||
return True, existing.id
|
||||
|
||||
return False, None
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to check duplicate webhook", error=str(e))
|
||||
return False, None
|
||||
|
||||
def parse_webhook_event_details(
|
||||
self,
|
||||
pos_system: str,
|
||||
parsed_payload: Dict[str, Any]
|
||||
) -> Dict[str, Any]:
|
||||
"""
|
||||
Extract standardized event details from POS-specific payload
|
||||
|
||||
Returns:
|
||||
Dict with event_id, webhook_type, transaction_id, order_id, etc.
|
||||
"""
|
||||
details = {
|
||||
"event_id": None,
|
||||
"webhook_type": None,
|
||||
"transaction_id": None,
|
||||
"order_id": None,
|
||||
"customer_id": None,
|
||||
"event_timestamp": None
|
||||
}
|
||||
|
||||
try:
|
||||
if pos_system.lower() == "square":
|
||||
details["event_id"] = parsed_payload.get("event_id")
|
||||
details["webhook_type"] = parsed_payload.get("type")
|
||||
|
||||
data = parsed_payload.get("data", {}).get("object", {})
|
||||
details["transaction_id"] = data.get("id")
|
||||
details["order_id"] = data.get("order_id")
|
||||
details["customer_id"] = data.get("customer_id")
|
||||
|
||||
created_at = parsed_payload.get("created_at")
|
||||
if created_at:
|
||||
details["event_timestamp"] = datetime.fromisoformat(created_at.replace('Z', '+00:00'))
|
||||
|
||||
elif pos_system.lower() == "toast":
|
||||
details["event_id"] = parsed_payload.get("guid")
|
||||
details["webhook_type"] = parsed_payload.get("eventType")
|
||||
details["order_id"] = parsed_payload.get("entityId")
|
||||
|
||||
created_at = parsed_payload.get("eventTime")
|
||||
if created_at:
|
||||
try:
|
||||
details["event_timestamp"] = datetime.fromtimestamp(created_at / 1000)
|
||||
except:
|
||||
pass
|
||||
|
||||
elif pos_system.lower() == "lightspeed":
|
||||
details["event_id"] = parsed_payload.get("id")
|
||||
details["webhook_type"] = parsed_payload.get("action")
|
||||
details["transaction_id"] = parsed_payload.get("objectID")
|
||||
|
||||
created_at = parsed_payload.get("createdAt")
|
||||
if created_at:
|
||||
details["event_timestamp"] = datetime.fromisoformat(created_at.replace('Z', '+00:00'))
|
||||
|
||||
return details
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to parse webhook event details", error=str(e))
|
||||
return details
|
||||
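A quick sanity check of the verification path above (a minimal sketch: the secret, payload, and import path are made up for illustration, and it exercises only the body-only simplification this service uses, not Square's full URL+body scheme):

import asyncio
import base64
import hashlib
import hmac

from app.services.pos_webhook_service import POSWebhookService  # assumed module path

async def main() -> None:
    secret = "test-webhook-secret"  # hypothetical secret
    payload = '{"event_id": "evt-123", "type": "payment.created"}'

    # Simulate the base64 HMAC-SHA256 signature a provider would attach
    sig = base64.b64encode(
        hmac.new(secret.encode(), payload.encode(), hashlib.sha256).digest()
    ).decode()

    ok = await POSWebhookService().verify_webhook_signature("square", payload, sig, secret)
    assert ok  # tampering with payload, sig, or secret flips this to False

asyncio.run(main())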
260
services/pos/app/services/tenant_deletion_service.py
Normal file
260
services/pos/app/services/tenant_deletion_service.py
Normal file
@@ -0,0 +1,260 @@
# services/pos/app/services/tenant_deletion_service.py
"""
Tenant Data Deletion Service for POS Service
Handles deletion of all POS-related data for a tenant
"""

from typing import Dict
from sqlalchemy import select, func, delete
from sqlalchemy.ext.asyncio import AsyncSession
import structlog

from shared.services.tenant_deletion import (
    BaseTenantDataDeletionService,
    TenantDataDeletionResult
)
from app.models import (
    POSConfiguration,
    POSTransaction,
    POSTransactionItem,
    POSWebhookLog,
    POSSyncLog,
    AuditLog
)

logger = structlog.get_logger(__name__)


class POSTenantDeletionService(BaseTenantDataDeletionService):
    """Service for deleting all POS-related data for a tenant"""

    def __init__(self, db: AsyncSession):
        self.db = db
        self.service_name = "pos"

    async def get_tenant_data_preview(self, tenant_id: str) -> Dict[str, int]:
        """
        Get counts of what would be deleted for a tenant (dry-run)

        Args:
            tenant_id: The tenant ID to preview deletion for

        Returns:
            Dictionary with entity names and their counts
        """
        logger.info("pos.tenant_deletion.preview", tenant_id=tenant_id)
        preview = {}

        try:
            # Count POS configurations
            config_count = await self.db.scalar(
                select(func.count(POSConfiguration.id)).where(
                    POSConfiguration.tenant_id == tenant_id
                )
            )
            preview["pos_configurations"] = config_count or 0

            # Count POS transactions
            transaction_count = await self.db.scalar(
                select(func.count(POSTransaction.id)).where(
                    POSTransaction.tenant_id == tenant_id
                )
            )
            preview["pos_transactions"] = transaction_count or 0

            # Count POS transaction items
            item_count = await self.db.scalar(
                select(func.count(POSTransactionItem.id)).where(
                    POSTransactionItem.tenant_id == tenant_id
                )
            )
            preview["pos_transaction_items"] = item_count or 0

            # Count webhook logs
            webhook_count = await self.db.scalar(
                select(func.count(POSWebhookLog.id)).where(
                    POSWebhookLog.tenant_id == tenant_id
                )
            )
            preview["pos_webhook_logs"] = webhook_count or 0

            # Count sync logs
            sync_count = await self.db.scalar(
                select(func.count(POSSyncLog.id)).where(
                    POSSyncLog.tenant_id == tenant_id
                )
            )
            preview["pos_sync_logs"] = sync_count or 0

            # Count audit logs
            audit_count = await self.db.scalar(
                select(func.count(AuditLog.id)).where(
                    AuditLog.tenant_id == tenant_id
                )
            )
            preview["audit_logs"] = audit_count or 0

            logger.info(
                "pos.tenant_deletion.preview_complete",
                tenant_id=tenant_id,
                preview=preview
            )

        except Exception as e:
            logger.error(
                "pos.tenant_deletion.preview_error",
                tenant_id=tenant_id,
                error=str(e),
                exc_info=True
            )
            raise

        return preview

    async def delete_tenant_data(self, tenant_id: str) -> TenantDataDeletionResult:
        """
        Permanently delete all POS data for a tenant

        Deletion order (respecting foreign key constraints):
        1. POSTransactionItem (references POSTransaction)
        2. POSTransaction (references POSConfiguration)
        3. POSWebhookLog (independent)
        4. POSSyncLog (references POSConfiguration)
        5. POSConfiguration (base configuration)
        6. AuditLog (independent)

        Args:
            tenant_id: The tenant ID to delete data for

        Returns:
            TenantDataDeletionResult with deletion counts and any errors
        """
        logger.info("pos.tenant_deletion.started", tenant_id=tenant_id)
        result = TenantDataDeletionResult(tenant_id=tenant_id, service_name=self.service_name)

        try:
            # Step 1: Delete POS transaction items (child of transactions)
            logger.info("pos.tenant_deletion.deleting_transaction_items", tenant_id=tenant_id)
            items_result = await self.db.execute(
                delete(POSTransactionItem).where(
                    POSTransactionItem.tenant_id == tenant_id
                )
            )
            result.deleted_counts["pos_transaction_items"] = items_result.rowcount
            logger.info(
                "pos.tenant_deletion.transaction_items_deleted",
                tenant_id=tenant_id,
                count=items_result.rowcount
            )

            # Step 2: Delete POS transactions
            logger.info("pos.tenant_deletion.deleting_transactions", tenant_id=tenant_id)
            transactions_result = await self.db.execute(
                delete(POSTransaction).where(
                    POSTransaction.tenant_id == tenant_id
                )
            )
            result.deleted_counts["pos_transactions"] = transactions_result.rowcount
            logger.info(
                "pos.tenant_deletion.transactions_deleted",
                tenant_id=tenant_id,
                count=transactions_result.rowcount
            )

            # Step 3: Delete webhook logs
            logger.info("pos.tenant_deletion.deleting_webhook_logs", tenant_id=tenant_id)
            webhook_result = await self.db.execute(
                delete(POSWebhookLog).where(
                    POSWebhookLog.tenant_id == tenant_id
                )
            )
            result.deleted_counts["pos_webhook_logs"] = webhook_result.rowcount
            logger.info(
                "pos.tenant_deletion.webhook_logs_deleted",
                tenant_id=tenant_id,
                count=webhook_result.rowcount
            )

            # Step 4: Delete sync logs
            logger.info("pos.tenant_deletion.deleting_sync_logs", tenant_id=tenant_id)
            sync_result = await self.db.execute(
                delete(POSSyncLog).where(
                    POSSyncLog.tenant_id == tenant_id
                )
            )
            result.deleted_counts["pos_sync_logs"] = sync_result.rowcount
            logger.info(
                "pos.tenant_deletion.sync_logs_deleted",
                tenant_id=tenant_id,
                count=sync_result.rowcount
            )

            # Step 5: Delete POS configurations (last, as they are referenced by transactions and sync logs)
            logger.info("pos.tenant_deletion.deleting_configurations", tenant_id=tenant_id)
            config_result = await self.db.execute(
                delete(POSConfiguration).where(
                    POSConfiguration.tenant_id == tenant_id
                )
            )
            result.deleted_counts["pos_configurations"] = config_result.rowcount
            logger.info(
                "pos.tenant_deletion.configurations_deleted",
                tenant_id=tenant_id,
                count=config_result.rowcount
            )

            # Step 6: Delete audit logs
            logger.info("pos.tenant_deletion.deleting_audit_logs", tenant_id=tenant_id)
            audit_result = await self.db.execute(
                delete(AuditLog).where(
                    AuditLog.tenant_id == tenant_id
                )
            )
            result.deleted_counts["audit_logs"] = audit_result.rowcount
            logger.info(
                "pos.tenant_deletion.audit_logs_deleted",
                tenant_id=tenant_id,
                count=audit_result.rowcount
            )

            # Commit the transaction
            await self.db.commit()

            # Calculate total deleted
            total_deleted = sum(result.deleted_counts.values())

            logger.info(
                "pos.tenant_deletion.completed",
                tenant_id=tenant_id,
                total_deleted=total_deleted,
                breakdown=result.deleted_counts
            )

            result.success = True

        except Exception as e:
            await self.db.rollback()
            error_msg = f"Failed to delete POS data for tenant {tenant_id}: {str(e)}"
            logger.error(
                "pos.tenant_deletion.failed",
                tenant_id=tenant_id,
                error=str(e),
                exc_info=True
            )
            result.errors.append(error_msg)
            result.success = False

        return result


def get_pos_tenant_deletion_service(db: AsyncSession) -> POSTenantDeletionService:
    """
    Factory function to create POSTenantDeletionService instance

    Args:
        db: AsyncSession database session

    Returns:
        POSTenantDeletionService instance
    """
    return POSTenantDeletionService(db)
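A usage sketch for the deletion flow (assumes an async session factory named async_session exists in app.core.database; the tenant UUID is illustrative):

import asyncio

from app.core.database import async_session  # assumed session factory
from app.services.tenant_deletion_service import get_pos_tenant_deletion_service

async def wipe_tenant(tenant_id: str) -> None:
    async with async_session() as db:
        service = get_pos_tenant_deletion_service(db)

        # Dry-run first: per-table counts, no rows touched
        preview = await service.get_tenant_data_preview(tenant_id)
        print(preview)  # e.g. {"pos_configurations": 1, "pos_transactions": 5421, ...}

        # Irreversible: deletes children before parents, then commits
        result = await service.delete_tenant_data(tenant_id)
        assert result.success, result.errors

asyncio.run(wipe_tenant("00000000-0000-0000-0000-000000000001"))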
141
services/pos/migrations/env.py
Normal file
141
services/pos/migrations/env.py
Normal file
@@ -0,0 +1,141 @@
"""Alembic environment configuration for pos service"""

import asyncio
import os
import sys
from logging.config import fileConfig
from sqlalchemy import pool
from sqlalchemy.engine import Connection
from sqlalchemy.ext.asyncio import async_engine_from_config
from alembic import context

# Add the service directory to the Python path
service_path = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
if service_path not in sys.path:
    sys.path.insert(0, service_path)

# Add shared modules to path
shared_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..", "shared"))
if shared_path not in sys.path:
    sys.path.insert(0, shared_path)

try:
    from app.core.config import settings
    from shared.database.base import Base

    # Import all models to ensure they are registered with Base.metadata
    from app.models import *  # noqa: F401, F403

except ImportError as e:
    print(f"Import error in migrations env.py: {e}")
    print(f"Current Python path: {sys.path}")
    raise

# this is the Alembic Config object
config = context.config

# Determine service name from file path
service_name = os.path.basename(os.path.dirname(os.path.dirname(__file__)))
service_name_upper = service_name.upper().replace('-', '_')

# Set database URL from environment variables with multiple fallback strategies
database_url = (
    os.getenv(f'{service_name_upper}_DATABASE_URL') or  # Service-specific
    os.getenv('DATABASE_URL')  # Generic fallback
)

# If DATABASE_URL is not set, construct from individual components
if not database_url:
    # Try generic PostgreSQL environment variables first
    postgres_host = os.getenv('POSTGRES_HOST')
    postgres_port = os.getenv('POSTGRES_PORT', '5432')
    postgres_db = os.getenv('POSTGRES_DB')
    postgres_user = os.getenv('POSTGRES_USER')
    postgres_password = os.getenv('POSTGRES_PASSWORD')

    if all([postgres_host, postgres_db, postgres_user, postgres_password]):
        database_url = f"postgresql+asyncpg://{postgres_user}:{postgres_password}@{postgres_host}:{postgres_port}/{postgres_db}"
    else:
        # Try service-specific environment variables
        db_host = os.getenv(f'{service_name_upper}_DB_HOST', f'{service_name}-db-service')
        db_port = os.getenv(f'{service_name_upper}_DB_PORT', '5432')
        db_name = os.getenv(f'{service_name_upper}_DB_NAME', f'{service_name.replace("-", "_")}_db')
        db_user = os.getenv(f'{service_name_upper}_DB_USER', f'{service_name.replace("-", "_")}_user')
        db_password = os.getenv(f'{service_name_upper}_DB_PASSWORD')

        if db_password:
            database_url = f"postgresql+asyncpg://{db_user}:{db_password}@{db_host}:{db_port}/{db_name}"
        else:
            # Final fallback: try to get from settings object
            try:
                database_url = getattr(settings, 'DATABASE_URL', None)
            except Exception:
                pass

if not database_url:
    error_msg = f"ERROR: No database URL configured for {service_name} service"
    print(error_msg)
    raise Exception(error_msg)

config.set_main_option("sqlalchemy.url", database_url)

# Interpret the config file for Python logging
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# Set target metadata
target_metadata = Base.metadata


def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode."""
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
        compare_type=True,
        compare_server_default=True,
    )

    with context.begin_transaction():
        context.run_migrations()


def do_run_migrations(connection: Connection) -> None:
    """Execute migrations with the given connection."""
    context.configure(
        connection=connection,
        target_metadata=target_metadata,
        compare_type=True,
        compare_server_default=True,
    )

    with context.begin_transaction():
        context.run_migrations()


async def run_async_migrations() -> None:
    """Run migrations in 'online' mode with async support."""
    connectable = async_engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    async with connectable.connect() as connection:
        await connection.run_sync(do_run_migrations)

    await connectable.dispose()


def run_migrations_online() -> None:
    """Run migrations in 'online' mode."""
    asyncio.run(run_async_migrations())


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
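The fallback chain above, condensed into a standalone helper for the pos service (an illustrative summary only, not part of the service; it mirrors the resolution order for service_name == "pos"):

import os
from typing import Optional

def resolve_pos_database_url() -> Optional[str]:
    """Mirror env.py's URL resolution order for the pos service."""
    # 1-2. Explicit URLs: service-specific, then generic
    url = os.getenv("POS_DATABASE_URL") or os.getenv("DATABASE_URL")
    if url:
        return url

    # 3. Generic POSTGRES_* components
    host, db = os.getenv("POSTGRES_HOST"), os.getenv("POSTGRES_DB")
    user, pw = os.getenv("POSTGRES_USER"), os.getenv("POSTGRES_PASSWORD")
    if all([host, db, user, pw]):
        port = os.getenv("POSTGRES_PORT", "5432")
        return f"postgresql+asyncpg://{user}:{pw}@{host}:{port}/{db}"

    # 4. Service-specific POS_DB_* components with conventional defaults
    pw = os.getenv("POS_DB_PASSWORD")
    if pw:
        host = os.getenv("POS_DB_HOST", "pos-db-service")
        port = os.getenv("POS_DB_PORT", "5432")
        db = os.getenv("POS_DB_NAME", "pos_db")
        user = os.getenv("POS_DB_USER", "pos_user")
        return f"postgresql+asyncpg://{user}:{pw}@{host}:{port}/{db}"

    return None  # env.py additionally falls back to settings.DATABASE_URL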
26
services/pos/migrations/script.py.mako
Normal file
26
services/pos/migrations/script.py.mako
Normal file
@@ -0,0 +1,26 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}


def upgrade() -> None:
    ${upgrades if upgrades else "pass"}


def downgrade() -> None:
    ${downgrades if downgrades else "pass"}
@@ -0,0 +1,438 @@
"""initial_schema_20251015_1228

Revision ID: e9976ec9fe9e
Revises:
Create Date: 2025-10-15 12:28:31.849997+02:00

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision: str = 'e9976ec9fe9e'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('audit_logs',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
    sa.Column('user_id', sa.UUID(), nullable=False),
    sa.Column('action', sa.String(length=100), nullable=False),
    sa.Column('resource_type', sa.String(length=100), nullable=False),
    sa.Column('resource_id', sa.String(length=255), nullable=True),
    sa.Column('severity', sa.String(length=20), nullable=False),
    sa.Column('service_name', sa.String(length=100), nullable=False),
    sa.Column('description', sa.Text(), nullable=True),
    sa.Column('changes', postgresql.JSON(astext_type=sa.Text()), nullable=True),
    sa.Column('audit_metadata', postgresql.JSON(astext_type=sa.Text()), nullable=True),
    sa.Column('ip_address', sa.String(length=45), nullable=True),
    sa.Column('user_agent', sa.Text(), nullable=True),
    sa.Column('endpoint', sa.String(length=255), nullable=True),
    sa.Column('method', sa.String(length=10), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index('idx_audit_resource_type_action', 'audit_logs', ['resource_type', 'action'], unique=False)
    op.create_index('idx_audit_service_created', 'audit_logs', ['service_name', 'created_at'], unique=False)
    op.create_index('idx_audit_severity_created', 'audit_logs', ['severity', 'created_at'], unique=False)
    op.create_index('idx_audit_tenant_created', 'audit_logs', ['tenant_id', 'created_at'], unique=False)
    op.create_index('idx_audit_user_created', 'audit_logs', ['user_id', 'created_at'], unique=False)
    op.create_index(op.f('ix_audit_logs_action'), 'audit_logs', ['action'], unique=False)
    op.create_index(op.f('ix_audit_logs_created_at'), 'audit_logs', ['created_at'], unique=False)
    op.create_index(op.f('ix_audit_logs_resource_id'), 'audit_logs', ['resource_id'], unique=False)
    op.create_index(op.f('ix_audit_logs_resource_type'), 'audit_logs', ['resource_type'], unique=False)
    op.create_index(op.f('ix_audit_logs_service_name'), 'audit_logs', ['service_name'], unique=False)
    op.create_index(op.f('ix_audit_logs_severity'), 'audit_logs', ['severity'], unique=False)
    op.create_index(op.f('ix_audit_logs_tenant_id'), 'audit_logs', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_audit_logs_user_id'), 'audit_logs', ['user_id'], unique=False)
    op.create_table('pos_configurations',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
    sa.Column('pos_system', sa.String(length=50), nullable=False),
    sa.Column('provider_name', sa.String(length=100), nullable=False),
    sa.Column('is_active', sa.Boolean(), nullable=False),
    sa.Column('is_connected', sa.Boolean(), nullable=False),
    sa.Column('encrypted_credentials', sa.Text(), nullable=True),
    sa.Column('webhook_url', sa.String(length=500), nullable=True),
    sa.Column('webhook_secret', sa.String(length=255), nullable=True),
    sa.Column('environment', sa.String(length=20), nullable=False),
    sa.Column('location_id', sa.String(length=100), nullable=True),
    sa.Column('merchant_id', sa.String(length=100), nullable=True),
    sa.Column('sync_enabled', sa.Boolean(), nullable=False),
    sa.Column('sync_interval_minutes', sa.String(length=10), nullable=False),
    sa.Column('auto_sync_products', sa.Boolean(), nullable=False),
    sa.Column('auto_sync_transactions', sa.Boolean(), nullable=False),
    sa.Column('last_sync_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('last_successful_sync_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('last_sync_status', sa.String(length=50), nullable=True),
    sa.Column('last_sync_message', sa.Text(), nullable=True),
    sa.Column('provider_settings', sa.JSON(), nullable=True),
    sa.Column('last_health_check_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('health_status', sa.String(length=50), nullable=False),
    sa.Column('health_message', sa.Text(), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
    sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
    sa.Column('created_by', sa.UUID(), nullable=True),
    sa.Column('notes', sa.Text(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index('idx_pos_config_active', 'pos_configurations', ['is_active'], unique=False)
    op.create_index('idx_pos_config_connected', 'pos_configurations', ['is_connected'], unique=False)
    op.create_index('idx_pos_config_created_at', 'pos_configurations', ['created_at'], unique=False)
    op.create_index('idx_pos_config_health_status', 'pos_configurations', ['health_status'], unique=False)
    op.create_index('idx_pos_config_sync_enabled', 'pos_configurations', ['sync_enabled'], unique=False)
    op.create_index('idx_pos_config_tenant_pos_system', 'pos_configurations', ['tenant_id', 'pos_system'], unique=False)
    op.create_index(op.f('ix_pos_configurations_id'), 'pos_configurations', ['id'], unique=False)
    op.create_index(op.f('ix_pos_configurations_tenant_id'), 'pos_configurations', ['tenant_id'], unique=False)
    op.create_table('pos_sync_logs',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
    sa.Column('pos_config_id', sa.UUID(), nullable=False),
    sa.Column('sync_type', sa.String(length=50), nullable=False),
    sa.Column('sync_direction', sa.String(length=20), nullable=False),
    sa.Column('data_type', sa.String(length=50), nullable=False),
    sa.Column('pos_system', sa.String(length=50), nullable=False),
    sa.Column('status', sa.String(length=50), nullable=False),
    sa.Column('started_at', sa.DateTime(timezone=True), nullable=False),
    sa.Column('completed_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('duration_seconds', sa.Numeric(precision=10, scale=3), nullable=True),
    sa.Column('sync_from_date', sa.DateTime(timezone=True), nullable=True),
    sa.Column('sync_to_date', sa.DateTime(timezone=True), nullable=True),
    sa.Column('records_requested', sa.Integer(), nullable=False),
    sa.Column('records_processed', sa.Integer(), nullable=False),
    sa.Column('records_created', sa.Integer(), nullable=False),
    sa.Column('records_updated', sa.Integer(), nullable=False),
    sa.Column('records_skipped', sa.Integer(), nullable=False),
    sa.Column('records_failed', sa.Integer(), nullable=False),
    sa.Column('api_calls_made', sa.Integer(), nullable=False),
    sa.Column('api_rate_limit_hits', sa.Integer(), nullable=False),
    sa.Column('total_api_time_ms', sa.Integer(), nullable=False),
    sa.Column('error_message', sa.Text(), nullable=True),
    sa.Column('error_code', sa.String(length=100), nullable=True),
    sa.Column('error_details', sa.JSON(), nullable=True),
    sa.Column('retry_attempt', sa.Integer(), nullable=False),
    sa.Column('max_retries', sa.Integer(), nullable=False),
    sa.Column('parent_sync_id', sa.UUID(), nullable=True),
    sa.Column('sync_configuration', sa.JSON(), nullable=True),
    sa.Column('current_page', sa.Integer(), nullable=True),
    sa.Column('total_pages', sa.Integer(), nullable=True),
    sa.Column('current_batch', sa.Integer(), nullable=True),
    sa.Column('total_batches', sa.Integer(), nullable=True),
    sa.Column('progress_percentage', sa.Numeric(precision=5, scale=2), nullable=True),
    sa.Column('validation_errors', sa.JSON(), nullable=True),
    sa.Column('data_quality_score', sa.Numeric(precision=5, scale=2), nullable=True),
    sa.Column('memory_usage_mb', sa.Numeric(precision=10, scale=2), nullable=True),
    sa.Column('cpu_usage_percentage', sa.Numeric(precision=5, scale=2), nullable=True),
    sa.Column('network_bytes_received', sa.Integer(), nullable=True),
    sa.Column('network_bytes_sent', sa.Integer(), nullable=True),
    sa.Column('revenue_synced', sa.Numeric(precision=12, scale=2), nullable=True),
    sa.Column('transactions_synced', sa.Integer(), nullable=False),
    sa.Column('triggered_by', sa.String(length=50), nullable=True),
    sa.Column('triggered_by_user_id', sa.UUID(), nullable=True),
    sa.Column('trigger_details', sa.JSON(), nullable=True),
    sa.Column('external_batch_id', sa.String(length=255), nullable=True),
    sa.Column('webhook_log_id', sa.UUID(), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
    sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
    sa.Column('notes', sa.Text(), nullable=True),
    sa.Column('tags', sa.JSON(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index('idx_sync_log_completed', 'pos_sync_logs', ['completed_at'], unique=False)
    op.create_index('idx_sync_log_data_type', 'pos_sync_logs', ['data_type'], unique=False)
    op.create_index('idx_sync_log_duration', 'pos_sync_logs', ['duration_seconds'], unique=False)
    op.create_index('idx_sync_log_external_batch', 'pos_sync_logs', ['external_batch_id'], unique=False)
    op.create_index('idx_sync_log_parent', 'pos_sync_logs', ['parent_sync_id'], unique=False)
    op.create_index('idx_sync_log_pos_system_type', 'pos_sync_logs', ['pos_system', 'sync_type'], unique=False)
    op.create_index('idx_sync_log_retry', 'pos_sync_logs', ['retry_attempt'], unique=False)
    op.create_index('idx_sync_log_status', 'pos_sync_logs', ['status'], unique=False)
    op.create_index('idx_sync_log_tenant_started', 'pos_sync_logs', ['tenant_id', 'started_at'], unique=False)
    op.create_index('idx_sync_log_trigger', 'pos_sync_logs', ['triggered_by'], unique=False)
    op.create_index('idx_sync_log_webhook', 'pos_sync_logs', ['webhook_log_id'], unique=False)
    op.create_index(op.f('ix_pos_sync_logs_data_type'), 'pos_sync_logs', ['data_type'], unique=False)
    op.create_index(op.f('ix_pos_sync_logs_id'), 'pos_sync_logs', ['id'], unique=False)
    op.create_index(op.f('ix_pos_sync_logs_pos_config_id'), 'pos_sync_logs', ['pos_config_id'], unique=False)
    op.create_index(op.f('ix_pos_sync_logs_pos_system'), 'pos_sync_logs', ['pos_system'], unique=False)
    op.create_index(op.f('ix_pos_sync_logs_started_at'), 'pos_sync_logs', ['started_at'], unique=False)
    op.create_index(op.f('ix_pos_sync_logs_status'), 'pos_sync_logs', ['status'], unique=False)
    op.create_index(op.f('ix_pos_sync_logs_sync_type'), 'pos_sync_logs', ['sync_type'], unique=False)
    op.create_index(op.f('ix_pos_sync_logs_tenant_id'), 'pos_sync_logs', ['tenant_id'], unique=False)
    op.create_table('pos_webhook_logs',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=True),
    sa.Column('pos_system', sa.String(length=50), nullable=False),
    sa.Column('webhook_type', sa.String(length=100), nullable=False),
    sa.Column('method', sa.String(length=10), nullable=False),
    sa.Column('url_path', sa.String(length=500), nullable=False),
    sa.Column('query_params', sa.JSON(), nullable=True),
    sa.Column('headers', sa.JSON(), nullable=True),
    sa.Column('raw_payload', sa.Text(), nullable=False),
    sa.Column('payload_size', sa.Integer(), nullable=False),
    sa.Column('content_type', sa.String(length=100), nullable=True),
    sa.Column('signature', sa.String(length=500), nullable=True),
    sa.Column('is_signature_valid', sa.Boolean(), nullable=True),
    sa.Column('source_ip', sa.String(length=45), nullable=True),
    sa.Column('status', sa.String(length=50), nullable=False),
    sa.Column('processing_started_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('processing_completed_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('processing_duration_ms', sa.Integer(), nullable=True),
    sa.Column('error_message', sa.Text(), nullable=True),
    sa.Column('error_code', sa.String(length=50), nullable=True),
    sa.Column('retry_count', sa.Integer(), nullable=False),
    sa.Column('max_retries', sa.Integer(), nullable=False),
    sa.Column('response_status_code', sa.Integer(), nullable=True),
    sa.Column('response_body', sa.Text(), nullable=True),
    sa.Column('response_sent_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('event_id', sa.String(length=255), nullable=True),
    sa.Column('event_timestamp', sa.DateTime(timezone=True), nullable=True),
    sa.Column('sequence_number', sa.Integer(), nullable=True),
    sa.Column('transaction_id', sa.String(length=255), nullable=True),
    sa.Column('order_id', sa.String(length=255), nullable=True),
    sa.Column('customer_id', sa.String(length=255), nullable=True),
    sa.Column('created_transaction_id', sa.UUID(), nullable=True),
    sa.Column('updated_transaction_id', sa.UUID(), nullable=True),
    sa.Column('is_duplicate', sa.Boolean(), nullable=False),
    sa.Column('duplicate_of', sa.UUID(), nullable=True),
    sa.Column('priority', sa.String(length=20), nullable=False),
    sa.Column('user_agent', sa.String(length=500), nullable=True),
    sa.Column('forwarded_for', sa.String(length=200), nullable=True),
    sa.Column('request_id', sa.String(length=100), nullable=True),
    sa.Column('received_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
    sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
    sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index('idx_webhook_duplicate', 'pos_webhook_logs', ['is_duplicate'], unique=False)
    op.create_index('idx_webhook_event_id', 'pos_webhook_logs', ['event_id'], unique=False)
    op.create_index('idx_webhook_order_id', 'pos_webhook_logs', ['order_id'], unique=False)
    op.create_index('idx_webhook_pos_system_type', 'pos_webhook_logs', ['pos_system', 'webhook_type'], unique=False)
    op.create_index('idx_webhook_priority', 'pos_webhook_logs', ['priority'], unique=False)
    op.create_index('idx_webhook_received_at', 'pos_webhook_logs', ['received_at'], unique=False)
    op.create_index('idx_webhook_retry', 'pos_webhook_logs', ['retry_count'], unique=False)
    op.create_index('idx_webhook_signature_valid', 'pos_webhook_logs', ['is_signature_valid'], unique=False)
    op.create_index('idx_webhook_status', 'pos_webhook_logs', ['status'], unique=False)
    op.create_index('idx_webhook_tenant_received', 'pos_webhook_logs', ['tenant_id', 'received_at'], unique=False)
    op.create_index('idx_webhook_transaction_id', 'pos_webhook_logs', ['transaction_id'], unique=False)
    op.create_index(op.f('ix_pos_webhook_logs_event_id'), 'pos_webhook_logs', ['event_id'], unique=False)
    op.create_index(op.f('ix_pos_webhook_logs_id'), 'pos_webhook_logs', ['id'], unique=False)
    op.create_index(op.f('ix_pos_webhook_logs_is_duplicate'), 'pos_webhook_logs', ['is_duplicate'], unique=False)
    op.create_index(op.f('ix_pos_webhook_logs_order_id'), 'pos_webhook_logs', ['order_id'], unique=False)
    op.create_index(op.f('ix_pos_webhook_logs_pos_system'), 'pos_webhook_logs', ['pos_system'], unique=False)
    op.create_index(op.f('ix_pos_webhook_logs_received_at'), 'pos_webhook_logs', ['received_at'], unique=False)
    op.create_index(op.f('ix_pos_webhook_logs_status'), 'pos_webhook_logs', ['status'], unique=False)
    op.create_index(op.f('ix_pos_webhook_logs_tenant_id'), 'pos_webhook_logs', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_pos_webhook_logs_transaction_id'), 'pos_webhook_logs', ['transaction_id'], unique=False)
    op.create_index(op.f('ix_pos_webhook_logs_webhook_type'), 'pos_webhook_logs', ['webhook_type'], unique=False)
    op.create_table('pos_transactions',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
    sa.Column('pos_config_id', sa.UUID(), nullable=False),
    sa.Column('pos_system', sa.String(length=50), nullable=False),
    sa.Column('external_transaction_id', sa.String(length=255), nullable=False),
    sa.Column('external_order_id', sa.String(length=255), nullable=True),
    sa.Column('transaction_type', sa.String(length=50), nullable=False),
    sa.Column('status', sa.String(length=50), nullable=False),
    sa.Column('subtotal', sa.Numeric(precision=10, scale=2), nullable=False),
    sa.Column('tax_amount', sa.Numeric(precision=10, scale=2), nullable=False),
    sa.Column('tip_amount', sa.Numeric(precision=10, scale=2), nullable=False),
    sa.Column('discount_amount', sa.Numeric(precision=10, scale=2), nullable=False),
    sa.Column('total_amount', sa.Numeric(precision=10, scale=2), nullable=False),
    sa.Column('currency', sa.String(length=3), nullable=False),
    sa.Column('payment_method', sa.String(length=50), nullable=True),
    sa.Column('payment_status', sa.String(length=50), nullable=True),
    sa.Column('transaction_date', sa.DateTime(timezone=True), nullable=False),
    sa.Column('pos_created_at', sa.DateTime(timezone=True), nullable=False),
    sa.Column('pos_updated_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('location_id', sa.String(length=100), nullable=True),
    sa.Column('location_name', sa.String(length=255), nullable=True),
    sa.Column('staff_id', sa.String(length=100), nullable=True),
    sa.Column('staff_name', sa.String(length=255), nullable=True),
    sa.Column('customer_id', sa.String(length=100), nullable=True),
    sa.Column('customer_email', sa.String(length=255), nullable=True),
    sa.Column('customer_phone', sa.String(length=50), nullable=True),
    sa.Column('order_type', sa.String(length=50), nullable=True),
    sa.Column('table_number', sa.String(length=20), nullable=True),
    sa.Column('receipt_number', sa.String(length=100), nullable=True),
    sa.Column('is_synced_to_sales', sa.Boolean(), nullable=False),
    sa.Column('sales_record_id', sa.UUID(), nullable=True),
    sa.Column('sync_attempted_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('sync_completed_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('sync_error', sa.Text(), nullable=True),
    sa.Column('sync_retry_count', sa.Integer(), nullable=False),
    sa.Column('raw_data', sa.JSON(), nullable=True),
    sa.Column('is_processed', sa.Boolean(), nullable=False),
    sa.Column('processing_error', sa.Text(), nullable=True),
    sa.Column('is_duplicate', sa.Boolean(), nullable=False),
    sa.Column('duplicate_of', sa.UUID(), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
    sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
    sa.ForeignKeyConstraint(['pos_config_id'], ['pos_configurations.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index('idx_pos_transaction_customer', 'pos_transactions', ['customer_id'], unique=False)
    op.create_index('idx_pos_transaction_duplicate', 'pos_transactions', ['is_duplicate'], unique=False)
    op.create_index('idx_pos_transaction_external_id', 'pos_transactions', ['pos_system', 'external_transaction_id'], unique=False)
    op.create_index('idx_pos_transaction_location', 'pos_transactions', ['location_id'], unique=False)
    op.create_index('idx_pos_transaction_processed', 'pos_transactions', ['is_processed'], unique=False)
    op.create_index('idx_pos_transaction_status', 'pos_transactions', ['status'], unique=False)
    op.create_index('idx_pos_transaction_sync_status', 'pos_transactions', ['is_synced_to_sales'], unique=False)
    op.create_index('idx_pos_transaction_tenant_date', 'pos_transactions', ['tenant_id', 'transaction_date'], unique=False)
    op.create_index('idx_pos_transaction_type', 'pos_transactions', ['transaction_type'], unique=False)
    op.create_index(op.f('ix_pos_transactions_external_order_id'), 'pos_transactions', ['external_order_id'], unique=False)
    op.create_index(op.f('ix_pos_transactions_external_transaction_id'), 'pos_transactions', ['external_transaction_id'], unique=False)
    op.create_index(op.f('ix_pos_transactions_id'), 'pos_transactions', ['id'], unique=False)
    op.create_index(op.f('ix_pos_transactions_is_synced_to_sales'), 'pos_transactions', ['is_synced_to_sales'], unique=False)
    op.create_index(op.f('ix_pos_transactions_pos_config_id'), 'pos_transactions', ['pos_config_id'], unique=False)
    op.create_index(op.f('ix_pos_transactions_pos_system'), 'pos_transactions', ['pos_system'], unique=False)
    op.create_index(op.f('ix_pos_transactions_sales_record_id'), 'pos_transactions', ['sales_record_id'], unique=False)
    op.create_index(op.f('ix_pos_transactions_tenant_id'), 'pos_transactions', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_pos_transactions_transaction_date'), 'pos_transactions', ['transaction_date'], unique=False)
    op.create_table('pos_transaction_items',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('transaction_id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
    sa.Column('external_item_id', sa.String(length=255), nullable=True),
    sa.Column('sku', sa.String(length=100), nullable=True),
    sa.Column('product_name', sa.String(length=255), nullable=False),
    sa.Column('product_category', sa.String(length=100), nullable=True),
    sa.Column('product_subcategory', sa.String(length=100), nullable=True),
    sa.Column('quantity', sa.Numeric(precision=10, scale=3), nullable=False),
    sa.Column('unit_price', sa.Numeric(precision=10, scale=2), nullable=False),
    sa.Column('total_price', sa.Numeric(precision=10, scale=2), nullable=False),
    sa.Column('discount_amount', sa.Numeric(precision=10, scale=2), nullable=False),
    sa.Column('tax_amount', sa.Numeric(precision=10, scale=2), nullable=False),
    sa.Column('modifiers', sa.JSON(), nullable=True),
    sa.Column('inventory_product_id', sa.UUID(), nullable=True),
    sa.Column('is_mapped_to_inventory', sa.Boolean(), nullable=False),
    sa.Column('is_synced_to_sales', sa.Boolean(), nullable=False),
    sa.Column('sync_error', sa.Text(), nullable=True),
    sa.Column('raw_data', sa.JSON(), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
    sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
    sa.ForeignKeyConstraint(['transaction_id'], ['pos_transactions.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index('idx_pos_item_category', 'pos_transaction_items', ['product_category'], unique=False)
    op.create_index('idx_pos_item_inventory', 'pos_transaction_items', ['inventory_product_id'], unique=False)
    op.create_index('idx_pos_item_mapped', 'pos_transaction_items', ['is_mapped_to_inventory'], unique=False)
    op.create_index('idx_pos_item_product', 'pos_transaction_items', ['product_name'], unique=False)
    op.create_index('idx_pos_item_sku', 'pos_transaction_items', ['sku'], unique=False)
    op.create_index('idx_pos_item_sync', 'pos_transaction_items', ['is_synced_to_sales'], unique=False)
    op.create_index('idx_pos_item_transaction', 'pos_transaction_items', ['transaction_id'], unique=False)
    op.create_index(op.f('ix_pos_transaction_items_id'), 'pos_transaction_items', ['id'], unique=False)
    op.create_index(op.f('ix_pos_transaction_items_inventory_product_id'), 'pos_transaction_items', ['inventory_product_id'], unique=False)
    op.create_index(op.f('ix_pos_transaction_items_product_category'), 'pos_transaction_items', ['product_category'], unique=False)
    op.create_index(op.f('ix_pos_transaction_items_sku'), 'pos_transaction_items', ['sku'], unique=False)
    op.create_index(op.f('ix_pos_transaction_items_tenant_id'), 'pos_transaction_items', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_pos_transaction_items_transaction_id'), 'pos_transaction_items', ['transaction_id'], unique=False)
    # ### end Alembic commands ###


def downgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f('ix_pos_transaction_items_transaction_id'), table_name='pos_transaction_items')
    op.drop_index(op.f('ix_pos_transaction_items_tenant_id'), table_name='pos_transaction_items')
    op.drop_index(op.f('ix_pos_transaction_items_sku'), table_name='pos_transaction_items')
    op.drop_index(op.f('ix_pos_transaction_items_product_category'), table_name='pos_transaction_items')
    op.drop_index(op.f('ix_pos_transaction_items_inventory_product_id'), table_name='pos_transaction_items')
    op.drop_index(op.f('ix_pos_transaction_items_id'), table_name='pos_transaction_items')
    op.drop_index('idx_pos_item_transaction', table_name='pos_transaction_items')
    op.drop_index('idx_pos_item_sync', table_name='pos_transaction_items')
    op.drop_index('idx_pos_item_sku', table_name='pos_transaction_items')
    op.drop_index('idx_pos_item_product', table_name='pos_transaction_items')
    op.drop_index('idx_pos_item_mapped', table_name='pos_transaction_items')
    op.drop_index('idx_pos_item_inventory', table_name='pos_transaction_items')
    op.drop_index('idx_pos_item_category', table_name='pos_transaction_items')
    op.drop_table('pos_transaction_items')
    op.drop_index(op.f('ix_pos_transactions_transaction_date'), table_name='pos_transactions')
    op.drop_index(op.f('ix_pos_transactions_tenant_id'), table_name='pos_transactions')
    op.drop_index(op.f('ix_pos_transactions_sales_record_id'), table_name='pos_transactions')
    op.drop_index(op.f('ix_pos_transactions_pos_system'), table_name='pos_transactions')
    op.drop_index(op.f('ix_pos_transactions_pos_config_id'), table_name='pos_transactions')
    op.drop_index(op.f('ix_pos_transactions_is_synced_to_sales'), table_name='pos_transactions')
    op.drop_index(op.f('ix_pos_transactions_id'), table_name='pos_transactions')
    op.drop_index(op.f('ix_pos_transactions_external_transaction_id'), table_name='pos_transactions')
    op.drop_index(op.f('ix_pos_transactions_external_order_id'), table_name='pos_transactions')
    op.drop_index('idx_pos_transaction_type', table_name='pos_transactions')
    op.drop_index('idx_pos_transaction_tenant_date', table_name='pos_transactions')
    op.drop_index('idx_pos_transaction_sync_status', table_name='pos_transactions')
    op.drop_index('idx_pos_transaction_status', table_name='pos_transactions')
    op.drop_index('idx_pos_transaction_processed', table_name='pos_transactions')
    op.drop_index('idx_pos_transaction_location', table_name='pos_transactions')
    op.drop_index('idx_pos_transaction_external_id', table_name='pos_transactions')
    op.drop_index('idx_pos_transaction_duplicate', table_name='pos_transactions')
    op.drop_index('idx_pos_transaction_customer', table_name='pos_transactions')
    op.drop_table('pos_transactions')
    op.drop_index(op.f('ix_pos_webhook_logs_webhook_type'), table_name='pos_webhook_logs')
    op.drop_index(op.f('ix_pos_webhook_logs_transaction_id'), table_name='pos_webhook_logs')
    op.drop_index(op.f('ix_pos_webhook_logs_tenant_id'), table_name='pos_webhook_logs')
    op.drop_index(op.f('ix_pos_webhook_logs_status'), table_name='pos_webhook_logs')
    op.drop_index(op.f('ix_pos_webhook_logs_received_at'), table_name='pos_webhook_logs')
    op.drop_index(op.f('ix_pos_webhook_logs_pos_system'), table_name='pos_webhook_logs')
    op.drop_index(op.f('ix_pos_webhook_logs_order_id'), table_name='pos_webhook_logs')
    op.drop_index(op.f('ix_pos_webhook_logs_is_duplicate'), table_name='pos_webhook_logs')
    op.drop_index(op.f('ix_pos_webhook_logs_id'), table_name='pos_webhook_logs')
    op.drop_index(op.f('ix_pos_webhook_logs_event_id'), table_name='pos_webhook_logs')
    op.drop_index('idx_webhook_transaction_id', table_name='pos_webhook_logs')
    op.drop_index('idx_webhook_tenant_received', table_name='pos_webhook_logs')
    op.drop_index('idx_webhook_status', table_name='pos_webhook_logs')
    op.drop_index('idx_webhook_signature_valid', table_name='pos_webhook_logs')
    op.drop_index('idx_webhook_retry', table_name='pos_webhook_logs')
    op.drop_index('idx_webhook_received_at', table_name='pos_webhook_logs')
    op.drop_index('idx_webhook_priority', table_name='pos_webhook_logs')
    op.drop_index('idx_webhook_pos_system_type', table_name='pos_webhook_logs')
    op.drop_index('idx_webhook_order_id', table_name='pos_webhook_logs')
    op.drop_index('idx_webhook_event_id', table_name='pos_webhook_logs')
    op.drop_index('idx_webhook_duplicate', table_name='pos_webhook_logs')
    op.drop_table('pos_webhook_logs')
    op.drop_index(op.f('ix_pos_sync_logs_tenant_id'), table_name='pos_sync_logs')
    op.drop_index(op.f('ix_pos_sync_logs_sync_type'), table_name='pos_sync_logs')
    op.drop_index(op.f('ix_pos_sync_logs_status'), table_name='pos_sync_logs')
    op.drop_index(op.f('ix_pos_sync_logs_started_at'), table_name='pos_sync_logs')
    op.drop_index(op.f('ix_pos_sync_logs_pos_system'), table_name='pos_sync_logs')
    op.drop_index(op.f('ix_pos_sync_logs_pos_config_id'), table_name='pos_sync_logs')
    op.drop_index(op.f('ix_pos_sync_logs_id'), table_name='pos_sync_logs')
    op.drop_index(op.f('ix_pos_sync_logs_data_type'), table_name='pos_sync_logs')
    op.drop_index('idx_sync_log_webhook', table_name='pos_sync_logs')
    op.drop_index('idx_sync_log_trigger', table_name='pos_sync_logs')
    op.drop_index('idx_sync_log_tenant_started', table_name='pos_sync_logs')
    op.drop_index('idx_sync_log_status', table_name='pos_sync_logs')
    op.drop_index('idx_sync_log_retry', table_name='pos_sync_logs')
    op.drop_index('idx_sync_log_pos_system_type', table_name='pos_sync_logs')
    op.drop_index('idx_sync_log_parent', table_name='pos_sync_logs')
    op.drop_index('idx_sync_log_external_batch', table_name='pos_sync_logs')
    op.drop_index('idx_sync_log_duration', table_name='pos_sync_logs')
    op.drop_index('idx_sync_log_data_type', table_name='pos_sync_logs')
    op.drop_index('idx_sync_log_completed', table_name='pos_sync_logs')
    op.drop_table('pos_sync_logs')
    op.drop_index(op.f('ix_pos_configurations_tenant_id'), table_name='pos_configurations')
    op.drop_index(op.f('ix_pos_configurations_id'), table_name='pos_configurations')
    op.drop_index('idx_pos_config_tenant_pos_system', table_name='pos_configurations')
    op.drop_index('idx_pos_config_sync_enabled', table_name='pos_configurations')
    op.drop_index('idx_pos_config_health_status', table_name='pos_configurations')
    op.drop_index('idx_pos_config_created_at', table_name='pos_configurations')
    op.drop_index('idx_pos_config_connected', table_name='pos_configurations')
    op.drop_index('idx_pos_config_active', table_name='pos_configurations')
    op.drop_table('pos_configurations')
    op.drop_index(op.f('ix_audit_logs_user_id'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_tenant_id'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_severity'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_service_name'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_resource_type'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_resource_id'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_created_at'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_action'), table_name='audit_logs')
    op.drop_index('idx_audit_user_created', table_name='audit_logs')
    op.drop_index('idx_audit_tenant_created', table_name='audit_logs')
    op.drop_index('idx_audit_severity_created', table_name='audit_logs')
    op.drop_index('idx_audit_service_created', table_name='audit_logs')
    op.drop_index('idx_audit_resource_type_action', table_name='audit_logs')
    op.drop_table('audit_logs')
    # ### end Alembic commands ###
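To apply the initial schema above, Alembic is normally driven from the CLI (alembic upgrade head) inside the service directory; it can also be run programmatically. A minimal sketch, assuming the service ships an alembic.ini alongside migrations/ and that the database URL shown is a placeholder:

from alembic import command
from alembic.config import Config

cfg = Config("services/pos/alembic.ini")  # assumed location of the ini file
cfg.set_main_option(
    "sqlalchemy.url",
    "postgresql+asyncpg://pos_user:secret@localhost:5432/pos_db",  # hypothetical URL
)

command.upgrade(cfg, "head")      # runs upgrade() of revision e9976ec9fe9e
# command.downgrade(cfg, "base")  # would run downgrade(), dropping all six tables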
22
services/pos/requirements.txt
Normal file
22
services/pos/requirements.txt
Normal file
@@ -0,0 +1,22 @@
fastapi==0.119.0
uvicorn[standard]==0.32.1
pydantic==2.12.3
pydantic-settings==2.7.1
sqlalchemy==2.0.44
asyncpg==0.30.0
alembic==1.17.0
structlog==25.4.0
aiohttp==3.11.10
redis==6.4.0
celery==5.4.0
cryptography==44.0.0
python-jose[cryptography]==3.3.0
httpx==0.28.1
websockets==14.1
psutil==5.9.8
python-multipart==0.0.6
aio-pika==9.4.3
email-validator==2.2.0
psycopg2-binary==2.9.10
pytz==2024.2
apscheduler==3.10.4