REFACTOR - Database logic
This commit is contained in:
@@ -0,0 +1,8 @@
|
||||
"""
|
||||
Notification API Package
|
||||
API endpoints for notification management
|
||||
"""
|
||||
|
||||
from . import notifications
|
||||
|
||||
__all__ = ["notifications"]
|
||||
File diff suppressed because it is too large
Load Diff
@@ -70,8 +70,9 @@ async def lifespan(app: FastAPI):
|
||||
async def check_database():
    """Health-check the database connection with a trivial round-trip query.

    Returns:
        ``True`` when ``SELECT 1`` succeeds on a session from ``get_db()``;
        otherwise a human-readable ``"Database error: ..."`` string (callers
        treat any non-``True`` value as the failure description).
    """
    try:
        # Imported lazily so the health probe itself never blocks app import
        # when the DB stack is unavailable.
        from app.core.database import get_db
        from sqlalchemy import text

        async for db in get_db():
            # SQLAlchemy 2.x requires an executable object — a bare string
            # raises ObjectNotExecutableError — so the probe statement must
            # be wrapped in text(). (The previous raw-string execute was
            # removed; running both would probe twice and fail first.)
            await db.execute(text("SELECT 1"))
            return True
    except Exception as e:
        # Report degradation as a string instead of raising so the caller's
        # health endpoint can aggregate component statuses.
        return f"Database error: {e}"
|
||||
|
||||
18
services/notification/app/repositories/__init__.py
Normal file
18
services/notification/app/repositories/__init__.py
Normal file
@@ -0,0 +1,18 @@
|
||||
"""
|
||||
Notification Service Repositories
|
||||
Repository implementations for notification service
|
||||
"""
|
||||
|
||||
from .base import NotificationBaseRepository
|
||||
from .notification_repository import NotificationRepository
|
||||
from .template_repository import TemplateRepository
|
||||
from .preference_repository import PreferenceRepository
|
||||
from .log_repository import LogRepository
|
||||
|
||||
__all__ = [
|
||||
"NotificationBaseRepository",
|
||||
"NotificationRepository",
|
||||
"TemplateRepository",
|
||||
"PreferenceRepository",
|
||||
"LogRepository"
|
||||
]
|
||||
259
services/notification/app/repositories/base.py
Normal file
259
services/notification/app/repositories/base.py
Normal file
@@ -0,0 +1,259 @@
|
||||
"""
|
||||
Base Repository for Notification Service
|
||||
Service-specific repository base class with notification utilities
|
||||
"""
|
||||
|
||||
from typing import Optional, List, Dict, Any, Type
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy import text, and_
|
||||
from datetime import datetime, timedelta
|
||||
import structlog
|
||||
|
||||
from shared.database.repository import BaseRepository
|
||||
from shared.database.exceptions import DatabaseError
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
|
||||
class NotificationBaseRepository(BaseRepository):
    """Base repository for notification service with common notification operations.

    Layers tenant/user/status-scoped listing helpers, raw-SQL maintenance
    queries (recent records, cleanup, per-tenant statistics) and shared
    payload validation on top of the generic ``BaseRepository``.
    """

    def __init__(self, model: Type, session: AsyncSession, cache_ttl: Optional[int] = 300):
        # Notifications change frequently, shorter cache time (5 minutes)
        super().__init__(model, session, cache_ttl)

    async def get_by_tenant_id(self, tenant_id: str, skip: int = 0, limit: int = 100) -> List:
        """Get records by tenant ID, newest first.

        Falls back to an *unscoped* listing when the model has no
        ``tenant_id`` column — callers relying on tenant isolation should
        be aware of this fallback.
        """
        if hasattr(self.model, 'tenant_id'):
            return await self.get_multi(
                skip=skip,
                limit=limit,
                filters={"tenant_id": tenant_id},
                order_by="created_at",
                order_desc=True
            )
        return await self.get_multi(skip=skip, limit=limit)

    async def get_by_user_id(self, user_id: str, skip: int = 0, limit: int = 100) -> List:
        """Get records by user ID (recipient or sender).

        Only the *first* matching column is used, checked in order:
        ``recipient_id``, ``sender_id``, ``user_id``.  Returns an empty
        list when the model has none of them.
        """
        filters = {}

        if hasattr(self.model, 'recipient_id'):
            filters["recipient_id"] = user_id
        elif hasattr(self.model, 'sender_id'):
            filters["sender_id"] = user_id
        elif hasattr(self.model, 'user_id'):
            filters["user_id"] = user_id

        if filters:
            return await self.get_multi(
                skip=skip,
                limit=limit,
                filters=filters,
                order_by="created_at",
                order_desc=True
            )
        return []

    async def get_by_status(self, status: str, skip: int = 0, limit: int = 100) -> List:
        """Get records by status; unscoped listing when the model has no ``status``."""
        if hasattr(self.model, 'status'):
            return await self.get_multi(
                skip=skip,
                limit=limit,
                filters={"status": status},
                order_by="created_at",
                order_desc=True
            )
        return await self.get_multi(skip=skip, limit=limit)

    async def get_active_records(self, skip: int = 0, limit: int = 100) -> List:
        """Get active records (if model has is_active field); otherwise unfiltered."""
        if hasattr(self.model, 'is_active'):
            return await self.get_multi(
                skip=skip,
                limit=limit,
                filters={"is_active": True},
                order_by="created_at",
                order_desc=True
            )
        return await self.get_multi(skip=skip, limit=limit)

    async def get_recent_records(self, hours: int = 24, skip: int = 0, limit: int = 100) -> List:
        """Get records created in the last N hours.

        Uses raw SQL against ``self.model.__tablename__`` (trusted model
        metadata, not user input) and rebuilds model instances from row
        mappings.  Returns ``[]`` on any failure.
        """
        try:
            cutoff_time = datetime.utcnow() - timedelta(hours=hours)
            table_name = self.model.__tablename__

            query_text = f"""
                SELECT * FROM {table_name}
                WHERE created_at >= :cutoff_time
                ORDER BY created_at DESC
                LIMIT :limit OFFSET :skip
            """

            result = await self.session.execute(text(query_text), {
                "cutoff_time": cutoff_time,
                "limit": limit,
                "skip": skip
            })

            records = []
            for row in result.fetchall():
                record_dict = dict(row._mapping)
                # NOTE(review): instances built this way are not tracked by
                # the session (detached) — presumably intended for read-only
                # consumption; confirm before mutating/persisting them.
                record = self.model(**record_dict)
                records.append(record)

            return records

        except Exception as e:
            logger.error("Failed to get recent records",
                         model=self.model.__name__,
                         hours=hours,
                         error=str(e))
            return []

    async def cleanup_old_records(self, days_old: int = 90) -> int:
        """Clean up old notification records (90 days by default).

        Returns the number of rows deleted.  Raises ``DatabaseError`` on
        failure.  NOTE(review): no explicit commit here — assumes the
        session/caller manages transaction boundaries; confirm.
        """
        try:
            cutoff_date = datetime.utcnow() - timedelta(days=days_old)
            table_name = self.model.__tablename__

            # Only delete successfully processed or cancelled records that are old
            conditions = [
                "created_at < :cutoff_date"
            ]

            # Add status condition if model has status field
            if hasattr(self.model, 'status'):
                conditions.append("status IN ('delivered', 'cancelled', 'failed')")

            query_text = f"""
                DELETE FROM {table_name}
                WHERE {' AND '.join(conditions)}
            """

            result = await self.session.execute(text(query_text), {"cutoff_date": cutoff_date})
            deleted_count = result.rowcount

            logger.info(f"Cleaned up old {self.model.__name__} records",
                        deleted_count=deleted_count,
                        days_old=days_old)

            return deleted_count

        except Exception as e:
            logger.error("Failed to cleanup old records",
                         model=self.model.__name__,
                         error=str(e))
            raise DatabaseError(f"Cleanup failed: {str(e)}")

    async def get_statistics_by_tenant(self, tenant_id: str) -> Dict[str, Any]:
        """Get statistics for a tenant.

        Returns total count, 24h activity count, and (when the model has a
        ``status`` column) a per-status breakdown.  On failure returns the
        same shape with zeroed values rather than raising.
        """
        try:
            table_name = self.model.__tablename__

            # Get basic counts
            total_records = await self.count(filters={"tenant_id": tenant_id})

            # Get recent activity (records in last 24 hours)
            twenty_four_hours_ago = datetime.utcnow() - timedelta(hours=24)
            recent_query = text(f"""
                SELECT COUNT(*) as count
                FROM {table_name}
                WHERE tenant_id = :tenant_id
                AND created_at >= :twenty_four_hours_ago
            """)

            result = await self.session.execute(recent_query, {
                "tenant_id": tenant_id,
                "twenty_four_hours_ago": twenty_four_hours_ago
            })
            recent_records = result.scalar() or 0

            # Get status breakdown if applicable
            status_breakdown = {}
            if hasattr(self.model, 'status'):
                status_query = text(f"""
                    SELECT status, COUNT(*) as count
                    FROM {table_name}
                    WHERE tenant_id = :tenant_id
                    GROUP BY status
                """)

                result = await self.session.execute(status_query, {"tenant_id": tenant_id})
                status_breakdown = {row.status: row.count for row in result.fetchall()}

            return {
                "total_records": total_records,
                "recent_records_24h": recent_records,
                "status_breakdown": status_breakdown
            }

        except Exception as e:
            logger.error("Failed to get tenant statistics",
                         model=self.model.__name__,
                         tenant_id=tenant_id,
                         error=str(e))
            return {
                "total_records": 0,
                "recent_records_24h": 0,
                "status_breakdown": {}
            }

    def _validate_notification_data(self, data: Dict[str, Any], required_fields: List[str]) -> Dict[str, Any]:
        """Validate notification-related data.

        Checks required fields plus format rules for ids, email, phone,
        priority, type and status when those keys are present and truthy.
        Returns ``{"is_valid": bool, "errors": [str, ...]}``; never raises.
        """
        errors = []

        for field in required_fields:
            # NOTE(review): falsy-but-valid values (0, False, "") fail this
            # required check — confirm that is intended for all callers.
            if field not in data or not data[field]:
                errors.append(f"Missing required field: {field}")

        # Validate tenant_id format if present
        if "tenant_id" in data and data["tenant_id"]:
            tenant_id = data["tenant_id"]
            if not isinstance(tenant_id, str) or len(tenant_id) < 1:
                errors.append("Invalid tenant_id format")

        # Validate user IDs if present
        user_fields = ["user_id", "recipient_id", "sender_id"]
        for field in user_fields:
            if field in data and data[field]:
                user_id = data[field]
                if not isinstance(user_id, str) or len(user_id) < 1:
                    errors.append(f"Invalid {field} format")

        # Validate email format if present (cheap sanity check, not RFC 5322)
        if "recipient_email" in data and data["recipient_email"]:
            email = data["recipient_email"]
            if "@" not in email or "." not in email.split("@")[-1]:
                errors.append("Invalid email format")

        # Validate phone format if present
        if "recipient_phone" in data and data["recipient_phone"]:
            phone = data["recipient_phone"]
            if not isinstance(phone, str) or len(phone) < 9:
                errors.append("Invalid phone format")

        # Validate priority if present
        if "priority" in data and data["priority"]:
            valid_priorities = ["low", "normal", "high", "urgent"]
            if data["priority"] not in valid_priorities:
                errors.append(f"Invalid priority. Must be one of: {valid_priorities}")

        # Validate notification type if present
        if "type" in data and data["type"]:
            valid_types = ["email", "whatsapp", "push", "sms"]
            if data["type"] not in valid_types:
                errors.append(f"Invalid notification type. Must be one of: {valid_types}")

        # Validate status if present
        if "status" in data and data["status"]:
            valid_statuses = ["pending", "sent", "delivered", "failed", "cancelled"]
            if data["status"] not in valid_statuses:
                errors.append(f"Invalid status. Must be one of: {valid_statuses}")

        return {
            "is_valid": len(errors) == 0,
            "errors": errors
        }
|
||||
470
services/notification/app/repositories/log_repository.py
Normal file
470
services/notification/app/repositories/log_repository.py
Normal file
@@ -0,0 +1,470 @@
|
||||
"""
|
||||
Log Repository
|
||||
Repository for notification log operations
|
||||
"""
|
||||
|
||||
from typing import Optional, List, Dict, Any
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy import select, text, and_
|
||||
from datetime import datetime, timedelta
|
||||
import structlog
|
||||
import json
|
||||
|
||||
from .base import NotificationBaseRepository
|
||||
from app.models.notifications import NotificationLog, NotificationStatus
|
||||
from shared.database.exceptions import DatabaseError, ValidationError
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
|
||||
class LogRepository(NotificationBaseRepository):
    """Repository for notification log operations.

    Handles per-attempt delivery logs: creation with validation and JSON
    serialization, retrieval by notification/provider, delivery analytics
    (raw SQL aggregates), retention cleanup, and timeline reconstruction.
    """

    def __init__(self, session: AsyncSession, cache_ttl: Optional[int] = 120):
        # Logs are very dynamic, very short cache time (2 minutes)
        super().__init__(NotificationLog, session, cache_ttl)

    async def create_log_entry(self, log_data: Dict[str, Any]) -> NotificationLog:
        """Create a new notification log entry.

        Raises ``ValidationError`` on bad payloads, ``DatabaseError`` on
        persistence failures.  NOTE(review): mutates ``log_data`` in place
        (defaults + JSON serialization) — confirm callers don't reuse it.
        """
        try:
            # Validate log data
            validation_result = self._validate_notification_data(
                log_data,
                ["notification_id", "attempt_number", "status"]
            )

            if not validation_result["is_valid"]:
                raise ValidationError(f"Invalid log data: {validation_result['errors']}")

            # Set default values
            if "attempted_at" not in log_data:
                log_data["attempted_at"] = datetime.utcnow()

            # Serialize metadata if it's a dict
            if "log_metadata" in log_data and isinstance(log_data["log_metadata"], dict):
                log_data["log_metadata"] = json.dumps(log_data["log_metadata"])

            # Serialize provider response if it's a dict
            if "provider_response" in log_data and isinstance(log_data["provider_response"], dict):
                log_data["provider_response"] = json.dumps(log_data["provider_response"])

            # Create log entry
            log_entry = await self.create(log_data)

            logger.debug("Notification log entry created",
                         log_id=log_entry.id,
                         notification_id=log_entry.notification_id,
                         attempt_number=log_entry.attempt_number,
                         status=log_entry.status.value)

            return log_entry

        except ValidationError:
            # Let validation failures propagate untouched for callers to map
            # onto 4xx responses.
            raise
        except Exception as e:
            logger.error("Failed to create log entry",
                         notification_id=log_data.get("notification_id"),
                         error=str(e))
            raise DatabaseError(f"Failed to create log entry: {str(e)}")

    async def get_logs_for_notification(
        self,
        notification_id: str,
        skip: int = 0,
        limit: int = 50
    ) -> List[NotificationLog]:
        """Get all log entries for a specific notification, oldest attempt first."""
        try:
            return await self.get_multi(
                filters={"notification_id": notification_id},
                skip=skip,
                limit=limit,
                order_by="attempt_number",
                order_desc=False
            )

        except Exception as e:
            logger.error("Failed to get logs for notification",
                         notification_id=notification_id,
                         error=str(e))
            return []

    async def get_latest_log_for_notification(
        self,
        notification_id: str
    ) -> Optional[NotificationLog]:
        """Get the most recent log entry (highest attempt number), or None."""
        try:
            logs = await self.get_multi(
                filters={"notification_id": notification_id},
                limit=1,
                order_by="attempt_number",
                order_desc=True
            )
            return logs[0] if logs else None

        except Exception as e:
            logger.error("Failed to get latest log for notification",
                         notification_id=notification_id,
                         error=str(e))
            return None

    async def get_failed_delivery_logs(
        self,
        hours_back: int = 24,
        provider: str = None,
        limit: int = 100
    ) -> List[NotificationLog]:
        """Get failed delivery logs for analysis, optionally scoped to a provider.

        Returns ``[]`` on any failure.
        """
        try:
            cutoff_time = datetime.utcnow() - timedelta(hours=hours_back)

            conditions = [
                "status = 'failed'",
                "attempted_at >= :cutoff_time"
            ]
            params = {"cutoff_time": cutoff_time, "limit": limit}

            if provider:
                conditions.append("provider = :provider")
                params["provider"] = provider

            query_text = f"""
                SELECT * FROM notification_logs
                WHERE {' AND '.join(conditions)}
                ORDER BY attempted_at DESC
                LIMIT :limit
            """

            result = await self.session.execute(text(query_text), params)

            logs = []
            for row in result.fetchall():
                record_dict = dict(row._mapping)
                # Convert enum string back to enum object
                record_dict["status"] = NotificationStatus(record_dict["status"])
                # NOTE(review): detached instances — read-only use presumed.
                log_entry = self.model(**record_dict)
                logs.append(log_entry)

            return logs

        except Exception as e:
            logger.error("Failed to get failed delivery logs",
                         hours_back=hours_back,
                         provider=provider,
                         error=str(e))
            return []

    async def get_delivery_performance_stats(
        self,
        hours_back: int = 24,
        provider: str = None
    ) -> Dict[str, Any]:
        """Get delivery performance statistics.

        Aggregates attempt counts, success/failure rates, response-time
        stats, top error codes, and (when not filtered to one provider) a
        per-provider breakdown.  On failure returns the same dict shape
        with zeroed values.
        """
        try:
            cutoff_time = datetime.utcnow() - timedelta(hours=hours_back)

            conditions = ["attempted_at >= :cutoff_time"]
            params = {"cutoff_time": cutoff_time}

            if provider:
                conditions.append("provider = :provider")
                params["provider"] = provider

            where_clause = " AND ".join(conditions)

            # Get overall statistics
            stats_query = text(f"""
                SELECT
                    COUNT(*) as total_attempts,
                    COUNT(CASE WHEN status = 'sent' OR status = 'delivered' THEN 1 END) as successful_attempts,
                    COUNT(CASE WHEN status = 'failed' THEN 1 END) as failed_attempts,
                    AVG(response_time_ms) as avg_response_time_ms,
                    MIN(response_time_ms) as min_response_time_ms,
                    MAX(response_time_ms) as max_response_time_ms
                FROM notification_logs
                WHERE {where_clause}
            """)

            result = await self.session.execute(stats_query, params)
            stats = result.fetchone()

            total = stats.total_attempts or 0
            successful = stats.successful_attempts or 0
            failed = stats.failed_attempts or 0

            success_rate = (successful / total * 100) if total > 0 else 0
            failure_rate = (failed / total * 100) if total > 0 else 0

            # Get error breakdown
            error_query = text(f"""
                SELECT error_code, COUNT(*) as count
                FROM notification_logs
                WHERE {where_clause} AND status = 'failed' AND error_code IS NOT NULL
                GROUP BY error_code
                ORDER BY count DESC
                LIMIT 10
            """)

            result = await self.session.execute(error_query, params)
            error_breakdown = {row.error_code: row.count for row in result.fetchall()}

            # Get provider breakdown if not filtering by provider
            provider_breakdown = {}
            if not provider:
                provider_query = text(f"""
                    SELECT provider,
                        COUNT(*) as total,
                        COUNT(CASE WHEN status = 'sent' OR status = 'delivered' THEN 1 END) as successful
                    FROM notification_logs
                    WHERE {where_clause} AND provider IS NOT NULL
                    GROUP BY provider
                    ORDER BY total DESC
                """)

                result = await self.session.execute(provider_query, params)
                for row in result.fetchall():
                    provider_success_rate = (row.successful / row.total * 100) if row.total > 0 else 0
                    provider_breakdown[row.provider] = {
                        "total": row.total,
                        "successful": row.successful,
                        "success_rate_percent": round(provider_success_rate, 2)
                    }

            return {
                "total_attempts": total,
                "successful_attempts": successful,
                "failed_attempts": failed,
                "success_rate_percent": round(success_rate, 2),
                "failure_rate_percent": round(failure_rate, 2),
                "avg_response_time_ms": float(stats.avg_response_time_ms or 0),
                "min_response_time_ms": int(stats.min_response_time_ms or 0),
                "max_response_time_ms": int(stats.max_response_time_ms or 0),
                "error_breakdown": error_breakdown,
                "provider_breakdown": provider_breakdown,
                "hours_analyzed": hours_back
            }

        except Exception as e:
            logger.error("Failed to get delivery performance stats",
                         hours_back=hours_back,
                         provider=provider,
                         error=str(e))
            return {
                "total_attempts": 0,
                "successful_attempts": 0,
                "failed_attempts": 0,
                "success_rate_percent": 0.0,
                "failure_rate_percent": 0.0,
                "avg_response_time_ms": 0.0,
                "min_response_time_ms": 0,
                "max_response_time_ms": 0,
                "error_breakdown": {},
                "provider_breakdown": {},
                "hours_analyzed": hours_back
            }

    async def get_logs_by_provider(
        self,
        provider: str,
        hours_back: int = 24,
        status: NotificationStatus = None,
        limit: int = 100
    ) -> List[NotificationLog]:
        """Get logs for a specific provider, newest first; ``[]`` on failure."""
        try:
            cutoff_time = datetime.utcnow() - timedelta(hours=hours_back)

            conditions = [
                "provider = :provider",
                "attempted_at >= :cutoff_time"
            ]
            params = {"provider": provider, "cutoff_time": cutoff_time, "limit": limit}

            if status:
                conditions.append("status = :status")
                params["status"] = status.value

            query_text = f"""
                SELECT * FROM notification_logs
                WHERE {' AND '.join(conditions)}
                ORDER BY attempted_at DESC
                LIMIT :limit
            """

            result = await self.session.execute(text(query_text), params)

            logs = []
            for row in result.fetchall():
                record_dict = dict(row._mapping)
                # Convert enum string back to enum object
                record_dict["status"] = NotificationStatus(record_dict["status"])
                log_entry = self.model(**record_dict)
                logs.append(log_entry)

            return logs

        except Exception as e:
            logger.error("Failed to get logs by provider",
                         provider=provider,
                         error=str(e))
            return []

    async def cleanup_old_logs(self, days_old: int = 30) -> int:
        """Clean up old notification logs; returns rows deleted.

        NOTE(review): no explicit commit — assumes caller/session manages
        the transaction; confirm.
        """
        try:
            cutoff_date = datetime.utcnow() - timedelta(days=days_old)

            # Only delete logs for successfully delivered or permanently failed notifications
            query_text = """
                DELETE FROM notification_logs
                WHERE attempted_at < :cutoff_date
                AND status IN ('delivered', 'failed')
            """

            result = await self.session.execute(text(query_text), {"cutoff_date": cutoff_date})
            deleted_count = result.rowcount

            logger.info("Cleaned up old notification logs",
                        deleted_count=deleted_count,
                        days_old=days_old)

            return deleted_count

        except Exception as e:
            logger.error("Failed to cleanup old logs", error=str(e))
            raise DatabaseError(f"Cleanup failed: {str(e)}")

    async def get_notification_timeline(
        self,
        notification_id: str
    ) -> Dict[str, Any]:
        """Get complete timeline for a notification including all attempts.

        Deserializes stored JSON metadata/provider responses, falling back
        to the raw string when parsing fails.  On error returns a dict with
        an ``error`` key and empty timeline.
        """
        try:
            logs = await self.get_logs_for_notification(notification_id)

            timeline = []
            for log in logs:
                entry = {
                    "attempt_number": log.attempt_number,
                    "status": log.status.value,
                    "attempted_at": log.attempted_at.isoformat() if log.attempted_at else None,
                    "provider": log.provider,
                    "provider_message_id": log.provider_message_id,
                    "response_time_ms": log.response_time_ms,
                    "error_code": log.error_code,
                    "error_message": log.error_message
                }

                # Parse metadata if present
                if log.log_metadata:
                    try:
                        entry["metadata"] = json.loads(log.log_metadata)
                    except json.JSONDecodeError:
                        # Keep the raw payload rather than dropping it.
                        entry["metadata"] = log.log_metadata

                # Parse provider response if present
                if log.provider_response:
                    try:
                        entry["provider_response"] = json.loads(log.provider_response)
                    except json.JSONDecodeError:
                        entry["provider_response"] = log.provider_response

                timeline.append(entry)

            # Calculate summary statistics
            total_attempts = len(logs)
            successful_attempts = len([log for log in logs if log.status in [NotificationStatus.SENT, NotificationStatus.DELIVERED]])
            failed_attempts = len([log for log in logs if log.status == NotificationStatus.FAILED])

            avg_response_time = 0
            if logs:
                response_times = [log.response_time_ms for log in logs if log.response_time_ms is not None]
                avg_response_time = sum(response_times) / len(response_times) if response_times else 0

            return {
                "notification_id": notification_id,
                "total_attempts": total_attempts,
                "successful_attempts": successful_attempts,
                "failed_attempts": failed_attempts,
                "avg_response_time_ms": round(avg_response_time, 2),
                "timeline": timeline
            }

        except Exception as e:
            logger.error("Failed to get notification timeline",
                         notification_id=notification_id,
                         error=str(e))
            return {
                "notification_id": notification_id,
                "error": str(e),
                "timeline": []
            }

    async def get_retry_analysis(self, days_back: int = 7) -> Dict[str, Any]:
        """Analyze retry patterns and success rates.

        Returns per-attempt-number success statistics and the top failure
        (error_code, attempt_number) patterns over the window.
        """
        try:
            cutoff_date = datetime.utcnow() - timedelta(days=days_back)

            # Get retry statistics
            retry_query = text("""
                SELECT
                    attempt_number,
                    COUNT(*) as total_attempts,
                    COUNT(CASE WHEN status = 'sent' OR status = 'delivered' THEN 1 END) as successful_attempts
                FROM notification_logs
                WHERE attempted_at >= :cutoff_date
                GROUP BY attempt_number
                ORDER BY attempt_number
            """)

            result = await self.session.execute(retry_query, {"cutoff_date": cutoff_date})

            retry_stats = {}
            for row in result.fetchall():
                success_rate = (row.successful_attempts / row.total_attempts * 100) if row.total_attempts > 0 else 0
                retry_stats[row.attempt_number] = {
                    "total_attempts": row.total_attempts,
                    "successful_attempts": row.successful_attempts,
                    "success_rate_percent": round(success_rate, 2)
                }

            # Get common failure patterns
            failure_query = text("""
                SELECT
                    error_code,
                    attempt_number,
                    COUNT(*) as count
                FROM notification_logs
                WHERE attempted_at >= :cutoff_date
                AND status = 'failed'
                AND error_code IS NOT NULL
                GROUP BY error_code, attempt_number
                ORDER BY count DESC
                LIMIT 20
            """)

            result = await self.session.execute(failure_query, {"cutoff_date": cutoff_date})

            failure_patterns = []
            for row in result.fetchall():
                failure_patterns.append({
                    "error_code": row.error_code,
                    "attempt_number": row.attempt_number,
                    "count": row.count
                })

            return {
                "retry_statistics": retry_stats,
                "failure_patterns": failure_patterns,
                "days_analyzed": days_back
            }

        except Exception as e:
            logger.error("Failed to get retry analysis", error=str(e))
            return {
                "retry_statistics": {},
                "failure_patterns": [],
                "days_analyzed": days_back,
                "error": str(e)
            }
|
||||
@@ -0,0 +1,515 @@
|
||||
"""
|
||||
Notification Repository
|
||||
Repository for notification operations
|
||||
"""
|
||||
|
||||
from typing import Optional, List, Dict, Any
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy import select, text, and_, or_
|
||||
from datetime import datetime, timedelta
|
||||
import structlog
|
||||
import json
|
||||
|
||||
from .base import NotificationBaseRepository
|
||||
from app.models.notifications import Notification, NotificationStatus, NotificationType, NotificationPriority
|
||||
from shared.database.exceptions import DatabaseError, ValidationError, DuplicateRecordError
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
|
||||
class NotificationRepository(NotificationBaseRepository):
|
||||
"""Repository for notification operations"""
|
||||
|
||||
    def __init__(self, session: AsyncSession, cache_ttl: Optional[int] = 300):
        """Bind the repository to the ``Notification`` model on the given session."""
        # Notifications are very dynamic, short cache time (5 minutes)
        super().__init__(Notification, session, cache_ttl)
|
||||
|
||||
    async def create_notification(self, notification_data: Dict[str, Any]) -> Notification:
        """Create a new notification with validation.

        Validates required fields, fills sensible defaults (pending status,
        normal priority, 3 max retries, non-broadcast, unread), then persists.
        Raises ``ValidationError`` for bad payloads, ``DatabaseError`` for
        persistence failures.  NOTE(review): mutates ``notification_data``
        in place — confirm callers don't reuse the dict.
        """
        try:
            # Validate notification data
            validation_result = self._validate_notification_data(
                notification_data,
                ["tenant_id", "sender_id", "type", "message"]
            )

            if not validation_result["is_valid"]:
                raise ValidationError(f"Invalid notification data: {validation_result['errors']}")

            # Set default values
            if "status" not in notification_data:
                notification_data["status"] = NotificationStatus.PENDING
            if "priority" not in notification_data:
                notification_data["priority"] = NotificationPriority.NORMAL
            if "retry_count" not in notification_data:
                notification_data["retry_count"] = 0
            if "max_retries" not in notification_data:
                notification_data["max_retries"] = 3
            if "broadcast" not in notification_data:
                notification_data["broadcast"] = False
            if "read" not in notification_data:
                notification_data["read"] = False

            # Create notification
            notification = await self.create(notification_data)

            logger.info("Notification created successfully",
                        notification_id=notification.id,
                        tenant_id=notification.tenant_id,
                        type=notification.type.value,
                        recipient_id=notification.recipient_id,
                        priority=notification.priority.value)

            return notification

        except ValidationError:
            # Propagate validation errors unchanged for API-layer handling.
            raise
        except Exception as e:
            logger.error("Failed to create notification",
                         tenant_id=notification_data.get("tenant_id"),
                         type=notification_data.get("type"),
                         error=str(e))
            raise DatabaseError(f"Failed to create notification: {str(e)}")
|
||||
|
||||
    async def get_pending_notifications(self, limit: int = 100) -> List[Notification]:
        """Get pending notifications ready for processing.

        Selects pending rows whose schedule time is unset or due and that
        still have retries left.  Returns ``[]`` on any failure.
        """
        try:
            # Get notifications that are pending and either not scheduled or scheduled for now/past
            now = datetime.utcnow()

            # NOTE(review): if `priority` is stored as its string value,
            # ORDER BY priority DESC sorts lexicographically ('urgent' >
            # 'normal' > 'low' > 'high'), which does not match semantic
            # urgency — confirm the column type before relying on this order.
            query_text = """
                SELECT * FROM notifications
                WHERE status = 'pending'
                AND (scheduled_at IS NULL OR scheduled_at <= :now)
                AND retry_count < max_retries
                ORDER BY priority DESC, created_at ASC
                LIMIT :limit
            """

            result = await self.session.execute(text(query_text), {
                "now": now,
                "limit": limit
            })

            notifications = []
            for row in result.fetchall():
                record_dict = dict(row._mapping)
                # Convert enum strings back to enum objects
                record_dict["status"] = NotificationStatus(record_dict["status"])
                record_dict["type"] = NotificationType(record_dict["type"])
                record_dict["priority"] = NotificationPriority(record_dict["priority"])
                # NOTE(review): instances built from raw rows are detached
                # from the session — presumed read-only/worker use.
                notification = self.model(**record_dict)
                notifications.append(notification)

            return notifications

        except Exception as e:
            logger.error("Failed to get pending notifications", error=str(e))
            return []
|
||||
|
||||
async def get_notifications_by_recipient(
|
||||
self,
|
||||
recipient_id: str,
|
||||
tenant_id: str = None,
|
||||
status: NotificationStatus = None,
|
||||
notification_type: NotificationType = None,
|
||||
unread_only: bool = False,
|
||||
skip: int = 0,
|
||||
limit: int = 50
|
||||
) -> List[Notification]:
|
||||
"""Get notifications for a specific recipient with filters"""
|
||||
try:
|
||||
filters = {"recipient_id": recipient_id}
|
||||
|
||||
if tenant_id:
|
||||
filters["tenant_id"] = tenant_id
|
||||
|
||||
if status:
|
||||
filters["status"] = status
|
||||
|
||||
if notification_type:
|
||||
filters["type"] = notification_type
|
||||
|
||||
if unread_only:
|
||||
filters["read"] = False
|
||||
|
||||
return await self.get_multi(
|
||||
filters=filters,
|
||||
skip=skip,
|
||||
limit=limit,
|
||||
order_by="created_at",
|
||||
order_desc=True
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to get notifications by recipient",
|
||||
recipient_id=recipient_id,
|
||||
error=str(e))
|
||||
return []
|
||||
|
||||
async def get_broadcast_notifications(
|
||||
self,
|
||||
tenant_id: str,
|
||||
skip: int = 0,
|
||||
limit: int = 50
|
||||
) -> List[Notification]:
|
||||
"""Get broadcast notifications for a tenant"""
|
||||
try:
|
||||
return await self.get_multi(
|
||||
filters={
|
||||
"tenant_id": tenant_id,
|
||||
"broadcast": True
|
||||
},
|
||||
skip=skip,
|
||||
limit=limit,
|
||||
order_by="created_at",
|
||||
order_desc=True
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to get broadcast notifications",
|
||||
tenant_id=tenant_id,
|
||||
error=str(e))
|
||||
return []
|
||||
|
||||
async def update_notification_status(
|
||||
self,
|
||||
notification_id: str,
|
||||
new_status: NotificationStatus,
|
||||
error_message: str = None,
|
||||
provider_message_id: str = None,
|
||||
metadata: Dict[str, Any] = None
|
||||
) -> Optional[Notification]:
|
||||
"""Update notification status and related fields"""
|
||||
try:
|
||||
update_data = {
|
||||
"status": new_status,
|
||||
"updated_at": datetime.utcnow()
|
||||
}
|
||||
|
||||
# Set timestamp based on status
|
||||
if new_status == NotificationStatus.SENT:
|
||||
update_data["sent_at"] = datetime.utcnow()
|
||||
elif new_status == NotificationStatus.DELIVERED:
|
||||
update_data["delivered_at"] = datetime.utcnow()
|
||||
if "sent_at" not in update_data:
|
||||
update_data["sent_at"] = datetime.utcnow()
|
||||
|
||||
# Add error message if provided
|
||||
if error_message:
|
||||
update_data["error_message"] = error_message
|
||||
|
||||
# Add metadata if provided
|
||||
if metadata:
|
||||
update_data["log_metadata"] = json.dumps(metadata)
|
||||
|
||||
updated_notification = await self.update(notification_id, update_data)
|
||||
|
||||
logger.info("Notification status updated",
|
||||
notification_id=notification_id,
|
||||
new_status=new_status.value,
|
||||
provider_message_id=provider_message_id)
|
||||
|
||||
return updated_notification
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to update notification status",
|
||||
notification_id=notification_id,
|
||||
new_status=new_status.value,
|
||||
error=str(e))
|
||||
raise DatabaseError(f"Failed to update status: {str(e)}")
|
||||
|
||||
async def increment_retry_count(self, notification_id: str) -> Optional[Notification]:
|
||||
"""Increment retry count for a notification"""
|
||||
try:
|
||||
notification = await self.get_by_id(notification_id)
|
||||
if not notification:
|
||||
return None
|
||||
|
||||
new_retry_count = notification.retry_count + 1
|
||||
update_data = {
|
||||
"retry_count": new_retry_count,
|
||||
"updated_at": datetime.utcnow()
|
||||
}
|
||||
|
||||
# If max retries exceeded, mark as failed
|
||||
if new_retry_count >= notification.max_retries:
|
||||
update_data["status"] = NotificationStatus.FAILED
|
||||
update_data["error_message"] = "Maximum retry attempts exceeded"
|
||||
|
||||
updated_notification = await self.update(notification_id, update_data)
|
||||
|
||||
logger.info("Notification retry count incremented",
|
||||
notification_id=notification_id,
|
||||
retry_count=new_retry_count,
|
||||
max_retries=notification.max_retries)
|
||||
|
||||
return updated_notification
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to increment retry count",
|
||||
notification_id=notification_id,
|
||||
error=str(e))
|
||||
raise DatabaseError(f"Failed to increment retry count: {str(e)}")
|
||||
|
||||
async def mark_as_read(self, notification_id: str) -> Optional[Notification]:
|
||||
"""Mark notification as read"""
|
||||
try:
|
||||
updated_notification = await self.update(notification_id, {
|
||||
"read": True,
|
||||
"read_at": datetime.utcnow()
|
||||
})
|
||||
|
||||
logger.info("Notification marked as read",
|
||||
notification_id=notification_id)
|
||||
|
||||
return updated_notification
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to mark notification as read",
|
||||
notification_id=notification_id,
|
||||
error=str(e))
|
||||
raise DatabaseError(f"Failed to mark as read: {str(e)}")
|
||||
|
||||
async def mark_multiple_as_read(
|
||||
self,
|
||||
recipient_id: str,
|
||||
notification_ids: List[str] = None,
|
||||
tenant_id: str = None
|
||||
) -> int:
|
||||
"""Mark multiple notifications as read"""
|
||||
try:
|
||||
conditions = ["recipient_id = :recipient_id", "read = false"]
|
||||
params = {"recipient_id": recipient_id}
|
||||
|
||||
if notification_ids:
|
||||
placeholders = ", ".join([f":id_{i}" for i in range(len(notification_ids))])
|
||||
conditions.append(f"id IN ({placeholders})")
|
||||
for i, notification_id in enumerate(notification_ids):
|
||||
params[f"id_{i}"] = notification_id
|
||||
|
||||
if tenant_id:
|
||||
conditions.append("tenant_id = :tenant_id")
|
||||
params["tenant_id"] = tenant_id
|
||||
|
||||
query_text = f"""
|
||||
UPDATE notifications
|
||||
SET read = true, read_at = :read_at
|
||||
WHERE {' AND '.join(conditions)}
|
||||
"""
|
||||
|
||||
params["read_at"] = datetime.utcnow()
|
||||
|
||||
result = await self.session.execute(text(query_text), params)
|
||||
updated_count = result.rowcount
|
||||
|
||||
logger.info("Multiple notifications marked as read",
|
||||
recipient_id=recipient_id,
|
||||
updated_count=updated_count)
|
||||
|
||||
return updated_count
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to mark multiple notifications as read",
|
||||
recipient_id=recipient_id,
|
||||
error=str(e))
|
||||
raise DatabaseError(f"Failed to mark multiple as read: {str(e)}")
|
||||
|
||||
async def get_failed_notifications_for_retry(self, hours_ago: int = 1) -> List[Notification]:
|
||||
"""Get failed notifications that can be retried"""
|
||||
try:
|
||||
cutoff_time = datetime.utcnow() - timedelta(hours=hours_ago)
|
||||
|
||||
query_text = """
|
||||
SELECT * FROM notifications
|
||||
WHERE status = 'failed'
|
||||
AND retry_count < max_retries
|
||||
AND updated_at >= :cutoff_time
|
||||
ORDER BY priority DESC, updated_at ASC
|
||||
LIMIT 100
|
||||
"""
|
||||
|
||||
result = await self.session.execute(text(query_text), {
|
||||
"cutoff_time": cutoff_time
|
||||
})
|
||||
|
||||
notifications = []
|
||||
for row in result.fetchall():
|
||||
record_dict = dict(row._mapping)
|
||||
# Convert enum strings back to enum objects
|
||||
record_dict["status"] = NotificationStatus(record_dict["status"])
|
||||
record_dict["type"] = NotificationType(record_dict["type"])
|
||||
record_dict["priority"] = NotificationPriority(record_dict["priority"])
|
||||
notification = self.model(**record_dict)
|
||||
notifications.append(notification)
|
||||
|
||||
return notifications
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to get failed notifications for retry", error=str(e))
|
||||
return []
|
||||
|
||||
    async def get_notification_statistics(
        self,
        tenant_id: str = None,
        days_back: int = 30
    ) -> Dict[str, Any]:
        """Aggregate notification statistics over the last *days_back* days.

        Returns a dict with totals, per-status and per-type counts, delivery
        and failure rates (percent), average sent->delivered latency in
        seconds, the tenant's unread count (0 when no tenant_id is given),
        and the analyzed window. On any error a zeroed-out dict with the
        same keys is returned instead of raising.
        """
        try:
            cutoff_date = datetime.utcnow() - timedelta(days=days_back)

            # Build base query conditions shared by all three aggregate
            # queries below; values are passed as bound parameters.
            conditions = ["created_at >= :cutoff_date"]
            params = {"cutoff_date": cutoff_date}

            if tenant_id:
                conditions.append("tenant_id = :tenant_id")
                params["tenant_id"] = tenant_id

            where_clause = " AND ".join(conditions)

            # Get statistics by status
            status_query = text(f"""
                SELECT status, COUNT(*) as count
                FROM notifications
                WHERE {where_clause}
                GROUP BY status
                ORDER BY count DESC
            """)

            result = await self.session.execute(status_query, params)
            status_stats = {row.status: row.count for row in result.fetchall()}

            # Get statistics by type
            type_query = text(f"""
                SELECT type, COUNT(*) as count
                FROM notifications
                WHERE {where_clause}
                GROUP BY type
                ORDER BY count DESC
            """)

            result = await self.session.execute(type_query, params)
            type_stats = {row.type: row.count for row in result.fetchall()}

            # Get delivery rate: totals plus average sent->delivered latency
            # (EXTRACT(EPOCH ...) is PostgreSQL syntax).
            delivery_query = text(f"""
                SELECT
                    COUNT(*) as total_notifications,
                    COUNT(CASE WHEN status = 'delivered' THEN 1 END) as delivered_count,
                    COUNT(CASE WHEN status = 'failed' THEN 1 END) as failed_count,
                    AVG(CASE WHEN sent_at IS NOT NULL AND delivered_at IS NOT NULL
                        THEN EXTRACT(EPOCH FROM (delivered_at - sent_at)) END) as avg_delivery_time_seconds
                FROM notifications
                WHERE {where_clause}
            """)

            result = await self.session.execute(delivery_query, params)
            delivery_row = result.fetchone()

            # Guard against NULL aggregates on an empty result set.
            total = delivery_row.total_notifications or 0
            delivered = delivery_row.delivered_count or 0
            failed = delivery_row.failed_count or 0
            delivery_rate = (delivered / total * 100) if total > 0 else 0
            failure_rate = (failed / total * 100) if total > 0 else 0

            # Get unread count (if tenant_id provided). NOTE: intentionally
            # not limited by cutoff_date — it counts all unread rows.
            unread_count = 0
            if tenant_id:
                unread_query = text(f"""
                    SELECT COUNT(*) as count
                    FROM notifications
                    WHERE tenant_id = :tenant_id AND read = false
                """)

                result = await self.session.execute(unread_query, {"tenant_id": tenant_id})
                unread_count = result.scalar() or 0

            return {
                "total_notifications": total,
                "by_status": status_stats,
                "by_type": type_stats,
                "delivery_rate_percent": round(delivery_rate, 2),
                "failure_rate_percent": round(failure_rate, 2),
                "avg_delivery_time_seconds": float(delivery_row.avg_delivery_time_seconds or 0),
                "unread_count": unread_count,
                "days_analyzed": days_back
            }

        except Exception as e:
            logger.error("Failed to get notification statistics",
                         tenant_id=tenant_id,
                         error=str(e))
            # Fail soft: callers always get the full key set.
            return {
                "total_notifications": 0,
                "by_status": {},
                "by_type": {},
                "delivery_rate_percent": 0.0,
                "failure_rate_percent": 0.0,
                "avg_delivery_time_seconds": 0.0,
                "unread_count": 0,
                "days_analyzed": days_back
            }
|
||||
|
||||
async def cancel_notification(self, notification_id: str, reason: str = None) -> Optional[Notification]:
|
||||
"""Cancel a pending notification"""
|
||||
try:
|
||||
notification = await self.get_by_id(notification_id)
|
||||
if not notification:
|
||||
return None
|
||||
|
||||
if notification.status != NotificationStatus.PENDING:
|
||||
raise ValidationError("Can only cancel pending notifications")
|
||||
|
||||
update_data = {
|
||||
"status": NotificationStatus.CANCELLED,
|
||||
"updated_at": datetime.utcnow()
|
||||
}
|
||||
|
||||
if reason:
|
||||
update_data["error_message"] = f"Cancelled: {reason}"
|
||||
|
||||
updated_notification = await self.update(notification_id, update_data)
|
||||
|
||||
logger.info("Notification cancelled",
|
||||
notification_id=notification_id,
|
||||
reason=reason)
|
||||
|
||||
return updated_notification
|
||||
|
||||
except ValidationError:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error("Failed to cancel notification",
|
||||
notification_id=notification_id,
|
||||
error=str(e))
|
||||
raise DatabaseError(f"Failed to cancel notification: {str(e)}")
|
||||
|
||||
async def schedule_notification(
|
||||
self,
|
||||
notification_id: str,
|
||||
scheduled_at: datetime
|
||||
) -> Optional[Notification]:
|
||||
"""Schedule a notification for future delivery"""
|
||||
try:
|
||||
if scheduled_at <= datetime.utcnow():
|
||||
raise ValidationError("Scheduled time must be in the future")
|
||||
|
||||
updated_notification = await self.update(notification_id, {
|
||||
"scheduled_at": scheduled_at,
|
||||
"updated_at": datetime.utcnow()
|
||||
})
|
||||
|
||||
logger.info("Notification scheduled",
|
||||
notification_id=notification_id,
|
||||
scheduled_at=scheduled_at)
|
||||
|
||||
return updated_notification
|
||||
|
||||
except ValidationError:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error("Failed to schedule notification",
|
||||
notification_id=notification_id,
|
||||
error=str(e))
|
||||
raise DatabaseError(f"Failed to schedule notification: {str(e)}")
|
||||
474
services/notification/app/repositories/preference_repository.py
Normal file
474
services/notification/app/repositories/preference_repository.py
Normal file
@@ -0,0 +1,474 @@
|
||||
"""
|
||||
Preference Repository
|
||||
Repository for notification preference operations
|
||||
"""
|
||||
|
||||
from typing import Optional, List, Dict, Any
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy import select, text, and_
|
||||
from datetime import datetime
|
||||
import structlog
|
||||
|
||||
from .base import NotificationBaseRepository
|
||||
from app.models.notifications import NotificationPreference
|
||||
from shared.database.exceptions import DatabaseError, ValidationError, DuplicateRecordError
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
|
||||
class PreferenceRepository(NotificationBaseRepository):
    """Repository for notification preference operations.

    Provides per-user CRUD, per-channel/category lookups (email, WhatsApp,
    push), quiet-hours checks, digest selection and bulk updates over
    NotificationPreference rows.
    """

    def __init__(self, session: AsyncSession, cache_ttl: Optional[int] = 900):
        # Preferences are relatively stable, so a medium cache time
        # (900 s = 15 minutes) balances freshness against DB load.
        super().__init__(NotificationPreference, session, cache_ttl)
|
||||
|
||||
async def create_preferences(self, preference_data: Dict[str, Any]) -> NotificationPreference:
|
||||
"""Create user notification preferences with validation"""
|
||||
try:
|
||||
# Validate preference data
|
||||
validation_result = self._validate_notification_data(
|
||||
preference_data,
|
||||
["user_id", "tenant_id"]
|
||||
)
|
||||
|
||||
if not validation_result["is_valid"]:
|
||||
raise ValidationError(f"Invalid preference data: {validation_result['errors']}")
|
||||
|
||||
# Check if preferences already exist for this user and tenant
|
||||
existing_prefs = await self.get_user_preferences(
|
||||
preference_data["user_id"],
|
||||
preference_data["tenant_id"]
|
||||
)
|
||||
|
||||
if existing_prefs:
|
||||
raise DuplicateRecordError(f"Preferences already exist for user in this tenant")
|
||||
|
||||
# Set default values
|
||||
defaults = {
|
||||
"email_enabled": True,
|
||||
"email_alerts": True,
|
||||
"email_marketing": False,
|
||||
"email_reports": True,
|
||||
"whatsapp_enabled": False,
|
||||
"whatsapp_alerts": False,
|
||||
"whatsapp_reports": False,
|
||||
"push_enabled": True,
|
||||
"push_alerts": True,
|
||||
"push_reports": False,
|
||||
"quiet_hours_start": "22:00",
|
||||
"quiet_hours_end": "08:00",
|
||||
"timezone": "Europe/Madrid",
|
||||
"digest_frequency": "daily",
|
||||
"max_emails_per_day": 10,
|
||||
"language": "es"
|
||||
}
|
||||
|
||||
# Apply defaults for any missing fields
|
||||
for key, default_value in defaults.items():
|
||||
if key not in preference_data:
|
||||
preference_data[key] = default_value
|
||||
|
||||
# Create preferences
|
||||
preferences = await self.create(preference_data)
|
||||
|
||||
logger.info("User notification preferences created",
|
||||
preferences_id=preferences.id,
|
||||
user_id=preferences.user_id,
|
||||
tenant_id=preferences.tenant_id)
|
||||
|
||||
return preferences
|
||||
|
||||
except (ValidationError, DuplicateRecordError):
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error("Failed to create preferences",
|
||||
user_id=preference_data.get("user_id"),
|
||||
tenant_id=preference_data.get("tenant_id"),
|
||||
error=str(e))
|
||||
raise DatabaseError(f"Failed to create preferences: {str(e)}")
|
||||
|
||||
async def get_user_preferences(
|
||||
self,
|
||||
user_id: str,
|
||||
tenant_id: str
|
||||
) -> Optional[NotificationPreference]:
|
||||
"""Get notification preferences for a specific user and tenant"""
|
||||
try:
|
||||
preferences = await self.get_multi(
|
||||
filters={
|
||||
"user_id": user_id,
|
||||
"tenant_id": tenant_id
|
||||
},
|
||||
limit=1
|
||||
)
|
||||
return preferences[0] if preferences else None
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to get user preferences",
|
||||
user_id=user_id,
|
||||
tenant_id=tenant_id,
|
||||
error=str(e))
|
||||
raise DatabaseError(f"Failed to get preferences: {str(e)}")
|
||||
|
||||
async def update_user_preferences(
|
||||
self,
|
||||
user_id: str,
|
||||
tenant_id: str,
|
||||
update_data: Dict[str, Any]
|
||||
) -> Optional[NotificationPreference]:
|
||||
"""Update user notification preferences"""
|
||||
try:
|
||||
preferences = await self.get_user_preferences(user_id, tenant_id)
|
||||
if not preferences:
|
||||
# Create preferences if they don't exist
|
||||
create_data = {
|
||||
"user_id": user_id,
|
||||
"tenant_id": tenant_id,
|
||||
**update_data
|
||||
}
|
||||
return await self.create_preferences(create_data)
|
||||
|
||||
# Validate specific preference fields
|
||||
self._validate_preference_updates(update_data)
|
||||
|
||||
updated_preferences = await self.update(str(preferences.id), update_data)
|
||||
|
||||
logger.info("User preferences updated",
|
||||
preferences_id=preferences.id,
|
||||
user_id=user_id,
|
||||
tenant_id=tenant_id,
|
||||
updated_fields=list(update_data.keys()))
|
||||
|
||||
return updated_preferences
|
||||
|
||||
except ValidationError:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error("Failed to update user preferences",
|
||||
user_id=user_id,
|
||||
tenant_id=tenant_id,
|
||||
error=str(e))
|
||||
raise DatabaseError(f"Failed to update preferences: {str(e)}")
|
||||
|
||||
async def get_users_with_email_enabled(
|
||||
self,
|
||||
tenant_id: str,
|
||||
notification_category: str = "alerts"
|
||||
) -> List[NotificationPreference]:
|
||||
"""Get users who have email notifications enabled for a category"""
|
||||
try:
|
||||
filters = {
|
||||
"tenant_id": tenant_id,
|
||||
"email_enabled": True
|
||||
}
|
||||
|
||||
# Add category-specific filter
|
||||
if notification_category == "alerts":
|
||||
filters["email_alerts"] = True
|
||||
elif notification_category == "marketing":
|
||||
filters["email_marketing"] = True
|
||||
elif notification_category == "reports":
|
||||
filters["email_reports"] = True
|
||||
|
||||
return await self.get_multi(filters=filters)
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to get users with email enabled",
|
||||
tenant_id=tenant_id,
|
||||
category=notification_category,
|
||||
error=str(e))
|
||||
return []
|
||||
|
||||
async def get_users_with_whatsapp_enabled(
|
||||
self,
|
||||
tenant_id: str,
|
||||
notification_category: str = "alerts"
|
||||
) -> List[NotificationPreference]:
|
||||
"""Get users who have WhatsApp notifications enabled for a category"""
|
||||
try:
|
||||
filters = {
|
||||
"tenant_id": tenant_id,
|
||||
"whatsapp_enabled": True
|
||||
}
|
||||
|
||||
# Add category-specific filter
|
||||
if notification_category == "alerts":
|
||||
filters["whatsapp_alerts"] = True
|
||||
elif notification_category == "reports":
|
||||
filters["whatsapp_reports"] = True
|
||||
|
||||
return await self.get_multi(filters=filters)
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to get users with WhatsApp enabled",
|
||||
tenant_id=tenant_id,
|
||||
category=notification_category,
|
||||
error=str(e))
|
||||
return []
|
||||
|
||||
async def get_users_with_push_enabled(
|
||||
self,
|
||||
tenant_id: str,
|
||||
notification_category: str = "alerts"
|
||||
) -> List[NotificationPreference]:
|
||||
"""Get users who have push notifications enabled for a category"""
|
||||
try:
|
||||
filters = {
|
||||
"tenant_id": tenant_id,
|
||||
"push_enabled": True
|
||||
}
|
||||
|
||||
# Add category-specific filter
|
||||
if notification_category == "alerts":
|
||||
filters["push_alerts"] = True
|
||||
elif notification_category == "reports":
|
||||
filters["push_reports"] = True
|
||||
|
||||
return await self.get_multi(filters=filters)
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to get users with push enabled",
|
||||
tenant_id=tenant_id,
|
||||
category=notification_category,
|
||||
error=str(e))
|
||||
return []
|
||||
|
||||
async def check_quiet_hours(
|
||||
self,
|
||||
user_id: str,
|
||||
tenant_id: str,
|
||||
check_time: datetime = None
|
||||
) -> bool:
|
||||
"""Check if current time is within user's quiet hours"""
|
||||
try:
|
||||
preferences = await self.get_user_preferences(user_id, tenant_id)
|
||||
if not preferences:
|
||||
return False # No quiet hours if no preferences
|
||||
|
||||
if not check_time:
|
||||
check_time = datetime.utcnow()
|
||||
|
||||
# Convert time to user's timezone (simplified - using hour comparison)
|
||||
current_hour = check_time.hour
|
||||
quiet_start = int(preferences.quiet_hours_start.split(":")[0])
|
||||
quiet_end = int(preferences.quiet_hours_end.split(":")[0])
|
||||
|
||||
# Handle quiet hours that span midnight
|
||||
if quiet_start > quiet_end:
|
||||
return current_hour >= quiet_start or current_hour < quiet_end
|
||||
else:
|
||||
return quiet_start <= current_hour < quiet_end
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to check quiet hours",
|
||||
user_id=user_id,
|
||||
tenant_id=tenant_id,
|
||||
error=str(e))
|
||||
return False
|
||||
|
||||
async def get_users_for_digest(
|
||||
self,
|
||||
tenant_id: str,
|
||||
frequency: str = "daily"
|
||||
) -> List[NotificationPreference]:
|
||||
"""Get users who want digest notifications for a frequency"""
|
||||
try:
|
||||
return await self.get_multi(
|
||||
filters={
|
||||
"tenant_id": tenant_id,
|
||||
"digest_frequency": frequency,
|
||||
"email_enabled": True
|
||||
}
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to get users for digest",
|
||||
tenant_id=tenant_id,
|
||||
frequency=frequency,
|
||||
error=str(e))
|
||||
return []
|
||||
|
||||
async def can_send_email(
|
||||
self,
|
||||
user_id: str,
|
||||
tenant_id: str,
|
||||
category: str = "alerts"
|
||||
) -> Dict[str, Any]:
|
||||
"""Check if an email can be sent to a user based on their preferences"""
|
||||
try:
|
||||
preferences = await self.get_user_preferences(user_id, tenant_id)
|
||||
if not preferences:
|
||||
return {
|
||||
"can_send": True, # Default to allowing if no preferences set
|
||||
"reason": "No preferences found, using defaults"
|
||||
}
|
||||
|
||||
# Check if email is enabled
|
||||
if not preferences.email_enabled:
|
||||
return {
|
||||
"can_send": False,
|
||||
"reason": "Email notifications disabled"
|
||||
}
|
||||
|
||||
# Check category-specific settings
|
||||
category_enabled = True
|
||||
if category == "alerts" and not preferences.email_alerts:
|
||||
category_enabled = False
|
||||
elif category == "marketing" and not preferences.email_marketing:
|
||||
category_enabled = False
|
||||
elif category == "reports" and not preferences.email_reports:
|
||||
category_enabled = False
|
||||
|
||||
if not category_enabled:
|
||||
return {
|
||||
"can_send": False,
|
||||
"reason": f"Email {category} notifications disabled"
|
||||
}
|
||||
|
||||
# Check quiet hours
|
||||
if self.check_quiet_hours(user_id, tenant_id):
|
||||
return {
|
||||
"can_send": False,
|
||||
"reason": "Within user's quiet hours"
|
||||
}
|
||||
|
||||
# Check daily limit (simplified - would need to query recent notifications)
|
||||
# For now, just return the limit info
|
||||
return {
|
||||
"can_send": True,
|
||||
"max_daily_emails": preferences.max_emails_per_day,
|
||||
"language": preferences.language,
|
||||
"timezone": preferences.timezone
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to check if email can be sent",
|
||||
user_id=user_id,
|
||||
tenant_id=tenant_id,
|
||||
error=str(e))
|
||||
return {
|
||||
"can_send": True, # Default to allowing on error
|
||||
"reason": "Error checking preferences"
|
||||
}
|
||||
|
||||
async def bulk_update_preferences(
|
||||
self,
|
||||
tenant_id: str,
|
||||
update_data: Dict[str, Any],
|
||||
user_ids: List[str] = None
|
||||
) -> int:
|
||||
"""Bulk update preferences for multiple users"""
|
||||
try:
|
||||
conditions = ["tenant_id = :tenant_id"]
|
||||
params = {"tenant_id": tenant_id}
|
||||
|
||||
if user_ids:
|
||||
placeholders = ", ".join([f":user_id_{i}" for i in range(len(user_ids))])
|
||||
conditions.append(f"user_id IN ({placeholders})")
|
||||
for i, user_id in enumerate(user_ids):
|
||||
params[f"user_id_{i}"] = user_id
|
||||
|
||||
# Build update clause
|
||||
update_fields = []
|
||||
for key, value in update_data.items():
|
||||
update_fields.append(f"{key} = :update_{key}")
|
||||
params[f"update_{key}"] = value
|
||||
|
||||
params["updated_at"] = datetime.utcnow()
|
||||
update_fields.append("updated_at = :updated_at")
|
||||
|
||||
query_text = f"""
|
||||
UPDATE notification_preferences
|
||||
SET {', '.join(update_fields)}
|
||||
WHERE {' AND '.join(conditions)}
|
||||
"""
|
||||
|
||||
result = await self.session.execute(text(query_text), params)
|
||||
updated_count = result.rowcount
|
||||
|
||||
logger.info("Bulk preferences update completed",
|
||||
tenant_id=tenant_id,
|
||||
updated_count=updated_count,
|
||||
updated_fields=list(update_data.keys()))
|
||||
|
||||
return updated_count
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to bulk update preferences",
|
||||
tenant_id=tenant_id,
|
||||
error=str(e))
|
||||
raise DatabaseError(f"Bulk update failed: {str(e)}")
|
||||
|
||||
async def delete_user_preferences(
|
||||
self,
|
||||
user_id: str,
|
||||
tenant_id: str
|
||||
) -> bool:
|
||||
"""Delete user preferences (when user leaves tenant)"""
|
||||
try:
|
||||
preferences = await self.get_user_preferences(user_id, tenant_id)
|
||||
if not preferences:
|
||||
return False
|
||||
|
||||
await self.delete(str(preferences.id))
|
||||
|
||||
logger.info("User preferences deleted",
|
||||
user_id=user_id,
|
||||
tenant_id=tenant_id)
|
||||
|
||||
return True
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to delete user preferences",
|
||||
user_id=user_id,
|
||||
tenant_id=tenant_id,
|
||||
error=str(e))
|
||||
raise DatabaseError(f"Failed to delete preferences: {str(e)}")
|
||||
|
||||
def _validate_preference_updates(self, update_data: Dict[str, Any]) -> None:
|
||||
"""Validate preference update data"""
|
||||
# Validate boolean fields
|
||||
boolean_fields = [
|
||||
"email_enabled", "email_alerts", "email_marketing", "email_reports",
|
||||
"whatsapp_enabled", "whatsapp_alerts", "whatsapp_reports",
|
||||
"push_enabled", "push_alerts", "push_reports"
|
||||
]
|
||||
|
||||
for field in boolean_fields:
|
||||
if field in update_data and not isinstance(update_data[field], bool):
|
||||
raise ValidationError(f"{field} must be a boolean value")
|
||||
|
||||
# Validate time format for quiet hours
|
||||
time_fields = ["quiet_hours_start", "quiet_hours_end"]
|
||||
for field in time_fields:
|
||||
if field in update_data:
|
||||
time_value = update_data[field]
|
||||
if not isinstance(time_value, str) or len(time_value) != 5 or ":" not in time_value:
|
||||
raise ValidationError(f"{field} must be in HH:MM format")
|
||||
|
||||
try:
|
||||
hour, minute = time_value.split(":")
|
||||
hour, minute = int(hour), int(minute)
|
||||
if hour < 0 or hour > 23 or minute < 0 or minute > 59:
|
||||
raise ValueError()
|
||||
except ValueError:
|
||||
raise ValidationError(f"{field} must be a valid time in HH:MM format")
|
||||
|
||||
# Validate digest frequency
|
||||
if "digest_frequency" in update_data:
|
||||
valid_frequencies = ["none", "daily", "weekly"]
|
||||
if update_data["digest_frequency"] not in valid_frequencies:
|
||||
raise ValidationError(f"digest_frequency must be one of: {valid_frequencies}")
|
||||
|
||||
# Validate max emails per day
|
||||
if "max_emails_per_day" in update_data:
|
||||
max_emails = update_data["max_emails_per_day"]
|
||||
if not isinstance(max_emails, int) or max_emails < 0 or max_emails > 100:
|
||||
raise ValidationError("max_emails_per_day must be an integer between 0 and 100")
|
||||
|
||||
# Validate language
|
||||
if "language" in update_data:
|
||||
valid_languages = ["es", "en", "fr", "de"]
|
||||
if update_data["language"] not in valid_languages:
|
||||
raise ValidationError(f"language must be one of: {valid_languages}")
|
||||
450
services/notification/app/repositories/template_repository.py
Normal file
450
services/notification/app/repositories/template_repository.py
Normal file
@@ -0,0 +1,450 @@
|
||||
"""
|
||||
Template Repository
|
||||
Repository for notification template operations
|
||||
"""
|
||||
|
||||
from typing import Optional, List, Dict, Any
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy import select, text, and_
|
||||
from datetime import datetime
|
||||
import structlog
|
||||
import json
|
||||
|
||||
from .base import NotificationBaseRepository
|
||||
from app.models.notifications import NotificationTemplate, NotificationType
|
||||
from shared.database.exceptions import DatabaseError, ValidationError, DuplicateRecordError
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
|
||||
class TemplateRepository(NotificationBaseRepository):
    """Repository for notification template operations.

    Handles creation and lookup of NotificationTemplate rows by key,
    category and type, resolving tenant-specific and system templates.
    """

    def __init__(self, session: AsyncSession, cache_ttl: Optional[int] = 1800):
        # Templates don't change often, so use a longer cache time
        # (1800 s = 30 minutes) than the other notification repositories.
        super().__init__(NotificationTemplate, session, cache_ttl)
|
||||
|
||||
    async def create_template(self, template_data: Dict[str, Any]) -> NotificationTemplate:
        """Create a new notification template with validation.

        Validates required fields, enforces template_key uniqueness,
        normalizes required_variables to a JSON string, applies defaults
        (language, is_active, is_system, default_priority) and persists.

        Raises:
            ValidationError: invalid or missing fields / bad JSON.
            DuplicateRecordError: template_key already in use.
            DatabaseError: any other failure.
        """
        try:
            # Validate template data for presence of the mandatory fields.
            required_fields = ["template_key", "name", "category", "type", "body_template"]
            validation_result = self._validate_notification_data(template_data, required_fields)

            # Additional template-specific validation (only worthwhile when
            # the basic field check already passed).
            if validation_result["is_valid"]:
                # Check if template_key already exists
                existing_template = await self.get_by_template_key(template_data["template_key"])
                if existing_template:
                    raise DuplicateRecordError(f"Template key {template_data['template_key']} already exists")

                # Normalize required_variables: lists are serialized to JSON;
                # strings must already BE valid JSON.
                if "required_variables" in template_data:
                    if isinstance(template_data["required_variables"], list):
                        template_data["required_variables"] = json.dumps(template_data["required_variables"])
                    elif isinstance(template_data["required_variables"], str):
                        # Verify it's valid JSON
                        try:
                            json.loads(template_data["required_variables"])
                        except json.JSONDecodeError:
                            validation_result["errors"].append("Invalid JSON format in required_variables")
                            validation_result["is_valid"] = False

            if not validation_result["is_valid"]:
                raise ValidationError(f"Invalid template data: {validation_result['errors']}")

            # Set default values for optional columns (mutates the input dict).
            if "language" not in template_data:
                template_data["language"] = "es"
            if "is_active" not in template_data:
                template_data["is_active"] = True
            if "is_system" not in template_data:
                template_data["is_system"] = False
            if "default_priority" not in template_data:
                template_data["default_priority"] = "normal"

            # Create template
            template = await self.create(template_data)

            logger.info("Notification template created successfully",
                        template_id=template.id,
                        template_key=template.template_key,
                        type=template.type.value,
                        category=template.category)

            return template

        except (ValidationError, DuplicateRecordError):
            raise
        except Exception as e:
            logger.error("Failed to create template",
                        template_key=template_data.get("template_key"),
                        error=str(e))
            raise DatabaseError(f"Failed to create template: {str(e)}")
|
||||
|
||||
async def get_by_template_key(self, template_key: str) -> Optional[NotificationTemplate]:
    """Look up a single template by its unique template key.

    Returns None when no template carries the given key; wraps any
    lower-level failure in DatabaseError.
    """
    try:
        return await self.get_by_field("template_key", template_key)
    except Exception as exc:
        logger.error("Failed to get template by key",
                     template_key=template_key,
                     error=str(exc))
        raise DatabaseError(f"Failed to get template: {str(exc)}")
|
||||
|
||||
async def get_templates_by_category(
    self,
    category: str,
    tenant_id: Optional[str] = None,
    include_system: bool = True
) -> List[NotificationTemplate]:
    """Get active templates in a category.

    Args:
        category: Template category to filter on.
        tenant_id: When given, include this tenant's templates.
        include_system: When True, also include system templates.

    Returns:
        Matching active templates; [] on any error (best-effort read,
        matching the original error-swallowing contract) or when neither
        tenant_id nor include_system selects anything.
    """
    try:
        filters = {"category": category, "is_active": True}

        if tenant_id and include_system:
            # Combine tenant-specific and system templates.
            tenant_templates = await self.get_multi(
                filters={**filters, "tenant_id": tenant_id}
            )
            system_templates = await self.get_multi(
                filters={**filters, "is_system": True}
            )
            # Fix: a tenant-owned template flagged is_system=True matches
            # both queries and previously appeared twice in the result.
            # Deduplicate by primary key, preserving order.
            seen_ids = set()
            combined: List[NotificationTemplate] = []
            for template in tenant_templates + system_templates:
                if template.id not in seen_ids:
                    seen_ids.add(template.id)
                    combined.append(template)
            return combined
        elif tenant_id:
            # Only tenant-specific templates
            filters["tenant_id"] = tenant_id
            return await self.get_multi(filters=filters)
        elif include_system:
            # Only system templates
            filters["is_system"] = True
            return await self.get_multi(filters=filters)
        else:
            return []

    except Exception as e:
        logger.error("Failed to get templates by category",
                     category=category,
                     tenant_id=tenant_id,
                     error=str(e))
        return []
|
||||
|
||||
async def get_templates_by_type(
    self,
    notification_type: NotificationType,
    tenant_id: Optional[str] = None,
    include_system: bool = True
) -> List[NotificationTemplate]:
    """Get active templates for a notification type.

    Args:
        notification_type: Channel/type of notification to filter on.
        tenant_id: When given, include this tenant's templates.
        include_system: When True, also include system templates.

    Returns:
        Matching active templates; [] on any error (best-effort read,
        matching the original error-swallowing contract) or when neither
        tenant_id nor include_system selects anything.
    """
    try:
        filters = {"type": notification_type, "is_active": True}

        if tenant_id and include_system:
            # Combine tenant-specific and system templates.
            tenant_templates = await self.get_multi(
                filters={**filters, "tenant_id": tenant_id}
            )
            system_templates = await self.get_multi(
                filters={**filters, "is_system": True}
            )
            # Fix: a tenant-owned template flagged is_system=True matches
            # both queries and previously appeared twice in the result.
            # Deduplicate by primary key, preserving order.
            seen_ids = set()
            combined: List[NotificationTemplate] = []
            for template in tenant_templates + system_templates:
                if template.id not in seen_ids:
                    seen_ids.add(template.id)
                    combined.append(template)
            return combined
        elif tenant_id:
            # Only tenant-specific templates
            filters["tenant_id"] = tenant_id
            return await self.get_multi(filters=filters)
        elif include_system:
            # Only system templates
            filters["is_system"] = True
            return await self.get_multi(filters=filters)
        else:
            return []

    except Exception as e:
        logger.error("Failed to get templates by type",
                     notification_type=notification_type.value,
                     tenant_id=tenant_id,
                     error=str(e))
        return []
|
||||
|
||||
async def update_template(
    self,
    template_id: str,
    update_data: Dict[str, Any],
    allow_system_update: bool = False
) -> Optional[NotificationTemplate]:
    """Apply a partial update to a template.

    System templates are read-only unless allow_system_update is set.
    Returns None when the template does not exist; raises ValidationError
    on bad input and DatabaseError on storage failure.
    """
    try:
        template = await self.get_by_id(template_id)
        if not template:
            return None

        # Guard: system templates may only change when the caller opts in.
        if template.is_system and not allow_system_update:
            raise ValidationError("Cannot update system templates")

        # Normalize required_variables: lists are serialized to JSON,
        # strings must already parse as JSON.
        if "required_variables" in update_data:
            variables = update_data["required_variables"]
            if isinstance(variables, list):
                update_data["required_variables"] = json.dumps(variables)
            elif isinstance(variables, str):
                try:
                    json.loads(variables)
                except json.JSONDecodeError:
                    raise ValidationError("Invalid JSON format in required_variables")

        updated_template = await self.update(template_id, update_data)

        logger.info("Template updated successfully",
                    template_id=template_id,
                    template_key=template.template_key,
                    updated_fields=list(update_data.keys()))

        return updated_template

    except ValidationError:
        raise
    except Exception as exc:
        logger.error("Failed to update template",
                     template_id=template_id,
                     error=str(exc))
        raise DatabaseError(f"Failed to update template: {str(exc)}")
|
||||
|
||||
async def deactivate_template(self, template_id: str) -> Optional[NotificationTemplate]:
    """Soft-delete a template by flipping is_active off.

    System templates cannot be deactivated. Returns None when the
    template does not exist.
    """
    try:
        template = await self.get_by_id(template_id)
        if not template:
            return None

        # Guard: system templates must stay active.
        if template.is_system:
            raise ValidationError("Cannot deactivate system templates")

        changes = {
            "is_active": False,
            "updated_at": datetime.utcnow(),
        }
        updated_template = await self.update(template_id, changes)

        logger.info("Template deactivated",
                    template_id=template_id,
                    template_key=template.template_key)

        return updated_template

    except ValidationError:
        raise
    except Exception as exc:
        logger.error("Failed to deactivate template",
                     template_id=template_id,
                     error=str(exc))
        raise DatabaseError(f"Failed to deactivate template: {str(exc)}")
|
||||
|
||||
async def activate_template(self, template_id: str) -> Optional[NotificationTemplate]:
    """Re-enable a template by flipping is_active on.

    Returns the updated template, or None when no row matched.
    """
    try:
        changes = {
            "is_active": True,
            "updated_at": datetime.utcnow(),
        }
        updated_template = await self.update(template_id, changes)

        if updated_template:
            logger.info("Template activated",
                        template_id=template_id,
                        template_key=updated_template.template_key)

        return updated_template

    except Exception as exc:
        logger.error("Failed to activate template",
                     template_id=template_id,
                     error=str(exc))
        raise DatabaseError(f"Failed to activate template: {str(exc)}")
|
||||
|
||||
async def search_templates(
    self,
    search_term: str,
    tenant_id: Optional[str] = None,
    category: Optional[str] = None,
    notification_type: Optional[NotificationType] = None,
    include_system: bool = True,
    limit: int = 50
) -> List[NotificationTemplate]:
    """Search templates by name, description, or template key.

    Builds a raw SQL query over notification_templates with a
    case-insensitive substring match, plus optional tenant/system,
    category, and type filters. Returns at most `limit` active
    templates ordered by name; returns [] on any error (best-effort).

    NOTE(review): search_term is bound as a parameter (safe from SQL
    injection), but `%` and `_` inside it act as LIKE wildcards — no
    escaping is applied. Confirm whether that is intended.
    """
    try:
        # Static condition fragments; only bound parameters carry user
        # input, so the f-string assembly below is injection-safe.
        conditions = [
            "is_active = true",
            "(LOWER(name) LIKE LOWER(:search_term) OR LOWER(description) LIKE LOWER(:search_term) OR LOWER(template_key) LIKE LOWER(:search_term))"
        ]
        params = {"search_term": f"%{search_term}%", "limit": limit}

        # Add tenant/system filter — mirrors get_templates_by_category:
        # tenant + system, tenant only, system only, in that priority.
        if tenant_id and include_system:
            conditions.append("(tenant_id = :tenant_id OR is_system = true)")
            params["tenant_id"] = tenant_id
        elif tenant_id:
            conditions.append("tenant_id = :tenant_id")
            params["tenant_id"] = tenant_id
        elif include_system:
            conditions.append("is_system = true")

        # Add category filter
        if category:
            conditions.append("category = :category")
            params["category"] = category

        # Add type filter (enum is bound by its string value)
        if notification_type:
            conditions.append("type = :notification_type")
            params["notification_type"] = notification_type.value

        query_text = f"""
            SELECT * FROM notification_templates
            WHERE {' AND '.join(conditions)}
            ORDER BY name ASC
            LIMIT :limit
        """

        result = await self.session.execute(text(query_text), params)

        # Rehydrate ORM-model instances from raw rows; the DB returns the
        # type column as a string, so it is converted back to the enum
        # before constructing the model.
        templates = []
        for row in result.fetchall():
            record_dict = dict(row._mapping)
            # Convert enum string back to enum object
            record_dict["type"] = NotificationType(record_dict["type"])
            template = self.model(**record_dict)
            templates.append(template)

        return templates

    except Exception as e:
        logger.error("Failed to search templates",
                     search_term=search_term,
                     error=str(e))
        return []
|
||||
|
||||
async def get_template_usage_statistics(self, template_id: str) -> Dict[str, Any]:
    """Aggregate delivery statistics for one template.

    Counts rows in the notifications table that reference this template
    and summarizes totals, success/failure counts, a 30-day window, and
    first/last usage timestamps. On failure (or unknown template) the
    dict carries an "error" key instead of raising.
    """
    try:
        template = await self.get_by_id(template_id)
        if not template:
            return {"error": "Template not found"}

        # Get usage statistics from notifications table.
        # NOTE(review): the WHERE clause compares the notifications
        # column `template_id` against this template's template_key —
        # presumably notifications store the key, not the id; confirm
        # against the notifications schema.
        usage_query = text("""
            SELECT
                COUNT(*) as total_uses,
                COUNT(CASE WHEN status = 'delivered' THEN 1 END) as successful_uses,
                COUNT(CASE WHEN status = 'failed' THEN 1 END) as failed_uses,
                COUNT(CASE WHEN created_at >= NOW() - INTERVAL '30 days' THEN 1 END) as uses_last_30_days,
                MIN(created_at) as first_used,
                MAX(created_at) as last_used
            FROM notifications
            WHERE template_id = :template_key
        """)

        result = await self.session.execute(usage_query, {"template_key": template.template_key})
        stats = result.fetchone()

        # Guard against NULL aggregates on an empty result set.
        total = stats.total_uses or 0
        successful = stats.successful_uses or 0
        success_rate = (successful / total * 100) if total > 0 else 0

        return {
            "template_id": template_id,
            "template_key": template.template_key,
            "total_uses": total,
            "successful_uses": successful,
            "failed_uses": stats.failed_uses or 0,
            "success_rate_percent": round(success_rate, 2),
            "uses_last_30_days": stats.uses_last_30_days or 0,
            "first_used": stats.first_used.isoformat() if stats.first_used else None,
            "last_used": stats.last_used.isoformat() if stats.last_used else None
        }

    except Exception as e:
        logger.error("Failed to get template usage statistics",
                     template_id=template_id,
                     error=str(e))
        return {
            "template_id": template_id,
            "error": str(e)
        }
|
||||
|
||||
async def duplicate_template(
    self,
    template_id: str,
    new_template_key: str,
    new_name: str,
    tenant_id: Optional[str] = None
) -> Optional[NotificationTemplate]:
    """Create a copy of an existing template under a new key and name.

    The copy inherits content/config fields from the source, is owned by
    `tenant_id` (if given), starts active, and is never a system
    template. Returns None when the source does not exist; raises
    DuplicateRecordError when the new key is taken.
    """
    try:
        source = await self.get_by_id(template_id)
        if not source:
            return None

        # Refuse to duplicate onto an existing key.
        if await self.get_by_template_key(new_template_key):
            raise DuplicateRecordError(f"Template key {new_template_key} already exists")

        # Carry over the content/configuration fields verbatim.
        inherited = {
            field: getattr(source, field)
            for field in (
                "category",
                "type",
                "subject_template",
                "body_template",
                "html_template",
                "language",
                "default_priority",
                "required_variables",
            )
        }
        payload = {
            "template_key": new_template_key,
            "name": new_name,
            "description": f"Copy of {source.name}",
            "tenant_id": tenant_id,
            "is_active": True,
            "is_system": False,  # Duplicates are never system templates
            **inherited,
        }

        duplicated_template = await self.create(payload)

        logger.info("Template duplicated successfully",
                    original_template_id=template_id,
                    new_template_id=duplicated_template.id,
                    new_template_key=new_template_key)

        return duplicated_template

    except DuplicateRecordError:
        raise
    except Exception as exc:
        logger.error("Failed to duplicate template",
                     template_id=template_id,
                     new_template_key=new_template_key,
                     error=str(exc))
        raise DatabaseError(f"Failed to duplicate template: {str(exc)}")
|
||||
|
||||
async def get_system_templates(self) -> List[NotificationTemplate]:
    """Return every active system template, ordered by category.

    Best-effort: returns [] on any error.
    """
    try:
        filters = {"is_system": True, "is_active": True}
        return await self.get_multi(filters=filters, order_by="category")
    except Exception as exc:
        logger.error("Failed to get system templates", error=str(exc))
        return []
|
||||
|
||||
async def get_tenant_templates(self, tenant_id: str) -> List[NotificationTemplate]:
    """Return every active template owned by one tenant, ordered by category.

    Best-effort: returns [] on any error.
    """
    try:
        filters = {"tenant_id": tenant_id, "is_active": True}
        return await self.get_multi(filters=filters, order_by="category")
    except Exception as exc:
        logger.error("Failed to get tenant templates",
                     tenant_id=tenant_id,
                     error=str(exc))
        return []
|
||||
@@ -0,0 +1,23 @@
|
||||
"""
|
||||
Notification Service Layer
|
||||
Business logic services for notification operations
|
||||
"""
|
||||
|
||||
from .notification_service import NotificationService, EnhancedNotificationService
|
||||
from .email_service import EmailService
|
||||
from .whatsapp_service import WhatsAppService
|
||||
from .messaging import (
|
||||
publish_notification_sent,
|
||||
publish_notification_failed,
|
||||
publish_notification_delivered
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
"NotificationService",
|
||||
"EnhancedNotificationService",
|
||||
"EmailService",
|
||||
"WhatsAppService",
|
||||
"publish_notification_sent",
|
||||
"publish_notification_failed",
|
||||
"publish_notification_delivered"
|
||||
]
|
||||
File diff suppressed because it is too large
Load Diff
Reference in New Issue
Block a user