Fix a few issues

Urtzi Alfaro
2025-09-26 12:12:17 +02:00
parent d573c38621
commit a27f159e24
32 changed files with 2694 additions and 575 deletions


@@ -239,15 +239,18 @@ class BaseAlertService:
     # Publishing (Updated for type)
     async def publish_item(self, tenant_id: UUID, item: Dict[str, Any], item_type: str = 'alert'):
         """Publish alert or recommendation to RabbitMQ with deduplication"""
         try:
-            # Check for duplicate
-            item_key = f"{tenant_id}:{item_type}:{item['type']}:{item.get('metadata', {}).get('id', '')}"
+            # Generate proper deduplication key based on alert type and specific identifiers
+            unique_id = self._generate_unique_identifier(item)
+            item_key = f"{tenant_id}:{item_type}:{item['type']}:{unique_id}"
             if await self.is_duplicate_item(item_key):
-                logger.debug("Duplicate item skipped",
-                             service=self.config.SERVICE_NAME,
-                             item_type=item_type,
-                             alert_type=item['type'])
+                logger.debug("Duplicate item skipped",
+                             service=self.config.SERVICE_NAME,
+                             item_type=item_type,
+                             alert_type=item['type'],
+                             dedup_key=item_key)
                 return False

             # Add metadata
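For reference, with the new scheme every alert of a given type for the same entity maps to the same key; the tenant and ingredient IDs below are made-up values used only to illustrate the format.

from uuid import UUID

tenant_id = UUID("11111111-2222-3333-4444-555555555555")   # hypothetical tenant
item = {"type": "low_stock_warning", "metadata": {"ingredient_id": "ing-42"}}  # hypothetical alert

# _generate_unique_identifier(item) would return "ing-42" for this alert type
item_key = f"{tenant_id}:alert:{item['type']}:ing-42"
print(item_key)
# 11111111-2222-3333-4444-555555555555:alert:low_stock_warning:ing-42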
@@ -302,12 +305,49 @@ class BaseAlertService:
                          item_type=item_type)
             return False
+
+    def _generate_unique_identifier(self, item: Dict[str, Any]) -> str:
+        """Generate unique identifier for deduplication based on alert type and content"""
+        alert_type = item.get('type', '')
+        metadata = item.get('metadata', {})
+
+        # Build the identifier from the fields that make each alert type unique
+        if alert_type in ('overstock_warning', 'critical_stock_shortage', 'low_stock_warning'):
+            return metadata.get('ingredient_id', '')
+        elif alert_type == 'expired_products':
+            # For expired-products alerts, hash the sorted expired item IDs
+            expired_items = metadata.get('expired_items', [])
+            if expired_items:
+                expired_ids = sorted(str(expired.get('id', '')) for expired in expired_items)
+                import hashlib
+                return hashlib.md5(':'.join(expired_ids).encode()).hexdigest()[:16]
+            return ''
+        elif alert_type == 'urgent_expiry':
+            return f"{metadata.get('ingredient_id', '')}:{metadata.get('stock_id', '')}"
+        elif alert_type == 'temperature_breach':
+            return f"{metadata.get('sensor_id', '')}:{metadata.get('location', '')}"
+        elif alert_type == 'stock_depleted_by_order':
+            return f"{metadata.get('order_id', '')}:{metadata.get('ingredient_id', '')}"
+        elif alert_type == 'expired_batches_auto_processed':
+            # Use the processing date (date part only) and batch count as identifier
+            processing_date = metadata.get('processing_date', '')[:10]
+            total_batches = metadata.get('total_batches_processed', 0)
+            return f"{processing_date}:{total_batches}"
+        elif alert_type == 'inventory_optimization':
+            return f"opt:{metadata.get('ingredient_id', '')}:{metadata.get('recommendation_type', '')}"
+        elif alert_type == 'waste_reduction':
+            return f"waste:{metadata.get('ingredient_id', '')}"
+        else:
+            # Fall back to a generic metadata id or empty string
+            return metadata.get('id', '')
+
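As a standalone illustration of the expired_products branch above, the fingerprint depends only on the set of expired item IDs, not on their order; the IDs below are made up.

import hashlib

expired_items = [{"id": "batch-9"}, {"id": "batch-2"}, {"id": "batch-5"}]  # hypothetical items
expired_ids = sorted(str(entry.get("id", "")) for entry in expired_items)
dedup_fragment = hashlib.md5(":".join(expired_ids).encode()).hexdigest()[:16]
print(dedup_fragment)  # identical no matter how expired_items was ordered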
     async def is_duplicate_item(self, item_key: str, window_minutes: int = 15) -> bool:
         """Prevent duplicate items within time window"""
         key = f"item_sent:{item_key}"
         try:
             result = await self.redis.set(
                 key, "1",
                 ex=window_minutes * 60,
                 nx=True
             )
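The time-window check relies on Redis SET with NX and EX: only the first caller inside the window creates the key, so later publishes of the same item are treated as duplicates until the key expires. A minimal standalone sketch of that behavior, assuming redis-py >= 5 (redis.asyncio), a Redis instance on localhost, and a made-up dedup key:

import asyncio
import redis.asyncio as redis

async def main():
    r = redis.Redis()  # assumes a local Redis on the default port
    key = "item_sent:demo-tenant:alert:low_stock_warning:ing-42"  # hypothetical dedup key
    first = await r.set(key, "1", ex=15 * 60, nx=True)   # True: key created, item gets published
    second = await r.set(key, "1", ex=15 * 60, nx=True)  # None: key exists, item is a duplicate
    print(first, second)
    await r.aclose()

asyncio.run(main())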