Fix new services implementation 2
@@ -11,13 +11,16 @@ import base64
import pandas as pd
from typing import Dict, Any, List, Optional, Union
from datetime import datetime, timezone
from uuid import UUID
import structlog
import re
import asyncio

from app.repositories.sales_repository import SalesRepository
from app.models.sales import SalesData
from app.schemas.sales import SalesDataCreate
from app.core.database import get_db_transaction
from app.services.inventory_client import InventoryServiceClient

logger = structlog.get_logger()

@@ -79,7 +82,10 @@ class DataImportService:

    def __init__(self):
        """Initialize enhanced import service"""
        self.inventory_client = InventoryServiceClient()
        # Product resolution cache for the import session
        self.product_cache = {}  # product_name -> inventory_product_id
        self.failed_products = set()  # Track products that failed to resolve

    async def validate_import_data(self, data: Dict[str, Any]) -> SalesValidationResult:
        """Enhanced validation with better error handling and suggestions"""

@@ -349,6 +355,9 @@ class DataImportService:
        start_time = datetime.utcnow()

        try:
            # Clear cache for new import session
            self._clear_import_cache()

            logger.info("Starting enhanced data import",
                        filename=filename,
                        format=file_format,

@@ -451,12 +460,24 @@ class DataImportService:
                    warnings.extend(parsed_data.get("warnings", []))
                    continue

                # Resolve product name to inventory_product_id
                inventory_product_id = await self._resolve_product_to_inventory_id(
                    parsed_data["product_name"],
                    parsed_data.get("product_category"),
                    tenant_id
                )

                if not inventory_product_id:
                    error_msg = f"Row {index + 1}: Could not resolve product '{parsed_data['product_name']}' to inventory ID"
                    errors.append(error_msg)
                    logger.warning("Product resolution failed", error=error_msg)
                    continue

                # Create sales record with enhanced data
                sales_data = SalesDataCreate(
                    tenant_id=tenant_id,
                    date=parsed_data["date"],
                    product_name=parsed_data["product_name"],
                    product_category=parsed_data.get("product_category"),
                    inventory_product_id=inventory_product_id,
                    quantity_sold=parsed_data["quantity_sold"],
                    unit_price=parsed_data.get("unit_price"),
                    revenue=parsed_data.get("revenue"),

@@ -619,12 +640,24 @@ class DataImportService:
                    warnings.extend(parsed_data.get("warnings", []))
                    continue

                # Resolve product name to inventory_product_id
                inventory_product_id = await self._resolve_product_to_inventory_id(
                    parsed_data["product_name"],
                    parsed_data.get("product_category"),
                    tenant_id
                )

                if not inventory_product_id:
                    error_msg = f"Row {index + 1}: Could not resolve product '{parsed_data['product_name']}' to inventory ID"
                    errors.append(error_msg)
                    logger.warning("Product resolution failed", error=error_msg)
                    continue

                # Create enhanced sales record
                sales_data = SalesDataCreate(
                    tenant_id=tenant_id,
                    date=parsed_data["date"],
                    product_name=parsed_data["product_name"],
                    product_category=parsed_data.get("product_category"),
                    inventory_product_id=inventory_product_id,
                    quantity_sold=parsed_data["quantity_sold"],
                    unit_price=parsed_data.get("unit_price"),
                    revenue=parsed_data.get("revenue"),

@@ -874,6 +907,94 @@ class DataImportService:

        return cleaned if cleaned else "Producto sin nombre"

    def _clear_import_cache(self):
        """Clear the product resolution cache for a new import session"""
        self.product_cache.clear()
        self.failed_products.clear()
        logger.info("Import cache cleared for new session")

    async def _resolve_product_to_inventory_id(self, product_name: str, product_category: Optional[str], tenant_id: UUID) -> Optional[UUID]:
        """Resolve a product name to an inventory_product_id via the inventory service with caching and rate limiting"""

        # Check cache first
        if product_name in self.product_cache:
            logger.debug("Product resolved from cache", product_name=product_name, tenant_id=tenant_id)
            return self.product_cache[product_name]

        # Skip if this product already failed to resolve
        if product_name in self.failed_products:
            logger.debug("Skipping previously failed product", product_name=product_name, tenant_id=tenant_id)
            return None

        max_retries = 3
        base_delay = 1.0  # Start with 1 second delay

        for attempt in range(max_retries):
            try:
                # Add delay before API calls to avoid rate limiting
                if attempt > 0:
                    delay = base_delay * (2 ** (attempt - 1))  # Exponential backoff
                    logger.info(f"Retrying product resolution after {delay}s delay",
                                product_name=product_name, attempt=attempt, tenant_id=tenant_id)
                    await asyncio.sleep(delay)

                # First try to search for existing product by name
                products = await self.inventory_client.search_products(product_name, tenant_id)

                if products:
                    # Return the first matching product's ID
                    product_id = products[0].get('id')
                    if product_id:
                        uuid_id = UUID(str(product_id))
                        self.product_cache[product_name] = uuid_id  # Cache for future use
                        logger.info("Resolved product to existing inventory ID",
                                    product_name=product_name, product_id=product_id, tenant_id=tenant_id)
                        return uuid_id

                # Add small delay before creation attempt to avoid hitting rate limits
                await asyncio.sleep(0.5)

                # If not found, create a new ingredient/product in inventory
                ingredient_data = {
                    'name': product_name,
                    'type': 'finished_product',  # Assuming sales are of finished products
                    'unit': 'unit',  # Default unit
                    'current_stock': 0,  # No stock initially
                    'reorder_point': 0,
                    'cost_per_unit': 0,
                    'category': product_category or 'general'
                }

                created_product = await self.inventory_client.create_ingredient(ingredient_data, str(tenant_id))
                if created_product and created_product.get('id'):
                    product_id = created_product['id']
                    uuid_id = UUID(str(product_id))
                    self.product_cache[product_name] = uuid_id  # Cache for future use
                    logger.info("Created new inventory product for sales data",
                                product_name=product_name, product_id=product_id, tenant_id=tenant_id)
                    return uuid_id

                logger.warning("Failed to resolve or create product in inventory",
                               product_name=product_name, tenant_id=tenant_id, attempt=attempt)

            except Exception as e:
                error_str = str(e)
                if "429" in error_str or "rate limit" in error_str.lower():
                    logger.warning("Rate limit hit, retrying",
                                   product_name=product_name, attempt=attempt, error=error_str, tenant_id=tenant_id)
                    if attempt < max_retries - 1:
                        continue  # Retry with exponential backoff
                else:
                    logger.error("Error resolving product to inventory ID",
                                 error=error_str, product_name=product_name, tenant_id=tenant_id)
                    break  # Don't retry for non-rate-limit errors

        # If all retries failed, mark as failed and return None
        self.failed_products.add(product_name)
        logger.error("Failed to resolve product after all retries",
                     product_name=product_name, tenant_id=tenant_id)
        return None

    def _structure_messages(self, messages: List[Union[str, Dict]]) -> List[Dict[str, Any]]:
        """Convert string messages to structured format"""
        structured = []
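
Below is a minimal, self-contained sketch of the cache-plus-backoff pattern that _resolve_product_to_inventory_id adds in this commit. It is an illustration rather than part of the patch: FakeInventoryClient, resolve and main are hypothetical stand-ins for the real InventoryServiceClient and service code, and the create_ingredient fallback is omitted for brevity. Only the retry schedule (immediate attempt, then 1 s and 2 s delays from base_delay * 2 ** (attempt - 1) with max_retries = 3) and the positive/negative caches mirror the real method.

# Illustrative sketch only; FakeInventoryClient stands in for InventoryServiceClient.
import asyncio
from typing import Dict, List, Optional, Set
from uuid import UUID, uuid4


class FakeInventoryClient:
    """Fails with a simulated 429 on the first call, then returns a match."""

    def __init__(self) -> None:
        self.calls = 0

    async def search_products(self, name: str, tenant_id: UUID) -> List[dict]:
        self.calls += 1
        if self.calls == 1:
            raise RuntimeError("429 Too Many Requests")
        return [{"id": str(uuid4()), "name": name}]


async def resolve(client: FakeInventoryClient, name: str, tenant_id: UUID,
                  cache: Dict[str, UUID], failed: Set[str],
                  max_retries: int = 3, base_delay: float = 1.0) -> Optional[UUID]:
    """Cache hit, negative-cache skip, then retries with exponential backoff."""
    if name in cache:
        return cache[name]          # positive cache: no API call
    if name in failed:
        return None                 # negative cache: known failure, skip
    for attempt in range(max_retries):
        try:
            if attempt > 0:
                # 1.0 s before attempt 1, 2.0 s before attempt 2
                await asyncio.sleep(base_delay * 2 ** (attempt - 1))
            products = await client.search_products(name, tenant_id)
            if products:
                cache[name] = UUID(str(products[0]["id"]))
                return cache[name]
        except Exception as exc:
            if "429" not in str(exc):
                break               # only rate-limit errors are retried
    failed.add(name)
    return None


async def main() -> None:
    cache: Dict[str, UUID] = {}
    failed: Set[str] = set()
    client = FakeInventoryClient()
    tenant = uuid4()
    first = await resolve(client, "Croissant", tenant, cache, failed)
    second = await resolve(client, "Croissant", tenant, cache, failed)  # served from cache
    print(first == second, client.calls)  # True 2


if __name__ == "__main__":
    asyncio.run(main())

Caching failures in failed_products as well as successes in product_cache means an unresolvable product name is reported once per import session instead of triggering a fresh inventory call for every row.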