Fix new services implementation 1

Urtzi Alfaro
2025-08-13 21:41:00 +02:00
parent 16b8a9d50c
commit 262b3dc9c4
13 changed files with 1702 additions and 1210 deletions

View File

@@ -56,6 +56,28 @@ export interface BusinessModelAnalysis {
recommendations: string[];
}
// Step 1: File validation result
export interface FileValidationResult {
is_valid: boolean;
total_records: number;
unique_products: number;
product_list: string[];
validation_errors: any[];
validation_warnings: any[];
summary: Record<string, any>;
}
// Step 2: AI suggestions result
export interface ProductSuggestionsResult {
suggestions: InventorySuggestion[];
business_model_analysis: BusinessModelAnalysis;
total_products: number;
high_confidence_count: number;
low_confidence_count: number;
processing_time_seconds: number;
}
// Legacy support - will be deprecated
export interface OnboardingAnalysisResult {
total_products_found: number;
inventory_suggestions: InventorySuggestion[];
@@ -143,16 +165,16 @@ export class OnboardingService {
return apiClient.get(`${this.baseEndpoint}/can-access/${stepName}`);
}
// ========== AUTOMATED INVENTORY CREATION METHODS ==========
// ========== NEW 4-STEP AUTOMATED INVENTORY CREATION METHODS ==========
/**
* Phase 1: Analyze sales data and get AI suggestions
* Step 1: Validate file and extract unique products
*/
async analyzeSalesDataForOnboarding(tenantId: string, file: File): Promise<OnboardingAnalysisResult> {
async validateFileAndExtractProducts(tenantId: string, file: File): Promise<FileValidationResult> {
const formData = new FormData();
formData.append('file', file);
return apiClient.post(`/tenants/${tenantId}/onboarding/analyze`, formData, {
return apiClient.post(`/tenants/${tenantId}/onboarding/validate-file`, formData, {
headers: {
'Content-Type': 'multipart/form-data',
},
@@ -160,7 +182,26 @@ export class OnboardingService {
}
/**
* Phase 2: Create inventory from approved suggestions
* Step 2: Generate AI-powered inventory suggestions
*/
async generateInventorySuggestions(
tenantId: string,
file: File,
productList: string[]
): Promise<ProductSuggestionsResult> {
const formData = new FormData();
formData.append('file', file);
formData.append('product_list', JSON.stringify(productList));
return apiClient.post(`/tenants/${tenantId}/onboarding/generate-suggestions`, formData, {
headers: {
'Content-Type': 'multipart/form-data',
},
});
}
/**
* Step 3: Create inventory from approved suggestions
*/
async createInventoryFromSuggestions(
tenantId: string,
@@ -176,7 +217,7 @@ export class OnboardingService {
}
/**
* Phase 3: Import sales data with inventory mapping
* Step 4: Final sales data import with inventory mapping
*/
async importSalesWithInventory(
tenantId: string,
@@ -194,6 +235,35 @@ export class OnboardingService {
});
}
// ========== LEGACY METHODS (for backward compatibility) ==========
/**
* @deprecated Use the new 4-step flow instead
* Phase 1: Analyze sales data and get AI suggestions (OLD METHOD)
*/
async analyzeSalesDataForOnboarding(tenantId: string, file: File): Promise<OnboardingAnalysisResult> {
// This method will use the new flow under the hood for backward compatibility
const validationResult = await this.validateFileAndExtractProducts(tenantId, file);
if (!validationResult.is_valid) {
throw new Error(`File validation failed: ${validationResult.validation_errors.map(e => e.message || e).join(', ')}`);
}
const suggestionsResult = await this.generateInventorySuggestions(tenantId, file, validationResult.product_list);
// Convert to legacy format
return {
total_products_found: suggestionsResult.total_products,
inventory_suggestions: suggestionsResult.suggestions,
business_model_analysis: suggestionsResult.business_model_analysis,
import_job_id: `legacy-${Date.now()}`,
status: 'completed',
processed_rows: validationResult.total_records,
errors: validationResult.validation_errors.map(e => e.message || String(e)),
warnings: validationResult.validation_warnings.map(w => w.message || String(w))
};
}
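Taken together, the methods above form a four-call sequence against the new onboarding endpoints. The following is a rough sketch of that sequence as raw HTTP calls with httpx; the base URL, auth header, final import path, and the response field names for steps 3–4 are assumptions — only the validate-file, generate-suggestions and create-inventory paths appear in this diff.

import json
import httpx

BASE = "https://gateway.example.test/api/v1"   # placeholder gateway URL
HEADERS = {"Authorization": "Bearer <token>"}  # auth scheme assumed

def run_onboarding(tenant_id: str, csv_bytes: bytes) -> None:
    files = {"file": ("sales.csv", csv_bytes, "text/csv")}

    # Step 1: validate the file and extract the unique product list
    v = httpx.post(f"{BASE}/tenants/{tenant_id}/onboarding/validate-file",
                   headers=HEADERS, files=files).json()
    if not v["is_valid"]:
        raise RuntimeError(v["validation_errors"])

    # Step 2: AI suggestions for the extracted products (product_list is a JSON form field)
    s = httpx.post(f"{BASE}/tenants/{tenant_id}/onboarding/generate-suggestions",
                   headers=HEADERS, files=files,
                   data={"product_list": json.dumps(v["product_list"])}).json()

    # Step 3: create inventory from reviewed suggestions (auto-approve high confidence here)
    approved = [dict(x, approved=x["confidence_score"] >= 0.7) for x in s["suggestions"]]
    c = httpx.post(f"{BASE}/tenants/{tenant_id}/onboarding/create-inventory",
                   headers=HEADERS, json={"suggestions": approved}).json()

    # Step 4: final import; the "id" field and the import-sales path are assumptions
    mapping = {i["original_name"]: i["inventory_item"]["id"] for i in c["created_items"]}
    httpx.post(f"{BASE}/tenants/{tenant_id}/onboarding/import-sales",
               headers=HEADERS, files=files,
               data={"inventory_mapping": json.dumps(mapping)})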
/**
* Get business model guidance based on analysis
*/

View File

@@ -21,6 +21,8 @@ import {
import toast from 'react-hot-toast';
import {
FileValidationResult,
ProductSuggestionsResult,
OnboardingAnalysisResult,
InventorySuggestion,
BusinessModelAnalysis,
@@ -35,12 +37,13 @@ interface SmartHistoricalDataImportProps {
onBack?: () => void;
}
type ImportPhase = 'upload' | 'analysis' | 'review' | 'creation' | 'import' | 'complete';
type ImportPhase = 'upload' | 'validation' | 'suggestions' | 'review' | 'creation' | 'import' | 'complete';
interface PhaseState {
phase: ImportPhase;
file?: File;
analysisResult?: OnboardingAnalysisResult;
validationResult?: FileValidationResult;
suggestionsResult?: ProductSuggestionsResult;
reviewedSuggestions?: InventorySuggestion[];
creationResult?: InventoryCreationResult;
importResult?: SalesImportResult;
@@ -57,22 +60,56 @@ const SmartHistoricalDataImport: React.FC<SmartHistoricalDataImportProps> = ({
const [showAllSuggestions, setShowAllSuggestions] = useState(false);
const handleFileUpload = useCallback(async (file: File) => {
setState(prev => ({ ...prev, file, phase: 'analysis' }));
setState(prev => ({ ...prev, file, phase: 'validation' }));
setIsProcessing(true);
try {
toast.loading('🧠 Analizando tu archivo con IA...', { id: 'analysis' });
// Step 1: Validate file and extract products
toast.loading('📋 Validando archivo...', { id: 'validation' });
const analysisResult = await onboardingService.analyzeSalesDataForOnboarding(tenantId, file);
const validationResult = await onboardingService.validateFileAndExtractProducts(tenantId, file);
toast.success(`¡Análisis completado! ${analysisResult.total_products_found} productos encontrados`, {
id: 'analysis'
if (!validationResult.is_valid) {
throw new Error(`Archivo inválido: ${validationResult.validation_errors.map(e => e.message || e).join(', ')}`);
}
toast.success(`¡Archivo válido! ${validationResult.unique_products} productos únicos encontrados`, {
id: 'validation'
});
setState(prev => ({ ...prev, validationResult, phase: 'suggestions' }));
// Step 2: Generate AI suggestions
setTimeout(() => handleGenerateSuggestions(file, validationResult.product_list), 1000);
} catch (error: any) {
toast.error('Error al validar el archivo', { id: 'validation' });
setState(prev => ({
...prev,
error: error.message || 'Error de validación',
phase: 'upload'
}));
} finally {
setIsProcessing(false);
}
}, [tenantId]);
const handleGenerateSuggestions = useCallback(async (file: File, productList: string[]) => {
setIsProcessing(true);
try {
toast.loading('🧠 Generando sugerencias con IA...', { id: 'suggestions' });
const suggestionsResult = await onboardingService.generateInventorySuggestions(tenantId, file, productList);
toast.success(`¡${suggestionsResult.total_products} productos clasificados! ${suggestionsResult.high_confidence_count} con alta confianza`, {
id: 'suggestions'
});
setState(prev => ({
...prev,
analysisResult,
reviewedSuggestions: analysisResult.inventory_suggestions.map(s => ({
suggestionsResult,
reviewedSuggestions: suggestionsResult.suggestions.map(s => ({
...s,
user_approved: s.confidence_score >= 0.7
})),
@@ -80,11 +117,11 @@ const SmartHistoricalDataImport: React.FC<SmartHistoricalDataImportProps> = ({
}));
} catch (error: any) {
toast.error('Error al analizar el archivo', { id: 'analysis' });
toast.error('Error al generar sugerencias', { id: 'suggestions' });
setState(prev => ({
...prev,
error: error.message || 'Error desconocido',
phase: 'upload'
error: error.message || 'Error en sugerencias de IA',
phase: 'validation'
}));
} finally {
setIsProcessing(false);
@@ -475,17 +512,17 @@ const SmartHistoricalDataImport: React.FC<SmartHistoricalDataImportProps> = ({
</div>
);
case 'analysis':
case 'validation':
return (
<div className="text-center py-12">
<div className="w-20 h-20 bg-gradient-to-r from-blue-500 to-purple-500 rounded-full flex items-center justify-center mx-auto mb-6 animate-pulse">
<Brain className="w-10 h-10 text-white" />
<div className="w-20 h-20 bg-gradient-to-r from-blue-500 to-green-500 rounded-full flex items-center justify-center mx-auto mb-6 animate-pulse">
<CheckCircle2 className="w-10 h-10 text-white" />
</div>
<h2 className="text-xl font-semibold text-gray-900 mb-3">
🧠 Analizando tu archivo con IA...
📋 Validando archivo...
</h2>
<p className="text-gray-600 mb-6">
Esto puede tomar unos momentos mientras clasificamos tus productos
Verificando formato y extrayendo productos únicos
</p>
<div className="bg-white rounded-lg shadow-sm p-4 max-w-md mx-auto">
<div className="flex items-center justify-between text-sm text-gray-600">
@@ -493,16 +530,48 @@ const SmartHistoricalDataImport: React.FC<SmartHistoricalDataImportProps> = ({
<span className="font-medium">{state.file?.name}</span>
</div>
<div className="mt-2 bg-gray-200 rounded-full h-2">
<div className="bg-gradient-to-r from-blue-500 to-purple-500 h-2 rounded-full w-1/2 animate-pulse"></div>
<div className="bg-gradient-to-r from-blue-500 to-green-500 h-2 rounded-full w-1/3 animate-pulse"></div>
</div>
<div className="mt-2 text-xs text-gray-500">Paso 1 de 4: Validación</div>
</div>
</div>
);
case 'suggestions':
return (
<div className="text-center py-12">
<div className="w-20 h-20 bg-gradient-to-r from-purple-500 to-pink-500 rounded-full flex items-center justify-center mx-auto mb-6 animate-pulse">
<Brain className="w-10 h-10 text-white" />
</div>
<h2 className="text-xl font-semibold text-gray-900 mb-3">
🧠 Generando sugerencias con IA...
</h2>
<p className="text-gray-600 mb-6">
Clasificando productos y analizando tu modelo de negocio
</p>
<div className="bg-white rounded-lg shadow-sm p-4 max-w-md mx-auto">
{state.validationResult && (
<div className="mb-4">
<div className="flex items-center justify-center space-x-2 text-green-600 mb-2">
<CheckCircle2 className="w-4 h-4" />
<span className="text-sm font-medium">
{state.validationResult.unique_products} productos únicos encontrados
</span>
</div>
</div>
)}
<div className="bg-gray-200 rounded-full h-2">
<div className="bg-gradient-to-r from-purple-500 to-pink-500 h-2 rounded-full w-2/3 animate-pulse"></div>
</div>
<div className="mt-2 text-xs text-gray-500">Paso 2 de 4: Clasificación IA</div>
</div>
</div>
);
case 'review':
if (!state.analysisResult) return null;
if (!state.suggestionsResult) return null;
const { analysisResult, reviewedSuggestions } = state;
const { suggestionsResult, reviewedSuggestions } = state;
const approvedCount = reviewedSuggestions?.filter(s => s.user_approved).length || 0;
const highConfidenceCount = reviewedSuggestions?.filter(s => s.confidence_score >= 0.7).length || 0;
const visibleSuggestions = showAllSuggestions
@@ -519,12 +588,15 @@ const SmartHistoricalDataImport: React.FC<SmartHistoricalDataImportProps> = ({
¡Análisis Completado! 🎉
</h2>
<p className="text-gray-600">
Hemos encontrado <strong>{analysisResult.total_products_found} productos</strong> y
Hemos encontrado <strong>{suggestionsResult.total_products} productos</strong> y
sugerimos <strong>{approvedCount} para tu inventario</strong>
</p>
<div className="mt-2 text-sm text-gray-500">
Procesado en {suggestionsResult.processing_time_seconds.toFixed(1)}s
</div>
</div>
{renderBusinessModelInsight(analysisResult.business_model_analysis)}
{renderBusinessModelInsight(suggestionsResult.business_model_analysis)}
<div className="bg-white border rounded-xl p-6">
<div className="flex items-center justify-between mb-4">
@@ -579,15 +651,15 @@ const SmartHistoricalDataImport: React.FC<SmartHistoricalDataImportProps> = ({
{visibleSuggestions?.map(renderSuggestionCard)}
</div>
{analysisResult.warnings.length > 0 && (
{state.validationResult?.validation_warnings && state.validationResult.validation_warnings.length > 0 && (
<div className="bg-amber-50 border border-amber-200 rounded-lg p-4 mb-4">
<div className="flex">
<AlertTriangle className="h-5 w-5 text-amber-400" />
<div className="ml-3">
<h4 className="text-sm font-medium text-amber-800">Advertencias</h4>
<h4 className="text-sm font-medium text-amber-800">Advertencias de Validación</h4>
<ul className="mt-2 text-sm text-amber-700 space-y-1">
{analysisResult.warnings.map((warning, idx) => (
<li key={idx}> {warning}</li>
{state.validationResult.validation_warnings.map((warning, idx) => (
<li key={idx}> {warning.message || warning}</li>
))}
</ul>
</div>
@@ -630,6 +702,9 @@ const SmartHistoricalDataImport: React.FC<SmartHistoricalDataImportProps> = ({
case 'creation':
case 'import':
const isCreating = state.phase === 'creation';
const stepNumber = isCreating ? 3 : 4;
const stepProgress = isCreating ? 75 : 90;
return (
<div className="text-center py-12">
<div className="w-20 h-20 bg-gradient-to-r from-green-400 to-blue-500 rounded-full flex items-center justify-center mx-auto mb-6 animate-pulse">
@@ -662,11 +737,17 @@ const SmartHistoricalDataImport: React.FC<SmartHistoricalDataImportProps> = ({
)}
<div className="bg-gray-200 rounded-full h-3">
<div className="bg-gradient-to-r from-green-400 to-blue-500 h-3 rounded-full w-3/4 animate-pulse"></div>
<div
className="bg-gradient-to-r from-green-400 to-blue-500 h-3 rounded-full animate-pulse transition-all duration-500"
style={{ width: `${stepProgress}%` }}
/>
</div>
<p className="text-sm text-gray-500 mt-2">
<div className="flex justify-between items-center mt-2">
<p className="text-sm text-gray-500">
{isCreating ? 'Creando inventario...' : 'Procesando importación final...'}
</p>
<span className="text-xs text-gray-400">Paso {stepNumber} de 4</span>
</div>
</div>
</div>
);

View File

@@ -5,7 +5,7 @@ import {
Filter,
Download,
Upload,
Grid3X3,
LayoutGrid,
List,
Package,
TrendingDown,
@@ -143,6 +143,14 @@ const InventoryPage: React.FC = () => {
console.log('View details:', item);
};
// Handle view item by ID (for alerts)
const handleViewItemById = (itemId: string) => {
const item = items.find(item => item.id === itemId);
if (item) {
handleItemViewDetails(item);
}
};
// Handle alert acknowledgment
const handleAcknowledgeAlert = async (alertId: string) => {
await acknowledgeAlert(alertId);
@@ -302,7 +310,7 @@ const InventoryPage: React.FC = () => {
: 'bg-white text-gray-600 hover:bg-gray-50'
}`}
>
<Grid3X3 className="w-4 h-4" />
<LayoutGrid className="w-4 h-4" />
</button>
<button
onClick={() => setViewMode('list')}
@@ -529,7 +537,7 @@ const InventoryPage: React.FC = () => {
alerts={alerts}
onAcknowledge={handleAcknowledgeAlert}
onAcknowledgeAll={handleBulkAcknowledgeAlerts}
onViewItem={handleItemViewDetails}
onViewItem={handleViewItemById}
/>
</div>
)}

View File

@@ -4,7 +4,7 @@ import {
Search,
Plus,
Filter,
Grid3X3,
LayoutGrid,
List,
ChefHat,
TrendingUp,
@@ -304,7 +304,7 @@ const RecipesPage: React.FC = () => {
: 'bg-white text-gray-600 hover:bg-gray-50'
}`}
>
<Grid3X3 className="w-4 h-4" />
<LayoutGrid className="w-4 h-4" />
</button>
<button
onClick={() => setViewMode('list')}

View File

@@ -17,7 +17,7 @@ from app.core.service_discovery import ServiceDiscovery
from app.middleware.auth import AuthMiddleware
from app.middleware.logging import LoggingMiddleware
from app.middleware.rate_limit import RateLimitMiddleware
from app.routes import auth, tenant, notification, nominatim, user
from app.routes import auth, tenant, notification, nominatim, user, inventory
from shared.monitoring.logging import setup_logging
from shared.monitoring.metrics import MetricsCollector
@@ -60,6 +60,7 @@ app.include_router(user.router, prefix="/api/v1/users", tags=["users"])
app.include_router(tenant.router, prefix="/api/v1/tenants", tags=["tenants"])
app.include_router(notification.router, prefix="/api/v1/notifications", tags=["notifications"])
app.include_router(nominatim.router, prefix="/api/v1/nominatim", tags=["location"])
app.include_router(inventory.router, prefix="/api/v1/inventory", tags=["inventory"])
@app.on_event("startup")
async def startup_event():

View File

@@ -0,0 +1,216 @@
# gateway/app/routes/inventory.py
"""
Inventory routes for API Gateway - Handles inventory management endpoints
"""
from fastapi import APIRouter, Request, Response, HTTPException, Path
from fastapi.responses import JSONResponse
import httpx
import logging
from typing import Optional
from app.core.config import settings
logger = logging.getLogger(__name__)
router = APIRouter()
# Inventory service URL - add to settings
INVENTORY_SERVICE_URL = "http://inventory-service:8000"
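The comment above flags that this URL should move into settings; a minimal sketch of what that could look like, assuming the gateway's settings object is a Pydantic BaseSettings (the field name and default below are assumptions, not part of this commit):

from pydantic import BaseSettings  # pydantic v1; with pydantic v2 this lives in pydantic-settings

class GatewaySettings(BaseSettings):
    # Overridable via the INVENTORY_SERVICE_URL environment variable
    INVENTORY_SERVICE_URL: str = "http://inventory-service:8000"

# This module could then read:
# INVENTORY_SERVICE_URL = settings.INVENTORY_SERVICE_URL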
# ================================================================
# TENANT-SCOPED INVENTORY ENDPOINTS
# ================================================================
@router.api_route("/{tenant_id}/inventory/ingredients{path:path}", methods=["GET", "POST", "PUT", "DELETE", "OPTIONS"])
async def proxy_tenant_ingredients(request: Request, tenant_id: str = Path(...), path: str = ""):
"""Proxy tenant ingredient requests to inventory service"""
base_path = f"/api/v1/ingredients"
# If path is empty or just "/", use base path
if not path or path == "/" or path == "":
target_path = base_path
else:
# Ensure path starts with "/"
if not path.startswith("/"):
path = "/" + path
target_path = base_path + path
return await _proxy_to_inventory_service(request, target_path)
@router.api_route("/{tenant_id}/inventory/stock{path:path}", methods=["GET", "POST", "PUT", "DELETE", "OPTIONS"])
async def proxy_tenant_stock(request: Request, tenant_id: str = Path(...), path: str = ""):
"""Proxy tenant stock requests to inventory service"""
base_path = f"/api/v1/stock"
# If path is empty or just "/", use base path
if not path or path == "/" or path == "":
target_path = base_path
else:
# Ensure path starts with "/"
if not path.startswith("/"):
path = "/" + path
target_path = base_path + path
return await _proxy_to_inventory_service(request, target_path)
@router.api_route("/{tenant_id}/inventory/alerts{path:path}", methods=["GET", "POST", "PUT", "DELETE", "OPTIONS"])
async def proxy_tenant_alerts(request: Request, tenant_id: str = Path(...), path: str = ""):
"""Proxy tenant inventory alert requests to inventory service"""
base_path = f"/api/v1/alerts"
# If path is empty or just "/", use base path
if not path or path == "/" or path == "":
target_path = base_path
else:
# Ensure path starts with "/"
if not path.startswith("/"):
path = "/" + path
target_path = base_path + path
return await _proxy_to_inventory_service(request, target_path)
@router.api_route("/{tenant_id}/inventory/dashboard{path:path}", methods=["GET", "OPTIONS"])
async def proxy_tenant_inventory_dashboard(request: Request, tenant_id: str = Path(...), path: str = ""):
"""Proxy tenant inventory dashboard requests to inventory service"""
base_path = f"/api/v1/dashboard"
# If path is empty or just "/", use base path
if not path or path == "/" or path == "":
target_path = base_path
else:
# Ensure path starts with "/"
if not path.startswith("/"):
path = "/" + path
target_path = base_path + path
return await _proxy_to_inventory_service(request, target_path)
# ================================================================
# DIRECT INVENTORY ENDPOINTS (for backward compatibility)
# ================================================================
@router.api_route("/ingredients{path:path}", methods=["GET", "POST", "PUT", "DELETE", "OPTIONS"])
async def proxy_ingredients(request: Request, path: str = ""):
"""Proxy ingredient requests to inventory service"""
base_path = f"/api/v1/ingredients"
if not path or path == "/" or path == "":
target_path = base_path
else:
if not path.startswith("/"):
path = "/" + path
target_path = base_path + path
return await _proxy_to_inventory_service(request, target_path)
@router.api_route("/stock{path:path}", methods=["GET", "POST", "PUT", "DELETE", "OPTIONS"])
async def proxy_stock(request: Request, path: str = ""):
"""Proxy stock requests to inventory service"""
base_path = f"/api/v1/stock"
if not path or path == "/" or path == "":
target_path = base_path
else:
if not path.startswith("/"):
path = "/" + path
target_path = base_path + path
return await _proxy_to_inventory_service(request, target_path)
@router.api_route("/alerts{path:path}", methods=["GET", "POST", "PUT", "DELETE", "OPTIONS"])
async def proxy_alerts(request: Request, path: str = ""):
"""Proxy inventory alert requests to inventory service"""
base_path = f"/api/v1/alerts"
if not path or path == "/" or path == "":
target_path = base_path
else:
if not path.startswith("/"):
path = "/" + path
target_path = base_path + path
return await _proxy_to_inventory_service(request, target_path)
# ================================================================
# PROXY HELPER FUNCTION
# ================================================================
async def _proxy_to_inventory_service(request: Request, target_path: str):
"""Proxy request to inventory service with enhanced error handling"""
# Handle OPTIONS requests directly for CORS
if request.method == "OPTIONS":
return Response(
status_code=200,
headers={
"Access-Control-Allow-Origin": "*",
"Access-Control-Allow-Methods": "GET, POST, PUT, DELETE, OPTIONS",
"Access-Control-Allow-Headers": "Content-Type, Authorization, X-Tenant-ID",
"Access-Control-Allow-Credentials": "true",
"Access-Control-Max-Age": "86400"
}
)
try:
url = f"{INVENTORY_SERVICE_URL}{target_path}"
# Forward headers and add user/tenant context
headers = dict(request.headers)
headers.pop("host", None)
# Get request body if present
body = None
if request.method in ["POST", "PUT", "PATCH"]:
body = await request.body()
# Add query parameters
params = dict(request.query_params)
timeout_config = httpx.Timeout(
connect=30.0, # Connection timeout
read=600.0, # Read timeout: 10 minutes
write=30.0, # Write timeout
pool=30.0 # Pool timeout
)
async with httpx.AsyncClient(timeout=timeout_config) as client:
response = await client.request(
method=request.method,
url=url,
headers=headers,
content=body,
params=params
)
# Handle different response types
if response.headers.get("content-type", "").startswith("application/json"):
try:
content = response.json()
except ValueError:
content = {"message": "Invalid JSON response from inventory service"}
else:
content = response.text
return JSONResponse(
status_code=response.status_code,
content=content
)
except httpx.ConnectTimeout:
logger.error(f"Connection timeout to inventory service: {INVENTORY_SERVICE_URL}{target_path}")
raise HTTPException(
status_code=503,
detail="Inventory service temporarily unavailable"
)
except httpx.ReadTimeout:
logger.error(f"Read timeout from inventory service: {INVENTORY_SERVICE_URL}{target_path}")
raise HTTPException(
status_code=504,
detail="Inventory service response timeout"
)
except Exception as e:
logger.error(f"Unexpected error proxying to inventory service {INVENTORY_SERVICE_URL}{target_path}: {e}")
raise HTTPException(
status_code=500,
detail="Internal gateway error"
)
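Every route above repeats the same path-normalization block; the helper below is not part of this commit, just a compact restatement of that rule to show how a captured suffix maps onto an inventory-service path (the example suffixes are placeholders, not documented routes).

def build_target_path(base_path: str, path: str) -> str:
    """Join a base path with the optional suffix captured by {path:path}."""
    if not path or path == "/":
        return base_path
    if not path.startswith("/"):
        path = "/" + path
    return base_path + path

# Illustrative mappings only:
assert build_target_path("/api/v1/ingredients", "") == "/api/v1/ingredients"
assert build_target_path("/api/v1/stock", "/movements") == "/api/v1/stock/movements"
assert build_target_path("/api/v1/alerts", "summary") == "/api/v1/alerts/summary"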

View File

@@ -3,40 +3,27 @@
Client for communicating with Inventory Service
"""
import httpx
import logging
from typing import List, Optional, Dict, Any
from uuid import UUID
from shared.clients.inventory_client import InventoryServiceClient as SharedInventoryClient
from ..core.config import settings
logger = logging.getLogger(__name__)
class InventoryClient:
"""Client for inventory service communication"""
"""Client for inventory service communication via shared client"""
def __init__(self):
self.base_url = settings.INVENTORY_SERVICE_URL
self.timeout = 30.0
self._shared_client = SharedInventoryClient(settings)
async def get_ingredient_by_id(self, tenant_id: UUID, ingredient_id: UUID) -> Optional[Dict[str, Any]]:
"""Get ingredient details from inventory service"""
try:
async with httpx.AsyncClient(timeout=self.timeout) as client:
response = await client.get(
f"{self.base_url}/api/v1/ingredients/{ingredient_id}",
headers={"X-Tenant-ID": str(tenant_id)}
)
if response.status_code == 200:
return response.json()
elif response.status_code == 404:
return None
else:
logger.error(f"Failed to get ingredient {ingredient_id}: {response.status_code}")
return None
result = await self._shared_client.get_ingredient_by_id(ingredient_id, str(tenant_id))
return result
except Exception as e:
logger.error(f"Error getting ingredient {ingredient_id}: {e}")
return None
@@ -44,19 +31,13 @@ class InventoryClient:
async def get_ingredients_by_ids(self, tenant_id: UUID, ingredient_ids: List[UUID]) -> List[Dict[str, Any]]:
"""Get multiple ingredients by IDs"""
try:
async with httpx.AsyncClient(timeout=self.timeout) as client:
response = await client.post(
f"{self.base_url}/api/v1/ingredients/batch",
headers={"X-Tenant-ID": str(tenant_id)},
json={"ingredient_ids": [str(id) for id in ingredient_ids]}
)
if response.status_code == 200:
return response.json()
else:
logger.error(f"Failed to get ingredients batch: {response.status_code}")
return []
# For now, get ingredients individually - could be optimized with batch endpoint
results = []
for ingredient_id in ingredient_ids:
ingredient = await self._shared_client.get_ingredient_by_id(ingredient_id, str(tenant_id))
if ingredient:
results.append(ingredient)
return results
except Exception as e:
logger.error(f"Error getting ingredients batch: {e}")
return []
@@ -64,20 +45,16 @@ class InventoryClient:
async def get_ingredient_stock_level(self, tenant_id: UUID, ingredient_id: UUID) -> Optional[Dict[str, Any]]:
"""Get current stock level for ingredient"""
try:
async with httpx.AsyncClient(timeout=self.timeout) as client:
response = await client.get(
f"{self.base_url}/api/v1/stock/ingredient/{ingredient_id}",
headers={"X-Tenant-ID": str(tenant_id)}
)
if response.status_code == 200:
return response.json()
elif response.status_code == 404:
stock_entries = await self._shared_client.get_ingredient_stock(ingredient_id, str(tenant_id))
if stock_entries:
# Calculate total available stock from all entries
total_stock = sum(entry.get('available_quantity', 0) for entry in stock_entries)
return {
'ingredient_id': str(ingredient_id),
'total_available': total_stock,
'stock_entries': stock_entries
}
return None
else:
logger.error(f"Failed to get stock level for {ingredient_id}: {response.status_code}")
return None
except Exception as e:
logger.error(f"Error getting stock level for {ingredient_id}: {e}")
return None
@@ -114,22 +91,18 @@ class InventoryClient:
) -> Dict[str, Any]:
"""Record ingredient consumption for production"""
try:
async with httpx.AsyncClient(timeout=self.timeout) as client:
response = await client.post(
f"{self.base_url}/api/v1/stock/consume",
headers={"X-Tenant-ID": str(tenant_id)},
json={
consumption_data = {
"consumptions": consumptions,
"reference_number": str(production_batch_id),
"movement_type": "production_use"
}
)
if response.status_code == 200:
return {"success": True, "data": response.json()}
result = await self._shared_client.consume_stock(consumption_data, str(tenant_id))
if result:
return {"success": True, "data": result}
else:
logger.error(f"Failed to consume ingredients: {response.status_code}")
return {"success": False, "error": response.text}
return {"success": False, "error": "Failed to consume ingredients"}
except Exception as e:
logger.error(f"Error consuming ingredients: {e}")
@@ -142,18 +115,12 @@ class InventoryClient:
) -> Dict[str, Any]:
"""Add finished product to inventory after production"""
try:
async with httpx.AsyncClient(timeout=self.timeout) as client:
response = await client.post(
f"{self.base_url}/api/v1/stock/add",
headers={"X-Tenant-ID": str(tenant_id)},
json=product_data
)
result = await self._shared_client.receive_stock(product_data, str(tenant_id))
if response.status_code == 200:
return {"success": True, "data": response.json()}
if result:
return {"success": True, "data": result}
else:
logger.error(f"Failed to add finished product: {response.status_code}")
return {"success": False, "error": response.text}
return {"success": False, "error": "Failed to add finished product"}
except Exception as e:
logger.error(f"Error adding finished product: {e}")

View File

@@ -10,11 +10,12 @@ from uuid import UUID
from pydantic import BaseModel, Field
import structlog
from app.services.onboarding_import_service import (
OnboardingImportService,
from app.services.ai_onboarding_service import (
AIOnboardingService,
OnboardingValidationResult,
ProductSuggestionsResult,
OnboardingImportResult,
InventoryCreationRequest,
get_onboarding_import_service
get_ai_onboarding_service
)
from shared.auth.decorators import get_current_user_dep, get_current_tenant_id_dep
@@ -22,16 +23,6 @@ router = APIRouter(tags=["onboarding"])
logger = structlog.get_logger()
class OnboardingAnalysisResponse(BaseModel):
"""Response for onboarding analysis"""
total_products_found: int
inventory_suggestions: List[Dict[str, Any]]
business_model_analysis: Dict[str, Any]
import_job_id: str
status: str
processed_rows: int
errors: List[str]
warnings: List[str]
class InventoryApprovalRequest(BaseModel):
@@ -58,23 +49,22 @@ class SalesImportResponse(BaseModel):
warnings: List[str]
@router.post("/tenants/{tenant_id}/onboarding/analyze", response_model=OnboardingAnalysisResponse)
async def analyze_onboarding_data(
@router.post("/tenants/{tenant_id}/onboarding/validate-file", response_model=FileValidationResponse)
async def validate_onboarding_file(
file: UploadFile = File(..., description="Sales data CSV/Excel file"),
tenant_id: UUID = Path(..., description="Tenant ID"),
current_tenant: str = Depends(get_current_tenant_id_dep),
current_user: Dict[str, Any] = Depends(get_current_user_dep),
onboarding_service: OnboardingImportService = Depends(get_onboarding_import_service)
onboarding_service: AIOnboardingService = Depends(get_ai_onboarding_service)
):
"""
Step 1: Analyze uploaded sales data and suggest inventory items
Step 1: Validate uploaded file and extract unique products
This endpoint:
1. Parses the uploaded sales file
2. Extracts unique products and sales metrics
3. Uses AI to classify products and suggest inventory items
4. Analyzes business model (production vs retail)
5. Returns suggestions for user review
1. Validates the file format and content
2. Checks for required columns (date, product, etc.)
3. Extracts unique products from sales data
4. Returns validation results and product list
"""
try:
# Verify tenant access
@@ -89,34 +79,42 @@ async def analyze_onboarding_data(
if not any(file.filename.lower().endswith(ext) for ext in allowed_extensions):
raise HTTPException(status_code=400, detail=f"Unsupported file format. Allowed: {allowed_extensions}")
# Determine file format
file_format = "csv" if file.filename.lower().endswith('.csv') else "excel"
# Read file content
file_content = await file.read()
if not file_content:
raise HTTPException(status_code=400, detail="File is empty")
# Analyze the data
result = await onboarding_service.analyze_sales_data_for_onboarding(
file_content=file_content,
filename=file.filename,
tenant_id=tenant_id,
user_id=UUID(current_user['user_id'])
# Convert bytes to string for CSV
if file_format == "csv":
file_data = file_content.decode('utf-8')
else:
import base64
file_data = base64.b64encode(file_content).decode('utf-8')
# Validate and extract products
result = await onboarding_service.validate_and_extract_products(
file_data=file_data,
file_format=file_format,
tenant_id=tenant_id
)
response = OnboardingAnalysisResponse(
total_products_found=result.total_products_found,
inventory_suggestions=result.inventory_suggestions,
business_model_analysis=result.business_model_analysis,
import_job_id=str(result.import_job_id),
status=result.status,
processed_rows=result.processed_rows,
errors=result.errors,
warnings=result.warnings
response = FileValidationResponse(
is_valid=result.is_valid,
total_records=result.total_records,
unique_products=result.unique_products,
product_list=result.product_list,
validation_errors=result.validation_details.errors,
validation_warnings=result.validation_details.warnings,
summary=result.summary
)
logger.info("Onboarding analysis complete",
logger.info("File validation complete",
filename=file.filename,
products_found=result.total_products_found,
business_model=result.business_model_analysis.get('model'),
is_valid=result.is_valid,
unique_products=result.unique_products,
tenant_id=tenant_id)
return response
@@ -124,9 +122,120 @@ async def analyze_onboarding_data(
except HTTPException:
raise
except Exception as e:
logger.error("Failed onboarding analysis",
logger.error("Failed file validation",
error=str(e), filename=file.filename if file else None, tenant_id=tenant_id)
raise HTTPException(status_code=500, detail=f"Analysis failed: {str(e)}")
raise HTTPException(status_code=500, detail=f"Validation failed: {str(e)}")
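The endpoint above ships CSV content to the service as plain UTF-8 text and Excel content as a base64 string. A matching decode step (a sketch only — the service's actual Excel handling is not shown in this diff, and pandas plus an Excel engine such as openpyxl are assumptions) would look like:

import base64
import io
import pandas as pd

def decode_file_data(file_data: str, file_format: str) -> pd.DataFrame:
    """Reverse the encoding used when forwarding the upload."""
    if file_format == "csv":
        return pd.read_csv(io.StringIO(file_data))
    # Excel content arrives base64-encoded inside a UTF-8 string
    raw = base64.b64decode(file_data.encode("utf-8"))
    return pd.read_excel(io.BytesIO(raw))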
@router.post("/tenants/{tenant_id}/onboarding/generate-suggestions", response_model=ProductSuggestionsResponse)
async def generate_inventory_suggestions(
file: UploadFile = File(..., description="Same sales data file from step 1"),
product_list: str = Form(..., description="JSON array of product names to classify"),
tenant_id: UUID = Path(..., description="Tenant ID"),
current_tenant: str = Depends(get_current_tenant_id_dep),
current_user: Dict[str, Any] = Depends(get_current_user_dep),
onboarding_service: AIOnboardingService = Depends(get_ai_onboarding_service)
):
"""
Step 2: Generate AI-powered inventory suggestions
This endpoint:
1. Takes the validated file and product list from step 1
2. Uses AI to classify products into inventory categories
3. Analyzes business model (production vs retail)
4. Returns detailed suggestions for user review
"""
try:
# Verify tenant access
if str(tenant_id) != current_tenant:
raise HTTPException(status_code=403, detail="Access denied to this tenant")
# Parse product list
import json
try:
products = json.loads(product_list)
except json.JSONDecodeError as e:
raise HTTPException(status_code=400, detail=f"Invalid product list format: {str(e)}")
if not products:
raise HTTPException(status_code=400, detail="No products provided")
# Determine file format
file_format = "csv" if file.filename.lower().endswith('.csv') else "excel"
# Read file content
file_content = await file.read()
if not file_content:
raise HTTPException(status_code=400, detail="File is empty")
# Convert bytes to string for CSV
if file_format == "csv":
file_data = file_content.decode('utf-8')
else:
import base64
file_data = base64.b64encode(file_content).decode('utf-8')
# Generate suggestions
result = await onboarding_service.generate_inventory_suggestions(
product_list=products,
file_data=file_data,
file_format=file_format,
tenant_id=tenant_id
)
# Convert suggestions to dict format
suggestions_dict = []
for suggestion in result.suggestions:
suggestion_dict = {
"suggestion_id": suggestion.suggestion_id,
"original_name": suggestion.original_name,
"suggested_name": suggestion.suggested_name,
"product_type": suggestion.product_type,
"category": suggestion.category,
"unit_of_measure": suggestion.unit_of_measure,
"confidence_score": suggestion.confidence_score,
"estimated_shelf_life_days": suggestion.estimated_shelf_life_days,
"requires_refrigeration": suggestion.requires_refrigeration,
"requires_freezing": suggestion.requires_freezing,
"is_seasonal": suggestion.is_seasonal,
"suggested_supplier": suggestion.suggested_supplier,
"notes": suggestion.notes,
"sales_data": suggestion.sales_data
}
suggestions_dict.append(suggestion_dict)
business_model_dict = {
"model": result.business_model_analysis.model,
"confidence": result.business_model_analysis.confidence,
"ingredient_count": result.business_model_analysis.ingredient_count,
"finished_product_count": result.business_model_analysis.finished_product_count,
"ingredient_ratio": result.business_model_analysis.ingredient_ratio,
"recommendations": result.business_model_analysis.recommendations
}
response = ProductSuggestionsResponse(
suggestions=suggestions_dict,
business_model_analysis=business_model_dict,
total_products=result.total_products,
high_confidence_count=result.high_confidence_count,
low_confidence_count=result.low_confidence_count,
processing_time_seconds=result.processing_time_seconds
)
logger.info("AI suggestions generated",
total_products=result.total_products,
business_model=result.business_model_analysis.model,
high_confidence=result.high_confidence_count,
tenant_id=tenant_id)
return response
except HTTPException:
raise
except Exception as e:
logger.error("Failed to generate suggestions",
error=str(e), tenant_id=tenant_id)
raise HTTPException(status_code=500, detail=f"Suggestion generation failed: {str(e)}")
@router.post("/tenants/{tenant_id}/onboarding/create-inventory", response_model=InventoryCreationResponse)
@@ -135,16 +244,16 @@ async def create_inventory_from_suggestions(
tenant_id: UUID = Path(..., description="Tenant ID"),
current_tenant: str = Depends(get_current_tenant_id_dep),
current_user: Dict[str, Any] = Depends(get_current_user_dep),
onboarding_service: OnboardingImportService = Depends(get_onboarding_import_service)
onboarding_service: AIOnboardingService = Depends(get_ai_onboarding_service)
):
"""
Step 2: Create inventory items from approved suggestions
Step 3: Create inventory items from approved suggestions
This endpoint:
1. Takes user-approved inventory suggestions
2. Applies any user modifications
1. Takes user-approved inventory suggestions from step 2
2. Applies any user modifications to suggestions
3. Creates inventory items via inventory service
4. Returns creation results
4. Returns creation results for final import step
"""
try:
# Verify tenant access
@@ -154,18 +263,9 @@ async def create_inventory_from_suggestions(
if not request.suggestions:
raise HTTPException(status_code=400, detail="No suggestions provided")
# Convert to internal format
approval_requests = []
for suggestion in request.suggestions:
approval_requests.append(InventoryCreationRequest(
suggestion_id=suggestion.get('suggestion_id'),
approved=suggestion.get('approved', False),
modifications=suggestion.get('modifications', {})
))
# Create inventory items
# Create inventory items using new service
result = await onboarding_service.create_inventory_from_suggestions(
suggestions_approval=approval_requests,
approved_suggestions=request.suggestions,
tenant_id=tenant_id,
user_id=UUID(current_user['user_id'])
)
@@ -199,16 +299,16 @@ async def import_sales_with_inventory(
tenant_id: UUID = Path(..., description="Tenant ID"),
current_tenant: str = Depends(get_current_tenant_id_dep),
current_user: Dict[str, Any] = Depends(get_current_user_dep),
onboarding_service: OnboardingImportService = Depends(get_onboarding_import_service)
onboarding_service: AIOnboardingService = Depends(get_ai_onboarding_service)
):
"""
Step 3: Import sales data using created inventory items
Step 4: Final sales data import using created inventory items
This endpoint:
1. Takes the same sales file from step 1
2. Uses the inventory mapping from step 2
3. Imports sales records with proper inventory product references
4. Returns import results
1. Takes the same validated sales file from step 1
2. Uses the inventory mapping from step 3
3. Imports sales records using detailed processing from DataImportService
4. Returns final import results - onboarding complete!
"""
try:
# Verify tenant access
@@ -223,41 +323,51 @@ async def import_sales_with_inventory(
import json
try:
mapping = json.loads(inventory_mapping)
# Convert string UUIDs to UUID objects
inventory_mapping_uuids = {
product_name: UUID(inventory_id)
# Convert to string mapping for the new service
inventory_mapping_dict = {
product_name: str(inventory_id)
for product_name, inventory_id in mapping.items()
}
except (json.JSONDecodeError, ValueError) as e:
except json.JSONDecodeError as e:
raise HTTPException(status_code=400, detail=f"Invalid inventory mapping format: {str(e)}")
# Determine file format
file_format = "csv" if file.filename.lower().endswith('.csv') else "excel"
# Read file content
file_content = await file.read()
if not file_content:
raise HTTPException(status_code=400, detail="File is empty")
# Import sales data
# Convert bytes to string for CSV
if file_format == "csv":
file_data = file_content.decode('utf-8')
else:
import base64
file_data = base64.b64encode(file_content).decode('utf-8')
# Import sales data using new service
result = await onboarding_service.import_sales_data_with_inventory(
file_content=file_content,
filename=file.filename,
file_data=file_data,
file_format=file_format,
inventory_mapping=inventory_mapping_dict,
tenant_id=tenant_id,
user_id=UUID(current_user['user_id']),
inventory_mapping=inventory_mapping_uuids
filename=file.filename
)
response = SalesImportResponse(
import_job_id=str(result.import_job_id),
status=result.status,
processed_rows=result.processed_rows,
successful_imports=result.successful_imports,
failed_imports=result.failed_imports,
errors=result.errors,
warnings=result.warnings
import_job_id="onboarding-" + str(tenant_id), # Generate a simple job ID
status="completed" if result.success else "failed",
processed_rows=result.import_details.records_processed,
successful_imports=result.import_details.records_created,
failed_imports=result.import_details.records_failed,
errors=[error.get("message", str(error)) for error in result.import_details.errors],
warnings=[warning.get("message", str(warning)) for warning in result.import_details.warnings]
)
logger.info("Sales import complete",
successful=result.successful_imports,
failed=result.failed_imports,
successful=result.import_details.records_created,
failed=result.import_details.records_failed,
filename=file.filename,
tenant_id=tenant_id)
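For reference, the inventory_mapping form field consumed above is a JSON object keyed by original product name, with inventory item IDs as values. A hypothetical payload (names and IDs are placeholders):

import json

# What a client would place in the `inventory_mapping` form field (illustrative values only)
inventory_mapping_field = json.dumps({
    "Pan de masa madre": "7f0c2a9e-5d7b-4b51-9f2a-2f4f2f7a1c11",
    "Croissant": "3b6f4d2c-8a1e-4e0a-b7a9-0d9a7b6c5e44",
})
# The endpoint parses it with json.loads and keeps the IDs as plain strings.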

View File

@@ -0,0 +1,627 @@
# services/sales/app/services/ai_onboarding_service.py
"""
AI-Powered Onboarding Service
Handles the complete onboarding flow: File validation -> Product extraction -> Inventory suggestions -> Data processing
"""
import pandas as pd
import structlog
from typing import List, Dict, Any, Optional
from uuid import UUID, uuid4
from dataclasses import dataclass
import asyncio
from app.services.data_import_service import DataImportService, SalesValidationResult, SalesImportResult
from app.services.inventory_client import InventoryServiceClient
from app.core.database import get_db_transaction
logger = structlog.get_logger()
@dataclass
class ProductSuggestion:
"""Single product suggestion from AI classification"""
suggestion_id: str
original_name: str
suggested_name: str
product_type: str
category: str
unit_of_measure: str
confidence_score: float
estimated_shelf_life_days: Optional[int] = None
requires_refrigeration: bool = False
requires_freezing: bool = False
is_seasonal: bool = False
suggested_supplier: Optional[str] = None
notes: Optional[str] = None
sales_data: Optional[Dict[str, Any]] = None
@dataclass
class BusinessModelAnalysis:
"""Business model analysis results"""
model: str # production, retail, hybrid
confidence: float
ingredient_count: int
finished_product_count: int
ingredient_ratio: float
recommendations: List[str]
@dataclass
class OnboardingValidationResult:
"""Result of onboarding file validation step"""
is_valid: bool
total_records: int
unique_products: int
validation_details: SalesValidationResult
product_list: List[str]
summary: Dict[str, Any]
@dataclass
class ProductSuggestionsResult:
"""Result of AI product classification step"""
suggestions: List[ProductSuggestion]
business_model_analysis: BusinessModelAnalysis
total_products: int
high_confidence_count: int
low_confidence_count: int
processing_time_seconds: float
@dataclass
class OnboardingImportResult:
"""Result of final data import step"""
success: bool
import_details: SalesImportResult
inventory_items_created: int
inventory_creation_errors: List[str]
final_summary: Dict[str, Any]
class AIOnboardingService:
"""
Unified AI-powered onboarding service that orchestrates the complete flow:
1. File validation and product extraction
2. AI-powered inventory suggestions
3. User confirmation and inventory creation
4. Final sales data import
"""
def __init__(self):
self.data_import_service = DataImportService()
self.inventory_client = InventoryServiceClient()
# ================================================================
# STEP 1: FILE VALIDATION AND PRODUCT EXTRACTION
# ================================================================
async def validate_and_extract_products(
self,
file_data: str,
file_format: str,
tenant_id: UUID
) -> OnboardingValidationResult:
"""
Step 1: Validate uploaded file and extract unique products
This uses the detailed validation from data_import_service
"""
try:
logger.info("Starting onboarding validation and product extraction",
file_format=file_format, tenant_id=tenant_id)
# Use data_import_service for detailed validation
validation_data = {
"tenant_id": str(tenant_id),
"data": file_data,
"data_format": file_format,
"validate_only": True,
"source": "ai_onboarding"
}
validation_result = await self.data_import_service.validate_import_data(validation_data)
# Extract unique products if validation passes
product_list = []
unique_products = 0
if validation_result.is_valid and file_format.lower() == "csv":
try:
# Parse CSV to extract unique products
import csv
import io
reader = csv.DictReader(io.StringIO(file_data))
rows = list(reader)
# Use data_import_service column detection
column_mapping = self.data_import_service._detect_columns(list(rows[0].keys()) if rows else [])
if column_mapping.get('product'):
product_column = column_mapping['product']
# Extract and clean unique products
products_raw = [row.get(product_column, '').strip() for row in rows if row.get(product_column, '').strip()]
# Clean product names using data_import_service method
products_cleaned = [
self.data_import_service._clean_product_name(product)
for product in products_raw
]
# Get unique products
product_list = list(set([p for p in products_cleaned if p and p != "Producto sin nombre"]))
unique_products = len(product_list)
logger.info("Extracted unique products",
total_rows=len(rows), unique_products=unique_products)
except Exception as e:
logger.error("Failed to extract products", error=str(e))
# Don't fail validation just because product extraction failed
pass
result = OnboardingValidationResult(
is_valid=validation_result.is_valid,
total_records=validation_result.total_records,
unique_products=unique_products,
validation_details=validation_result,
product_list=product_list,
summary={
"status": "valid" if validation_result.is_valid else "invalid",
"file_format": file_format,
"total_records": validation_result.total_records,
"unique_products": unique_products,
"ready_for_ai_classification": validation_result.is_valid and unique_products > 0,
"next_step": "ai_classification" if validation_result.is_valid and unique_products > 0 else "fix_validation_errors"
}
)
logger.info("Onboarding validation completed",
is_valid=result.is_valid,
unique_products=unique_products,
tenant_id=tenant_id)
return result
except Exception as e:
logger.error("Onboarding validation failed", error=str(e), tenant_id=tenant_id)
return OnboardingValidationResult(
is_valid=False,
total_records=0,
unique_products=0,
validation_details=SalesValidationResult(
is_valid=False,
total_records=0,
valid_records=0,
invalid_records=0,
errors=[{
"type": "system_error",
"message": f"Onboarding validation error: {str(e)}",
"field": None,
"row": None,
"code": "ONBOARDING_VALIDATION_ERROR"
}],
warnings=[],
summary={}
),
product_list=[],
summary={
"status": "error",
"error_message": str(e),
"next_step": "retry_upload"
}
)
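A minimal usage sketch of this step with an in-memory CSV. The column names below are assumptions (in practice, whatever _detect_columns recognises), and constructing AIOnboardingService directly requires DataImportService and the inventory client to be importable, so this is illustrative rather than a test:

import asyncio
from uuid import uuid4

SAMPLE_CSV = (
    "fecha,producto,cantidad,total\n"
    "2025-01-02,Croissant,12,18.00\n"
    "2025-01-02,Croissant,8,12.00\n"
    "2025-01-03,Baguette,5,7.50\n"
)

async def demo_step_one() -> None:
    service = AIOnboardingService()
    result = await service.validate_and_extract_products(SAMPLE_CSV, "csv", uuid4())
    print(result.is_valid, result.unique_products, sorted(result.product_list))

# asyncio.run(demo_step_one())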
# ================================================================
# STEP 2: AI PRODUCT CLASSIFICATION
# ================================================================
async def generate_inventory_suggestions(
self,
product_list: List[str],
file_data: str,
file_format: str,
tenant_id: UUID
) -> ProductSuggestionsResult:
"""
Step 2: Generate AI-powered inventory suggestions for products
"""
import time
start_time = time.time()
try:
logger.info("Starting AI inventory suggestions",
product_count=len(product_list), tenant_id=tenant_id)
if not product_list:
raise ValueError("No products provided for classification")
# Analyze sales data for each product to provide context
product_analysis = await self._analyze_product_sales_data(
product_list, file_data, file_format
)
# Prepare products for classification
products_for_classification = []
for product_name in product_list:
sales_data = product_analysis.get(product_name, {})
products_for_classification.append({
"product_name": product_name,
"sales_volume": sales_data.get("total_quantity"),
"sales_data": sales_data
})
# Call inventory service for AI classification
classification_result = await self.inventory_client.classify_products_batch(
products_for_classification, tenant_id
)
if not classification_result or "suggestions" not in classification_result:
raise ValueError("Invalid classification response from inventory service")
suggestions_raw = classification_result["suggestions"]
business_model_raw = classification_result.get("business_model_analysis", {})
# Convert to dataclass objects
suggestions = []
for suggestion_data in suggestions_raw:
suggestion = ProductSuggestion(
suggestion_id=suggestion_data.get("suggestion_id", str(uuid4())),
original_name=suggestion_data["original_name"],
suggested_name=suggestion_data["suggested_name"],
product_type=suggestion_data["product_type"],
category=suggestion_data["category"],
unit_of_measure=suggestion_data["unit_of_measure"],
confidence_score=suggestion_data["confidence_score"],
estimated_shelf_life_days=suggestion_data.get("estimated_shelf_life_days"),
requires_refrigeration=suggestion_data.get("requires_refrigeration", False),
requires_freezing=suggestion_data.get("requires_freezing", False),
is_seasonal=suggestion_data.get("is_seasonal", False),
suggested_supplier=suggestion_data.get("suggested_supplier"),
notes=suggestion_data.get("notes"),
sales_data=product_analysis.get(suggestion_data["original_name"])
)
suggestions.append(suggestion)
business_model = BusinessModelAnalysis(
model=business_model_raw.get("model", "unknown"),
confidence=business_model_raw.get("confidence", 0.0),
ingredient_count=business_model_raw.get("ingredient_count", 0),
finished_product_count=business_model_raw.get("finished_product_count", 0),
ingredient_ratio=business_model_raw.get("ingredient_ratio", 0.0),
recommendations=business_model_raw.get("recommendations", [])
)
# Calculate confidence metrics
high_confidence_count = sum(1 for s in suggestions if s.confidence_score >= 0.7)
low_confidence_count = sum(1 for s in suggestions if s.confidence_score < 0.6)
processing_time = time.time() - start_time
result = ProductSuggestionsResult(
suggestions=suggestions,
business_model_analysis=business_model,
total_products=len(suggestions),
high_confidence_count=high_confidence_count,
low_confidence_count=low_confidence_count,
processing_time_seconds=processing_time
)
logger.info("AI inventory suggestions completed",
total_suggestions=len(suggestions),
business_model=business_model.model,
high_confidence=high_confidence_count,
processing_time=processing_time,
tenant_id=tenant_id)
return result
except Exception as e:
processing_time = time.time() - start_time
logger.error("AI inventory suggestions failed",
error=str(e), tenant_id=tenant_id)
# Return fallback suggestions
fallback_suggestions = [
ProductSuggestion(
suggestion_id=str(uuid4()),
original_name=product_name,
suggested_name=product_name.title(),
product_type="finished_product",
category="other_products",
unit_of_measure="units",
confidence_score=0.3,
notes="Fallback suggestion - requires manual review"
)
for product_name in product_list
]
return ProductSuggestionsResult(
suggestions=fallback_suggestions,
business_model_analysis=BusinessModelAnalysis(
model="unknown",
confidence=0.0,
ingredient_count=0,
finished_product_count=len(fallback_suggestions),
ingredient_ratio=0.0,
recommendations=["Manual review required for all products"]
),
total_products=len(fallback_suggestions),
high_confidence_count=0,
low_confidence_count=len(fallback_suggestions),
processing_time_seconds=processing_time
)
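The two counters above use different thresholds (>= 0.7 for high, < 0.6 for low), so scores in the 0.6–0.7 band are counted in neither. If that middle band ever matters for the review UI, an explicit three-way split is straightforward (a sketch, not part of this commit):

from typing import Dict, List

def bucket_by_confidence(scores: List[float]) -> Dict[str, int]:
    """Split confidence scores into high / medium / low bands."""
    return {
        "high": sum(1 for s in scores if s >= 0.7),
        "medium": sum(1 for s in scores if 0.6 <= s < 0.7),
        "low": sum(1 for s in scores if s < 0.6),
    }

# bucket_by_confidence([0.9, 0.65, 0.4]) == {"high": 1, "medium": 1, "low": 1}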
# ================================================================
# STEP 3: INVENTORY CREATION (after user confirmation)
# ================================================================
async def create_inventory_from_suggestions(
self,
approved_suggestions: List[Dict[str, Any]],
tenant_id: UUID,
user_id: UUID
) -> Dict[str, Any]:
"""
Step 3: Create inventory items from user-approved suggestions
"""
try:
logger.info("Creating inventory from approved suggestions",
approved_count=len(approved_suggestions), tenant_id=tenant_id)
created_items = []
failed_items = []
for approval in approved_suggestions:
suggestion_id = approval.get("suggestion_id")
is_approved = approval.get("approved", False)
modifications = approval.get("modifications", {})
if not is_approved:
continue
try:
# Build inventory item data from suggestion and modifications
inventory_data = {
"name": modifications.get("name") or approval.get("suggested_name"),
"product_type": modifications.get("product_type") or approval.get("product_type"),
"category": modifications.get("category") or approval.get("category"),
"unit_of_measure": modifications.get("unit_of_measure") or approval.get("unit_of_measure"),
"description": modifications.get("description") or approval.get("notes", ""),
"estimated_shelf_life_days": modifications.get("estimated_shelf_life_days") or approval.get("estimated_shelf_life_days"),
"requires_refrigeration": modifications.get("requires_refrigeration", approval.get("requires_refrigeration", False)),
"requires_freezing": modifications.get("requires_freezing", approval.get("requires_freezing", False)),
"is_seasonal": modifications.get("is_seasonal", approval.get("is_seasonal", False)),
"suggested_supplier": modifications.get("suggested_supplier") or approval.get("suggested_supplier"),
"is_active": True,
"source": "ai_onboarding"
}
# Create inventory item via inventory service
created_item = await self.inventory_client.create_ingredient(
inventory_data, str(tenant_id)
)
if created_item:
created_items.append({
"suggestion_id": suggestion_id,
"inventory_item": created_item,
"original_name": approval.get("original_name")
})
logger.info("Created inventory item",
item_name=inventory_data["name"],
suggestion_id=suggestion_id)
else:
failed_items.append({
"suggestion_id": suggestion_id,
"error": "Failed to create inventory item - no response"
})
except Exception as e:
logger.error("Failed to create inventory item",
error=str(e), suggestion_id=suggestion_id)
failed_items.append({
"suggestion_id": suggestion_id,
"error": str(e)
})
success_rate = len(created_items) / max(1, len(approved_suggestions)) * 100
result = {
"created_items": created_items,
"failed_items": failed_items,
"total_approved": len(approved_suggestions),
"successful_creations": len(created_items),
"failed_creations": len(failed_items),
"success_rate": success_rate,
"ready_for_import": len(created_items) > 0
}
logger.info("Inventory creation completed",
created=len(created_items),
failed=len(failed_items),
success_rate=success_rate,
tenant_id=tenant_id)
return result
except Exception as e:
logger.error("Inventory creation failed", error=str(e), tenant_id=tenant_id)
raise
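Each entry in approved_suggestions is read both for the original suggestion fields and for an optional modifications overlay that takes precedence over them. A hypothetical entry (all values are placeholders):

approved_suggestion = {
    "suggestion_id": "a1b2c3d4",
    "approved": True,
    "original_name": "croissant mantequilla",
    "suggested_name": "Croissant de mantequilla",
    "product_type": "finished_product",
    "category": "bakery",
    "unit_of_measure": "units",
    "requires_refrigeration": False,
    "estimated_shelf_life_days": 3,
    # User edits from the review screen; these override the fields above
    "modifications": {"category": "viennoiserie", "estimated_shelf_life_days": 2},
}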
# ================================================================
# STEP 4: FINAL DATA IMPORT
# ================================================================
async def import_sales_data_with_inventory(
self,
file_data: str,
file_format: str,
inventory_mapping: Dict[str, str], # original_product_name -> inventory_item_id
tenant_id: UUID,
filename: Optional[str] = None
) -> OnboardingImportResult:
"""
Step 4: Import sales data using the detailed processing from data_import_service
"""
try:
logger.info("Starting final sales data import with inventory mapping",
mappings_count=len(inventory_mapping), tenant_id=tenant_id)
# Use data_import_service for the actual import processing
import_result = await self.data_import_service.process_import(
str(tenant_id), file_data, file_format, filename
)
result = OnboardingImportResult(
success=import_result.success,
import_details=import_result,
inventory_items_created=len(inventory_mapping),
inventory_creation_errors=[],
final_summary={
"status": "completed" if import_result.success else "failed",
"total_records": import_result.records_processed,
"successful_imports": import_result.records_created,
"failed_imports": import_result.records_failed,
"inventory_items": len(inventory_mapping),
"processing_time": import_result.processing_time_seconds,
"onboarding_complete": import_result.success
}
)
logger.info("Final sales data import completed",
success=import_result.success,
records_created=import_result.records_created,
tenant_id=tenant_id)
return result
except Exception as e:
logger.error("Final sales data import failed", error=str(e), tenant_id=tenant_id)
return OnboardingImportResult(
success=False,
import_details=SalesImportResult(
success=False,
records_processed=0,
records_created=0,
records_updated=0,
records_failed=0,
errors=[{
"type": "import_error",
"message": f"Import failed: {str(e)}",
"field": None,
"row": None,
"code": "FINAL_IMPORT_ERROR"
}],
warnings=[],
processing_time_seconds=0.0
),
inventory_items_created=len(inventory_mapping),
inventory_creation_errors=[str(e)],
final_summary={
"status": "failed",
"error_message": str(e),
"onboarding_complete": False
}
)
# ================================================================
# HELPER METHODS
# ================================================================
async def _analyze_product_sales_data(
self,
product_list: List[str],
file_data: str,
file_format: str
) -> Dict[str, Dict[str, Any]]:
"""Analyze sales data for each product to provide context for AI classification"""
try:
if file_format.lower() != "csv":
return {}
import csv
import io
reader = csv.DictReader(io.StringIO(file_data))
rows = list(reader)
if not rows:
return {}
# Use data_import_service column detection
column_mapping = self.data_import_service._detect_columns(list(rows[0].keys()))
if not column_mapping.get('product'):
return {}
product_column = column_mapping['product']
quantity_column = column_mapping.get('quantity')
revenue_column = column_mapping.get('revenue')
date_column = column_mapping.get('date')
# Analyze each product
product_analysis = {}
for product_name in product_list:
# Find all rows for this product
product_rows = [
row for row in rows
if self.data_import_service._clean_product_name(row.get(product_column, '')) == product_name
]
if not product_rows:
continue
# Calculate metrics
total_quantity = 0
total_revenue = 0
sales_count = len(product_rows)
for row in product_rows:
try:
# Quantity
qty_raw = row.get(quantity_column, 1)
if qty_raw and str(qty_raw).strip():
qty = int(float(str(qty_raw).replace(',', '.')))
total_quantity += qty
else:
total_quantity += 1
# Revenue
if revenue_column:
rev_raw = row.get(revenue_column)
if rev_raw and str(rev_raw).strip():
rev = float(str(rev_raw).replace(',', '.').replace('€', '').replace('$', '').strip())
total_revenue += rev
except (ValueError, TypeError):
continue
avg_quantity = total_quantity / sales_count if sales_count > 0 else 0
avg_revenue = total_revenue / sales_count if sales_count > 0 else 0
avg_unit_price = total_revenue / total_quantity if total_quantity > 0 else 0
product_analysis[product_name] = {
"total_quantity": total_quantity,
"total_revenue": total_revenue,
"sales_count": sales_count,
"avg_quantity_per_sale": avg_quantity,
"avg_revenue_per_sale": avg_revenue,
"avg_unit_price": avg_unit_price
}
return product_analysis
except Exception as e:
logger.warning("Failed to analyze product sales data", error=str(e))
return {}
# Factory function for dependency injection
def get_ai_onboarding_service() -> AIOnboardingService:
"""Get AI onboarding service instance"""
return AIOnboardingService()
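For orientation, a minimal sketch of how a route could chain step 3 (inventory creation) and step 4 (final import) of this service. The endpoint path, payload keys, the argument order of `create_inventory_from_suggestions`, and the `id` field on created inventory items are assumptions for illustration, not the service's confirmed API.
```python
# Hypothetical onboarding route: chains step 3 and step 4 of AIOnboardingService.
# Path, payload keys, and the "id" field on created items are assumptions.
from uuid import UUID
from fastapi import APIRouter, Depends

from app.services.ai_onboarding_service import (  # module path assumed
    AIOnboardingService,
    get_ai_onboarding_service,
)

router = APIRouter()

@router.post("/tenants/{tenant_id}/onboarding/complete-import")
async def complete_onboarding_import(
    tenant_id: UUID,
    payload: dict,  # expects: approved_suggestions, file_data, file_format, filename
    service: AIOnboardingService = Depends(get_ai_onboarding_service),
):
    # Step 3: create inventory items from the user-approved suggestions
    creation = await service.create_inventory_from_suggestions(
        payload["approved_suggestions"], tenant_id
    )
    # Map original product names to the new inventory item IDs
    mapping = {
        item["original_name"]: item["inventory_item"]["id"]
        for item in creation["created_items"]
    }
    # Step 4: run the final sales import using the mapping
    return await service.import_sales_data_with_inventory(
        file_data=payload["file_data"],
        file_format=payload["file_format"],
        inventory_mapping=mapping,
        tenant_id=tenant_id,
        filename=payload.get("filename"),
    )
```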

View File

@@ -4,101 +4,71 @@ Inventory Service Client - Inter-service communication
Handles communication with the inventory service to fetch product data
"""
import httpx
import structlog
from typing import Dict, Any, List, Optional
from uuid import UUID
from shared.clients.inventory_client import InventoryServiceClient as SharedInventoryClient
from app.core.config import settings
logger = structlog.get_logger()
class InventoryServiceClient:
"""Client for communicating with the inventory service"""
"""Client for communicating with the inventory service via shared client"""
def __init__(self):
self.base_url = settings.INVENTORY_SERVICE_URL
self.timeout = 30.0
self._shared_client = SharedInventoryClient(settings)
async def classify_products_batch(self, product_list: Dict[str, Any], tenant_id: UUID) -> Optional[Dict[str, Any]]:
"""Get product details from inventory service by ID"""
async def classify_products_batch(self, product_list: List[Dict[str, Any]], tenant_id: UUID) -> Optional[Dict[str, Any]]:
"""Classify multiple products for inventory creation"""
try:
async with httpx.AsyncClient(timeout=self.timeout) as client:
response = await client.post(
f"{self.base_url}/api/v1/tenants/{tenant_id}/inventory/classify-products-batch",
headers=self._get_headers(),
product_list=product_list
)
# Convert product_list to expected format for shared client
products = []
for item in product_list:
if isinstance(item, str):
# If it's just a product name
products.append({"product_name": item})
elif isinstance(item, dict):
# If it's already a dict, ensure required fields
product_data = {
"product_name": item.get("product_name", item.get("name", str(item))),
"sales_volume": item.get("sales_volume", item.get("total_quantity"))
}
products.append(product_data)
if response.status_code == 200:
product_data = response.json()
logger.info("Retrieved product from inventory service",
tenant_id=tenant_id)
return product_data
elif response.status_code == 404:
logger.warning("Product not found in inventory service",
tenant_id=tenant_id)
return None
else:
logger.error("Failed to fetch product from inventory service",
status_code=response.status_code,
tenant_id=tenant_id)
return None
result = await self._shared_client.classify_products_batch(products, str(tenant_id))
if result:
logger.info("Classified products batch",
count=len(products), tenant_id=tenant_id)
return result
except httpx.TimeoutException:
logger.error("Timeout fetching product from inventory service",
tenant_id=tenant_id)
return None
except Exception as e:
logger.error("Error communicating with inventory service",
logger.error("Error in batch product classification",
error=str(e), tenant_id=tenant_id)
return None
async def get_product_by_id(self, product_id: UUID, tenant_id: UUID) -> Optional[Dict[str, Any]]:
"""Get product details from inventory service by ID"""
try:
async with httpx.AsyncClient(timeout=self.timeout) as client:
response = await client.get(
f"{self.base_url}/api/v1/tenants/{tenant_id}/ingredients/{product_id}",
headers=self._get_headers()
)
if response.status_code == 200:
product_data = response.json()
result = await self._shared_client.get_ingredient_by_id(product_id, str(tenant_id))
if result:
logger.info("Retrieved product from inventory service",
product_id=product_id, tenant_id=tenant_id)
return product_data
elif response.status_code == 404:
logger.warning("Product not found in inventory service",
product_id=product_id, tenant_id=tenant_id)
return None
else:
logger.error("Failed to fetch product from inventory service",
status_code=response.status_code,
product_id=product_id, tenant_id=tenant_id)
return None
return result
except httpx.TimeoutException:
logger.error("Timeout fetching product from inventory service",
product_id=product_id, tenant_id=tenant_id)
return None
except Exception as e:
logger.error("Error communicating with inventory service",
logger.error("Error fetching product by ID",
error=str(e), product_id=product_id, tenant_id=tenant_id)
return None
async def get_product_by_sku(self, sku: str, tenant_id: UUID) -> Optional[Dict[str, Any]]:
"""Get product details from inventory service by SKU"""
try:
async with httpx.AsyncClient(timeout=self.timeout) as client:
response = await client.get(
f"{self.base_url}/api/v1/tenants/{tenant_id}/ingredients",
params={"sku": sku, "limit": 1},
headers=self._get_headers()
# Search for product by SKU using shared client
products = await self._shared_client.search_ingredients(
str(tenant_id), search=sku, limit=1
)
if response.status_code == 200:
data = response.json()
products = data.get("items", [])
if products:
product_data = products[0]
logger.info("Retrieved product by SKU from inventory service",
@@ -108,18 +78,9 @@ class InventoryServiceClient:
logger.warning("Product not found by SKU in inventory service",
sku=sku, tenant_id=tenant_id)
return None
else:
logger.error("Failed to fetch product by SKU from inventory service",
status_code=response.status_code,
sku=sku, tenant_id=tenant_id)
return None
except httpx.TimeoutException:
logger.error("Timeout fetching product by SKU from inventory service",
sku=sku, tenant_id=tenant_id)
return None
except Exception as e:
logger.error("Error communicating with inventory service for SKU",
logger.error("Error fetching product by SKU",
error=str(e), sku=sku, tenant_id=tenant_id)
return None
@@ -127,38 +88,16 @@ class InventoryServiceClient:
product_type: Optional[str] = None) -> List[Dict[str, Any]]:
"""Search products in inventory service"""
try:
params = {
"search": search_term,
"limit": 50
}
if product_type:
params["product_type"] = product_type
async with httpx.AsyncClient(timeout=self.timeout) as client:
response = await client.get(
f"{self.base_url}/api/v1/tenants/{tenant_id}/ingredients",
params=params,
headers=self._get_headers()
products = await self._shared_client.search_ingredients(
str(tenant_id), search=search_term, limit=50
)
if response.status_code == 200:
data = response.json()
products = data.get("items", [])
logger.info("Searched products in inventory service",
search_term=search_term, count=len(products), tenant_id=tenant_id)
return products
else:
logger.error("Failed to search products in inventory service",
status_code=response.status_code,
search_term=search_term, tenant_id=tenant_id)
return []
except httpx.TimeoutException:
logger.error("Timeout searching products in inventory service",
search_term=search_term, tenant_id=tenant_id)
return []
except Exception as e:
logger.error("Error searching products in inventory service",
logger.error("Error searching products",
error=str(e), search_term=search_term, tenant_id=tenant_id)
return []
@@ -166,56 +105,19 @@ class InventoryServiceClient:
product_type: Optional[str] = None) -> List[Dict[str, Any]]:
"""Get products by category from inventory service"""
try:
params = {
"limit": 100
}
if product_type == "ingredient":
params["ingredient_category"] = category
elif product_type == "finished_product":
params["product_category"] = category
else:
# Search in both categories if type not specified
params["category"] = category
async with httpx.AsyncClient(timeout=self.timeout) as client:
response = await client.get(
f"{self.base_url}/api/v1/tenants/{tenant_id}/ingredients",
params=params,
headers=self._get_headers()
products = await self._shared_client.search_ingredients(
str(tenant_id), category=category, limit=100
)
if response.status_code == 200:
data = response.json()
products = data.get("items", [])
logger.info("Retrieved products by category from inventory service",
category=category, count=len(products), tenant_id=tenant_id)
return products
else:
logger.error("Failed to fetch products by category from inventory service",
status_code=response.status_code,
category=category, tenant_id=tenant_id)
return []
except httpx.TimeoutException:
logger.error("Timeout fetching products by category from inventory service",
category=category, tenant_id=tenant_id)
return []
except Exception as e:
logger.error("Error fetching products by category from inventory service",
logger.error("Error fetching products by category",
error=str(e), category=category, tenant_id=tenant_id)
return []
# Cache synchronization removed - no longer needed with pure inventory reference approach
def _get_headers(self) -> Dict[str, str]:
"""Get headers for inventory service requests"""
return {
"Content-Type": "application/json",
"X-Service-Name": "sales-service",
# Add authentication headers if needed
}
# Dependency injection
async def get_inventory_client() -> InventoryServiceClient:
"""Get inventory service client instance"""

View File

@@ -1,446 +0,0 @@
# services/sales/app/services/onboarding_import_service.py
"""
Onboarding Data Import Service
Handles historical sales data import with automated inventory creation
"""
import pandas as pd
import structlog
from typing import List, Dict, Any, Optional, Tuple
from uuid import UUID, uuid4
from datetime import datetime, timezone
from dataclasses import dataclass, asdict
import asyncio
import io
from app.services.inventory_client import InventoryServiceClient
from app.services.data_import_service import DataImportService
from app.models.sales import SalesData
from app.core.database import get_db_transaction
from app.repositories.sales_repository import SalesRepository
logger = structlog.get_logger()
@dataclass
class OnboardingImportResult:
"""Result of onboarding import process"""
total_products_found: int
inventory_suggestions: List[Dict[str, Any]]
business_model_analysis: Dict[str, Any]
import_job_id: UUID
status: str
processed_rows: int
successful_imports: int
failed_imports: int
errors: List[str]
warnings: List[str]
@dataclass
class InventoryCreationRequest:
"""Request to create inventory item from suggestion"""
suggestion_id: str
approved: bool
modifications: Dict[str, Any] # User modifications to the suggestion
class OnboardingImportService:
"""Service for handling onboarding data import with inventory automation"""
def __init__(self):
self.inventory_client = InventoryServiceClient()
self.data_import_service = DataImportService()
async def analyze_sales_data_for_onboarding(
self,
file_content: bytes,
filename: str,
tenant_id: UUID,
user_id: UUID
) -> OnboardingImportResult:
"""Analyze uploaded sales data and suggest inventory items"""
try:
logger.info("Starting onboarding analysis", filename=filename, tenant_id=tenant_id)
# Parse the uploaded file
df = await self._parse_uploaded_file(file_content, filename)
# Extract unique products and their sales volumes
product_analysis = self._analyze_products_from_sales(df)
# Get product suggestions from inventory service
inventory_suggestions = await self._get_inventory_suggestions(
product_analysis, tenant_id
)
# Analyze business model
business_model = self._analyze_business_model(inventory_suggestions)
# Create import job for tracking
import_job_id = await self._create_import_job(
filename, tenant_id, user_id, len(df)
)
result = OnboardingImportResult(
total_products_found=len(product_analysis),
inventory_suggestions=inventory_suggestions,
business_model_analysis=business_model,
import_job_id=import_job_id,
status="analysis_complete",
processed_rows=len(df),
successful_imports=0, # Will be updated when user confirms
failed_imports=0,
errors=[],
warnings=self._generate_warnings(df, inventory_suggestions)
)
logger.info("Onboarding analysis complete",
products_found=len(product_analysis),
business_model=business_model.get('model'),
tenant_id=tenant_id)
return result
except Exception as e:
logger.error("Failed onboarding analysis", error=str(e), tenant_id=tenant_id)
raise
async def create_inventory_from_suggestions(
self,
suggestions_approval: List[InventoryCreationRequest],
tenant_id: UUID,
user_id: UUID
) -> Dict[str, Any]:
"""Create inventory items from approved suggestions"""
try:
created_items = []
failed_items = []
for request in suggestions_approval:
if request.approved:
try:
# Find the original suggestion
suggestion = self._find_suggestion_by_id(request.suggestion_id)
if not suggestion:
failed_items.append({
'suggestion_id': request.suggestion_id,
'error': 'Suggestion not found'
})
continue
# Apply user modifications
final_item_data = self._apply_modifications(suggestion, request.modifications)
# Create inventory item via inventory service
created_item = await self._create_inventory_item(
final_item_data, tenant_id, user_id
)
created_items.append(created_item)
except Exception as e:
logger.error("Failed to create inventory item",
error=str(e), suggestion_id=request.suggestion_id)
failed_items.append({
'suggestion_id': request.suggestion_id,
'error': str(e)
})
logger.info("Inventory creation complete",
created=len(created_items), failed=len(failed_items), tenant_id=tenant_id)
return {
'created_items': created_items,
'failed_items': failed_items,
'total_approved': len([r for r in suggestions_approval if r.approved]),
'success_rate': len(created_items) / max(1, len([r for r in suggestions_approval if r.approved]))
}
except Exception as e:
logger.error("Failed inventory creation", error=str(e), tenant_id=tenant_id)
raise
async def import_sales_data_with_inventory(
self,
file_content: bytes,
filename: str,
tenant_id: UUID,
user_id: UUID,
inventory_mapping: Dict[str, UUID] # product_name -> inventory_product_id
) -> OnboardingImportResult:
"""Import sales data using created inventory items"""
try:
logger.info("Starting sales import with inventory mapping",
filename=filename, products_mapped=len(inventory_mapping), tenant_id=tenant_id)
# Parse the file again
df = await self._parse_uploaded_file(file_content, filename)
# Add inventory product IDs to the data
df_with_inventory = self._map_products_to_inventory(df, inventory_mapping)
# Import the sales data using the standard import service
import_result = await self._import_sales_with_inventory_ids(
df_with_inventory, tenant_id, user_id, filename
)
result = OnboardingImportResult(
total_products_found=len(inventory_mapping),
inventory_suggestions=[], # Already processed
business_model_analysis={}, # Already analyzed
import_job_id=import_result['job_id'],
status="import_complete",
processed_rows=import_result['processed_rows'],
successful_imports=import_result['successful_imports'],
failed_imports=import_result['failed_imports'],
errors=import_result.get('errors', []),
warnings=import_result.get('warnings', [])
)
logger.info("Sales import complete",
successful=result.successful_imports,
failed=result.failed_imports,
tenant_id=tenant_id)
return result
except Exception as e:
logger.error("Failed sales import", error=str(e), tenant_id=tenant_id)
raise
async def _parse_uploaded_file(self, file_content: bytes, filename: str) -> pd.DataFrame:
"""Parse uploaded CSV/Excel file"""
try:
if filename.endswith('.csv'):
# Try different encodings
for encoding in ['utf-8', 'latin-1', 'cp1252']:
try:
df = pd.read_csv(io.BytesIO(file_content), encoding=encoding)
break
except UnicodeDecodeError:
continue
else:
raise ValueError("Could not decode CSV file with any supported encoding")
elif filename.endswith(('.xlsx', '.xls')):
df = pd.read_excel(io.BytesIO(file_content))
else:
raise ValueError(f"Unsupported file format: {filename}")
# Validate required columns exist
required_columns = ['product_name', 'quantity_sold', 'revenue', 'date']
missing_columns = [col for col in required_columns if col not in df.columns]
if missing_columns:
raise ValueError(f"Missing required columns: {missing_columns}")
# Clean the data
df = df.dropna(subset=['product_name', 'quantity_sold', 'revenue'])
df['date'] = pd.to_datetime(df['date'], errors='coerce')
df = df.dropna(subset=['date'])
logger.info("File parsed successfully", rows=len(df), columns=list(df.columns))
return df
except Exception as e:
logger.error("Failed to parse file", error=str(e), filename=filename)
raise
def _analyze_products_from_sales(self, df: pd.DataFrame) -> Dict[str, Dict[str, Any]]:
"""Extract and analyze products from sales data"""
# Group by product name and calculate metrics
product_stats = df.groupby('product_name').agg({
'quantity_sold': ['sum', 'mean', 'count'],
'revenue': ['sum', 'mean'],
'date': ['min', 'max']
}).round(2)
# Flatten column names
product_stats.columns = ['_'.join(col).strip() for col in product_stats.columns.values]
# Convert to dictionary with analysis
products = {}
for product_name in product_stats.index:
stats = product_stats.loc[product_name]
products[product_name] = {
'name': product_name,
'total_quantity': float(stats['quantity_sold_sum']),
'avg_quantity_per_sale': float(stats['quantity_sold_mean']),
'total_sales_count': int(stats['quantity_sold_count']),
'total_revenue': float(stats['revenue_sum']),
'avg_revenue_per_sale': float(stats['revenue_mean']),
'first_sale_date': stats['date_min'],
'last_sale_date': stats['date_max'],
'avg_unit_price': float(stats['revenue_sum'] / stats['quantity_sold_sum']) if stats['quantity_sold_sum'] > 0 else 0
}
logger.info("Product analysis complete", unique_products=len(products))
return products
async def _get_inventory_suggestions(
self,
product_analysis: Dict[str, Dict[str, Any]],
tenant_id: UUID
) -> List[Dict[str, Any]]:
"""Get inventory suggestions from inventory service"""
try:
# Call inventory service classification API
product_names = list(product_analysis.keys())
suggestions = await self.inventory_client.classify_products_batch(product_names, tenant_id)
return suggestions
except Exception as e:
logger.error("Failed to get inventory suggestions", error=str(e))
# Return fallback suggestions for all products
return [self._create_fallback_suggestion(name, stats)
for name, stats in product_analysis.items()]
def _create_fallback_suggestion(self, product_name: str, stats: Dict[str, Any]) -> Dict[str, Any]:
"""Create fallback suggestion when AI classification fails"""
return {
'suggestion_id': str(uuid4()),
'original_name': product_name,
'suggested_name': product_name.title(),
'product_type': 'finished_product',
'category': 'other_products',
'unit_of_measure': 'units',
'confidence_score': 0.3,
'estimated_shelf_life_days': 3,
'requires_refrigeration': False,
'requires_freezing': False,
'is_seasonal': False,
'notes': 'Fallback suggestion - requires manual review',
'original_sales_data': stats
}
def _analyze_business_model(self, suggestions: List[Dict[str, Any]]) -> Dict[str, Any]:
"""Analyze business model from suggestions"""
if not suggestions:
return {'model': 'unknown', 'confidence': 0.0}
ingredient_count = sum(1 for s in suggestions if s.get('product_type') == 'ingredient')
finished_count = sum(1 for s in suggestions if s.get('product_type') == 'finished_product')
total = len(suggestions)
ingredient_ratio = ingredient_count / total if total > 0 else 0
if ingredient_ratio >= 0.7:
model = 'production'
elif ingredient_ratio <= 0.3:
model = 'retail'
else:
model = 'hybrid'
confidence = max(abs(ingredient_ratio - 0.5) * 2, 0.1)
return {
'model': model,
'confidence': confidence,
'ingredient_count': ingredient_count,
'finished_product_count': finished_count,
'ingredient_ratio': ingredient_ratio,
'recommendations': self._get_model_recommendations(model)
}
def _get_model_recommendations(self, model: str) -> List[str]:
"""Get recommendations based on business model"""
recommendations = {
'production': [
'Set up supplier relationships for ingredients',
'Configure recipe management',
'Enable production cost tracking',
'Set up ingredient inventory alerts'
],
'retail': [
'Configure central baker relationships',
'Set up delivery tracking',
'Enable freshness monitoring',
'Focus on sales forecasting'
],
'hybrid': [
'Configure both production and retail features',
'Set up flexible inventory management',
'Enable comprehensive analytics'
]
}
return recommendations.get(model, [])
async def _create_import_job(
self,
filename: str,
tenant_id: UUID,
user_id: UUID,
total_rows: int
) -> UUID:
"""Create import job for tracking"""
try:
async with get_db_transaction() as db:
from app.models.sales import SalesImportJob
job = SalesImportJob(
id=uuid4(),
tenant_id=tenant_id,
filename=filename,
import_type='onboarding_csv',
status='analyzing',
total_rows=total_rows,
created_by=user_id
)
db.add(job)
await db.commit()
logger.info("Import job created", job_id=job.id, tenant_id=tenant_id)
return job.id
except Exception as e:
logger.error("Failed to create import job", error=str(e))
return uuid4() # Return dummy ID if job creation fails
def _generate_warnings(self, df: pd.DataFrame, suggestions: List[Dict[str, Any]]) -> List[str]:
"""Generate warnings about data quality"""
warnings = []
# Check for low confidence suggestions
low_confidence = [s for s in suggestions if s.get('confidence_score', 1.0) < 0.6]
if low_confidence:
warnings.append(f"{len(low_confidence)} products have low classification confidence and may need manual review")
# Check for missing data
missing_prices = df[df['revenue'].isna() | (df['revenue'] == 0)].shape[0]
if missing_prices > 0:
warnings.append(f"{missing_prices} sales records have missing or zero revenue")
# Check for old data
latest_date = df['date'].max()
if pd.Timestamp.now() - latest_date > pd.Timedelta(days=90):
warnings.append("Sales data appears to be more than 90 days old")
return warnings
# Additional helper methods would be implemented here...
# _find_suggestion_by_id, _apply_modifications, _create_inventory_item, etc.
# Dependency injection
def get_onboarding_import_service() -> OnboardingImportService:
"""Get onboarding import service instance"""
return OnboardingImportService()

View File

@@ -1,391 +0,0 @@
# Enhanced Inter-Service Communication System
This directory contains the enhanced inter-service communication system that integrates with the new repository pattern architecture. The system provides circuit breakers, caching, monitoring, and event tracking for all service-to-service communications.
## Architecture Overview
### Base Components
1. **BaseServiceClient** - Foundation class providing authentication, retries, and basic HTTP operations
2. **EnhancedServiceClient** - Adds circuit breaker, caching, and monitoring capabilities
3. **ServiceRegistry** - Central registry for managing all enhanced service clients
### Enhanced Service Clients
Each service has a specialized enhanced client:
- **SalesServiceClient** - Sales data, products, data import with optimized caching
- **ExternalServiceClient** - Weather and traffic data collection with external API integration
- **EnhancedAuthServiceClient** - Authentication, user management, permissions with security focus
- **EnhancedTrainingServiceClient** - ML training, model management, deployment with pipeline monitoring
- **EnhancedForecastingServiceClient** - Forecasting, predictions, scenarios with analytics
- **EnhancedTenantServiceClient** - Tenant management, memberships, organization features
- **EnhancedNotificationServiceClient** - Notifications, templates, delivery tracking
## Key Features
### Circuit Breaker Pattern
- **States**: Closed (normal), Open (failing), Half-Open (testing recovery)
- **Configuration**: Failure threshold, recovery timeout, success threshold
- **Monitoring**: State changes tracked and logged
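The sketch below illustrates these three states and transitions under the thresholds listed above; it is a simplified stand-in, not the actual `EnhancedServiceClient` breaker.
```python
# Illustrative sketch of the three circuit breaker states; not the real implementation.
import time

class SimpleCircuitBreaker:
    def __init__(self, failure_threshold=5, recovery_timeout=60, success_threshold=2):
        self.failure_threshold = failure_threshold
        self.recovery_timeout = recovery_timeout
        self.success_threshold = success_threshold
        self.state = "closed"
        self.failures = 0
        self.successes = 0
        self.opened_at = 0.0

    def allow_request(self) -> bool:
        if self.state == "open":
            # After the recovery timeout, let a trial request through (half-open)
            if time.monotonic() - self.opened_at >= self.recovery_timeout:
                self.state = "half_open"
                return True
            return False
        return True

    def record_success(self) -> None:
        if self.state == "half_open":
            self.successes += 1
            if self.successes >= self.success_threshold:
                self.state, self.failures, self.successes = "closed", 0, 0
        else:
            self.failures = 0

    def record_failure(self) -> None:
        self.failures += 1
        if self.state == "half_open" or self.failures >= self.failure_threshold:
            self.state = "open"
            self.opened_at = time.monotonic()
            self.successes = 0
```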
### Intelligent Caching
- **TTL-based**: Different cache durations for different data types
- **Invalidation**: Pattern-based cache invalidation on updates
- **Statistics**: Hit/miss ratios and performance metrics
- **Manual Control**: Clear specific cache patterns when needed
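A minimal TTL cache sketch showing the behaviours listed above (TTL entries, pattern invalidation, hit/miss counters); the enhanced clients' real cache keys, eviction, and statistics are richer than this.
```python
# Minimal TTL cache sketch; not the clients' actual cache implementation.
import time
from typing import Any, Dict, Optional, Tuple

class TTLCache:
    def __init__(self):
        self._entries: Dict[str, Tuple[float, Any]] = {}
        self.hits = 0
        self.misses = 0

    def set(self, key: str, value: Any, ttl_seconds: int) -> None:
        self._entries[key] = (time.monotonic() + ttl_seconds, value)

    def get(self, key: str) -> Optional[Any]:
        entry = self._entries.get(key)
        if entry and entry[0] > time.monotonic():
            self.hits += 1
            return entry[1]
        # Missing or expired: drop the entry and count a miss
        self._entries.pop(key, None)
        self.misses += 1
        return None

    def invalidate(self, pattern: str) -> int:
        # Pattern-based invalidation, e.g. invalidate("sales:tenant-123")
        stale = [k for k in self._entries if pattern in k]
        for k in stale:
            del self._entries[k]
        return len(stale)
```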
### Event Integration
- **Repository Events**: Entity created/updated/deleted events
- **Correlation IDs**: Track operations across services
- **Metadata**: Rich event metadata for debugging and monitoring
### Monitoring & Metrics
- **Request Metrics**: Success/failure rates, latencies
- **Cache Metrics**: Hit rates, entry counts
- **Circuit Breaker Metrics**: State changes, failure counts
- **Health Checks**: Per-service and aggregate health status
## Usage Examples
### Basic Usage with Service Registry
```python
from shared.clients.enhanced_service_client import ServiceRegistry
from shared.config.base import BaseServiceSettings
# Initialize registry
config = BaseServiceSettings()
registry = ServiceRegistry(config, calling_service="forecasting")
# Get enhanced clients
data_client = registry.get_data_client()
auth_client = registry.get_auth_client()
training_client = registry.get_training_client()
# Use with full features
sales_data = await data_client.get_all_sales_data_with_monitoring(
tenant_id="tenant-123",
start_date="2024-01-01",
end_date="2024-12-31",
correlation_id="forecast-job-456"
)
```
### Data Service Operations
```python
# Get sales data with intelligent caching
sales_data = await data_client.get_sales_data_cached(
tenant_id="tenant-123",
start_date="2024-01-01",
end_date="2024-01-31",
aggregation="daily"
)
# Upload sales data with cache invalidation and events
result = await data_client.upload_sales_data_with_events(
tenant_id="tenant-123",
sales_data=sales_records,
correlation_id="data-import-789"
)
# Get weather data with caching (30 min TTL)
weather_data = await data_client.get_weather_historical_cached(
tenant_id="tenant-123",
start_date="2024-01-01",
end_date="2024-01-31"
)
```
### Authentication & User Management
```python
# Authenticate with security monitoring
auth_result = await auth_client.authenticate_user_cached(
email="user@example.com",
password="password"
)
# Check permissions with caching
has_access = await auth_client.check_user_permissions_cached(
user_id="user-123",
tenant_id="tenant-456",
resource="sales_data",
action="read"
)
# Create user with events
user = await auth_client.create_user_with_events(
user_data={
"email": "new@example.com",
"name": "New User",
"role": "analyst"
},
tenant_id="tenant-123",
correlation_id="user-creation-789"
)
```
### Training & ML Operations
```python
# Create training job with monitoring
job = await training_client.create_training_job_with_monitoring(
tenant_id="tenant-123",
include_weather=True,
include_traffic=False,
min_data_points=30,
correlation_id="training-pipeline-456"
)
# Get active model with caching
model = await training_client.get_active_model_for_product_cached(
tenant_id="tenant-123",
product_name="croissants"
)
# Deploy model with events
deployment = await training_client.deploy_model_with_events(
tenant_id="tenant-123",
model_id="model-789",
correlation_id="deployment-123"
)
# Get pipeline status
status = await training_client.get_training_pipeline_status("tenant-123")
```
### Forecasting & Predictions
```python
# Create forecast with monitoring
forecast = await forecasting_client.create_forecast_with_monitoring(
tenant_id="tenant-123",
model_id="model-456",
start_date="2024-02-01",
end_date="2024-02-29",
correlation_id="forecast-creation-789"
)
# Get predictions with caching
predictions = await forecasting_client.get_predictions_cached(
tenant_id="tenant-123",
forecast_id="forecast-456",
start_date="2024-02-01",
end_date="2024-02-07"
)
# Real-time prediction with caching
prediction = await forecasting_client.create_realtime_prediction_with_monitoring(
tenant_id="tenant-123",
model_id="model-456",
target_date="2024-02-01",
features={"temperature": 20, "day_of_week": 1},
correlation_id="realtime-pred-123"
)
# Get forecasting dashboard
dashboard = await forecasting_client.get_forecasting_dashboard("tenant-123")
```
### Tenant Management
```python
# Create tenant with monitoring
tenant = await tenant_client.create_tenant_with_monitoring(
name="New Bakery Chain",
owner_id="user-123",
description="Multi-location bakery chain",
correlation_id="tenant-creation-456"
)
# Add member with events
membership = await tenant_client.add_tenant_member_with_events(
tenant_id="tenant-123",
user_id="user-456",
role="manager",
correlation_id="member-add-789"
)
# Get tenant analytics
analytics = await tenant_client.get_tenant_analytics("tenant-123")
```
### Notification Management
```python
# Send notification with monitoring
notification = await notification_client.send_notification_with_monitoring(
recipient_id="user-123",
notification_type="forecast_ready",
title="Forecast Complete",
message="Your weekly forecast is ready for review",
tenant_id="tenant-456",
priority="high",
channels=["email", "in_app"],
correlation_id="forecast-notification-789"
)
# Send bulk notification
bulk_result = await notification_client.send_bulk_notification_with_monitoring(
recipients=["user-123", "user-456", "user-789"],
notification_type="system_update",
title="System Maintenance",
message="Scheduled maintenance tonight at 2 AM",
priority="normal",
correlation_id="maintenance-notification-123"
)
# Get delivery analytics
analytics = await notification_client.get_delivery_analytics(
tenant_id="tenant-123",
start_date="2024-01-01",
end_date="2024-01-31"
)
```
## Health Monitoring
### Individual Service Health
```python
# Get specific service health
data_health = data_client.get_data_service_health()
auth_health = auth_client.get_auth_service_health()
training_health = training_client.get_training_service_health()
# Health includes:
# - Circuit breaker status
# - Cache statistics and configuration
# - Service-specific features
# - Supported endpoints
```
### Registry-Level Health
```python
# Get all service health status
all_health = registry.get_all_health_status()
# Get aggregate metrics
metrics = registry.get_aggregate_metrics()
# Returns:
# - Total cache hits/misses and hit rate
# - Circuit breaker states for all services
# - Count of healthy vs total services
```
## Configuration
### Cache TTL Configuration
Each enhanced client has optimized cache TTL values:
```python
# Data Service
sales_cache_ttl = 600 # 10 minutes
weather_cache_ttl = 1800 # 30 minutes
traffic_cache_ttl = 3600 # 1 hour
product_cache_ttl = 300 # 5 minutes
# Auth Service
user_cache_ttl = 300 # 5 minutes
token_cache_ttl = 60 # 1 minute
permission_cache_ttl = 900 # 15 minutes
# Training Service
job_cache_ttl = 180 # 3 minutes
model_cache_ttl = 600 # 10 minutes
metrics_cache_ttl = 300 # 5 minutes
# And so on...
```
### Circuit Breaker Configuration
```python
CircuitBreakerConfig(
failure_threshold=5, # Failures before opening
recovery_timeout=60, # Seconds before testing recovery
success_threshold=2, # Successes needed to close
timeout=30 # Request timeout in seconds
)
```
## Event System Integration
All enhanced clients integrate with the enhanced event system:
### Event Types
- **EntityCreatedEvent** - When entities are created
- **EntityUpdatedEvent** - When entities are modified
- **EntityDeletedEvent** - When entities are removed
### Event Metadata
- **correlation_id** - Track operations across services
- **source_service** - Service that generated the event
- **destination_service** - Target service
- **tenant_id** - Tenant context
- **user_id** - User context
- **tags** - Additional metadata
### Usage in Enhanced Clients
Events are automatically published for:
- Data uploads and modifications
- User creation/updates/deletion
- Training job lifecycle
- Model deployments
- Forecast creation
- Tenant management operations
- Notification delivery
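For illustration, the dataclass below mirrors the event metadata listed above; it is a sketch, not the event system's actual `EntityCreatedEvent` class, and the field names are assumptions drawn from that list.
```python
# Illustration only: a dataclass mirroring the event metadata listed above.
from dataclasses import dataclass, field
from typing import Dict, Optional

@dataclass
class EntityCreatedEventSketch:
    entity_type: str
    entity_id: str
    source_service: str
    tenant_id: str
    correlation_id: Optional[str] = None
    destination_service: Optional[str] = None
    user_id: Optional[str] = None
    tags: Dict[str, str] = field(default_factory=dict)

# Example: event a sales data upload might produce
event = EntityCreatedEventSketch(
    entity_type="sales_record_batch",
    entity_id="import-789",
    source_service="sales",
    tenant_id="tenant-123",
    correlation_id="data-import-789",
    tags={"rows": "1500"},
)
```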
## Error Handling & Resilience
### Circuit Breaker Protection
- Automatically stops requests when services are failing
- Provides fallback to cached data when available
- Gradually tests service recovery
### Retry Logic
- Exponential backoff for transient failures
- Configurable retry counts and delays
- Authentication token refresh on 401 errors
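A minimal exponential backoff helper illustrating the retry behaviour described above; the delays and retry counts are illustrative defaults, and the real clients additionally refresh tokens on 401 responses.
```python
# Illustrative exponential backoff helper; not the clients' built-in retry logic.
import asyncio
from typing import Optional

async def call_with_backoff(func, *args, retries: int = 3, base_delay: float = 0.5, **kwargs):
    last_error: Optional[Exception] = None
    for attempt in range(retries + 1):
        try:
            return await func(*args, **kwargs)
        except Exception as exc:  # in practice, retry only transient failures
            last_error = exc
            if attempt == retries:
                break
            # Exponential backoff: 0.5s, 1s, 2s, ...
            await asyncio.sleep(base_delay * (2 ** attempt))
    raise last_error
```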
### Cache Fallbacks
- Returns cached data when services are unavailable
- Graceful degradation with stale data warnings
- Manual cache invalidation for data consistency
## Integration with Repository Pattern
The enhanced clients seamlessly integrate with the new repository pattern:
### Service Layer Integration
```python
class ForecastingService:
def __init__(self,
forecast_repository: ForecastRepository,
service_registry: ServiceRegistry):
self.forecast_repository = forecast_repository
self.data_client = service_registry.get_data_client()
self.training_client = service_registry.get_training_client()
async def create_forecast(self, tenant_id: str, model_id: str):
# Get data through enhanced client
sales_data = await self.data_client.get_all_sales_data_with_monitoring(
tenant_id=tenant_id,
correlation_id=f"forecast_data_{datetime.utcnow().isoformat()}"
)
# Use repository for database operations
forecast = await self.forecast_repository.create({
"tenant_id": tenant_id,
"model_id": model_id,
"status": "pending"
})
return forecast
```
Together, these components provide an enhanced inter-service communication layer that integrates with the new repository pattern architecture, adding resilience, caching, monitoring, and event tracking to all service interactions.

View File

@@ -0,0 +1,347 @@
# shared/clients/inventory_client.py
"""
Inventory Service Client - Inter-service communication
Handles communication with the inventory service for all other services
"""
import structlog
from typing import Dict, Any, List, Optional, Union
from uuid import UUID
from shared.clients.base_service_client import BaseServiceClient
from shared.config.base import BaseServiceSettings
logger = structlog.get_logger()
class InventoryServiceClient(BaseServiceClient):
"""Client for communicating with the inventory service via gateway"""
def __init__(self, config: BaseServiceSettings):
super().__init__("inventory", config)
def get_service_base_path(self) -> str:
"""Return the base path for inventory service APIs"""
return "/api/v1"
# ================================================================
# INGREDIENT MANAGEMENT
# ================================================================
async def get_ingredient_by_id(self, ingredient_id: UUID, tenant_id: str) -> Optional[Dict[str, Any]]:
"""Get ingredient details by ID"""
try:
result = await self.get(f"ingredients/{ingredient_id}", tenant_id=tenant_id)
if result:
logger.info("Retrieved ingredient from inventory service",
ingredient_id=ingredient_id, tenant_id=tenant_id)
return result
except Exception as e:
logger.error("Error fetching ingredient by ID",
error=str(e), ingredient_id=ingredient_id, tenant_id=tenant_id)
return None
async def search_ingredients(
self,
tenant_id: str,
search: Optional[str] = None,
category: Optional[str] = None,
is_active: Optional[bool] = None,
skip: int = 0,
limit: int = 100
) -> List[Dict[str, Any]]:
"""Search ingredients with filters"""
try:
params = {
"skip": skip,
"limit": limit
}
if search:
params["search"] = search
if category:
params["category"] = category
if is_active is not None:
params["is_active"] = is_active
result = await self.get("ingredients", tenant_id=tenant_id, params=params)
ingredients = result if isinstance(result, list) else []
logger.info("Searched ingredients in inventory service",
search_term=search, count=len(ingredients), tenant_id=tenant_id)
return ingredients
except Exception as e:
logger.error("Error searching ingredients",
error=str(e), search=search, tenant_id=tenant_id)
return []
async def get_all_ingredients(self, tenant_id: str, is_active: Optional[bool] = True) -> List[Dict[str, Any]]:
"""Get all ingredients for a tenant (paginated)"""
try:
params = {}
if is_active is not None:
params["is_active"] = is_active
ingredients = await self.get_paginated("ingredients", tenant_id=tenant_id, params=params)
logger.info("Retrieved all ingredients from inventory service",
count=len(ingredients), tenant_id=tenant_id)
return ingredients
except Exception as e:
logger.error("Error fetching all ingredients",
error=str(e), tenant_id=tenant_id)
return []
async def create_ingredient(self, ingredient_data: Dict[str, Any], tenant_id: str) -> Optional[Dict[str, Any]]:
"""Create a new ingredient"""
try:
result = await self.post("ingredients", data=ingredient_data, tenant_id=tenant_id)
if result:
logger.info("Created ingredient in inventory service",
ingredient_name=ingredient_data.get('name'), tenant_id=tenant_id)
return result
except Exception as e:
logger.error("Error creating ingredient",
error=str(e), ingredient_data=ingredient_data, tenant_id=tenant_id)
return None
async def update_ingredient(
self,
ingredient_id: UUID,
ingredient_data: Dict[str, Any],
tenant_id: str
) -> Optional[Dict[str, Any]]:
"""Update an existing ingredient"""
try:
result = await self.put(f"ingredients/{ingredient_id}", data=ingredient_data, tenant_id=tenant_id)
if result:
logger.info("Updated ingredient in inventory service",
ingredient_id=ingredient_id, tenant_id=tenant_id)
return result
except Exception as e:
logger.error("Error updating ingredient",
error=str(e), ingredient_id=ingredient_id, tenant_id=tenant_id)
return None
async def delete_ingredient(self, ingredient_id: UUID, tenant_id: str) -> bool:
"""Delete (deactivate) an ingredient"""
try:
result = await self.delete(f"ingredients/{ingredient_id}", tenant_id=tenant_id)
success = result is not None
if success:
logger.info("Deleted ingredient in inventory service",
ingredient_id=ingredient_id, tenant_id=tenant_id)
return success
except Exception as e:
logger.error("Error deleting ingredient",
error=str(e), ingredient_id=ingredient_id, tenant_id=tenant_id)
return False
async def get_ingredient_stock(
self,
ingredient_id: UUID,
tenant_id: str,
include_unavailable: bool = False
) -> List[Dict[str, Any]]:
"""Get stock entries for an ingredient"""
try:
params = {}
if include_unavailable:
params["include_unavailable"] = include_unavailable
result = await self.get(f"ingredients/{ingredient_id}/stock", tenant_id=tenant_id, params=params)
stock_entries = result if isinstance(result, list) else []
logger.info("Retrieved ingredient stock from inventory service",
ingredient_id=ingredient_id, stock_count=len(stock_entries), tenant_id=tenant_id)
return stock_entries
except Exception as e:
logger.error("Error fetching ingredient stock",
error=str(e), ingredient_id=ingredient_id, tenant_id=tenant_id)
return []
# ================================================================
# STOCK MANAGEMENT
# ================================================================
async def get_stock_levels(self, tenant_id: str, ingredient_ids: Optional[List[UUID]] = None) -> List[Dict[str, Any]]:
"""Get current stock levels"""
try:
params = {}
if ingredient_ids:
params["ingredient_ids"] = [str(id) for id in ingredient_ids]
result = await self.get("stock", tenant_id=tenant_id, params=params)
stock_levels = result if isinstance(result, list) else []
logger.info("Retrieved stock levels from inventory service",
count=len(stock_levels), tenant_id=tenant_id)
return stock_levels
except Exception as e:
logger.error("Error fetching stock levels",
error=str(e), tenant_id=tenant_id)
return []
async def get_low_stock_alerts(self, tenant_id: str) -> List[Dict[str, Any]]:
"""Get low stock alerts"""
try:
result = await self.get("alerts", tenant_id=tenant_id, params={"type": "low_stock"})
alerts = result if isinstance(result, list) else []
logger.info("Retrieved low stock alerts from inventory service",
count=len(alerts), tenant_id=tenant_id)
return alerts
except Exception as e:
logger.error("Error fetching low stock alerts",
error=str(e), tenant_id=tenant_id)
return []
async def consume_stock(
self,
consumption_data: Dict[str, Any],
tenant_id: str
) -> Optional[Dict[str, Any]]:
"""Record stock consumption"""
try:
result = await self.post("stock/consume", data=consumption_data, tenant_id=tenant_id)
if result:
logger.info("Recorded stock consumption",
tenant_id=tenant_id)
return result
except Exception as e:
logger.error("Error recording stock consumption",
error=str(e), tenant_id=tenant_id)
return None
async def receive_stock(
self,
receipt_data: Dict[str, Any],
tenant_id: str
) -> Optional[Dict[str, Any]]:
"""Record stock receipt"""
try:
result = await self.post("stock/receive", data=receipt_data, tenant_id=tenant_id)
if result:
logger.info("Recorded stock receipt",
tenant_id=tenant_id)
return result
except Exception as e:
logger.error("Error recording stock receipt",
error=str(e), tenant_id=tenant_id)
return None
# ================================================================
# PRODUCT CLASSIFICATION (for onboarding)
# ================================================================
async def classify_product(
self,
product_name: str,
sales_volume: Optional[float],
tenant_id: str
) -> Optional[Dict[str, Any]]:
"""Classify a single product for inventory creation"""
try:
classification_data = {
"product_name": product_name,
"sales_volume": sales_volume
}
result = await self.post("inventory/classify-product", data=classification_data, tenant_id=tenant_id)
if result:
logger.info("Classified product",
product=product_name,
classification=result.get('product_type'),
confidence=result.get('confidence_score'),
tenant_id=tenant_id)
return result
except Exception as e:
logger.error("Error classifying product",
error=str(e), product=product_name, tenant_id=tenant_id)
return None
async def classify_products_batch(
self,
products: List[Dict[str, Any]],
tenant_id: str
) -> Optional[Dict[str, Any]]:
"""Classify multiple products for onboarding automation"""
try:
classification_data = {
"products": products
}
result = await self.post("inventory/classify-products-batch", data=classification_data, tenant_id=tenant_id)
if result:
suggestions = result.get('suggestions', [])
business_model = result.get('business_model_analysis', {}).get('model', 'unknown')
logger.info("Batch classification complete",
total_products=len(suggestions),
business_model=business_model,
tenant_id=tenant_id)
return result
except Exception as e:
logger.error("Error in batch classification",
error=str(e), products_count=len(products), tenant_id=tenant_id)
return None
# ================================================================
# DASHBOARD AND ANALYTICS
# ================================================================
async def get_inventory_dashboard(self, tenant_id: str) -> Optional[Dict[str, Any]]:
"""Get inventory dashboard data"""
try:
result = await self.get("dashboard", tenant_id=tenant_id)
if result:
logger.info("Retrieved inventory dashboard data", tenant_id=tenant_id)
return result
except Exception as e:
logger.error("Error fetching inventory dashboard",
error=str(e), tenant_id=tenant_id)
return None
async def get_inventory_summary(self, tenant_id: str) -> Optional[Dict[str, Any]]:
"""Get inventory summary statistics"""
try:
result = await self.get("dashboard/summary", tenant_id=tenant_id)
if result:
logger.info("Retrieved inventory summary", tenant_id=tenant_id)
return result
except Exception as e:
logger.error("Error fetching inventory summary",
error=str(e), tenant_id=tenant_id)
return None
# ================================================================
# UTILITY METHODS
# ================================================================
async def health_check(self) -> bool:
"""Check if inventory service is healthy"""
try:
result = await self.get("../health") # Health endpoint is not tenant-scoped
return result is not None
except Exception as e:
logger.error("Inventory service health check failed", error=str(e))
return False
# Factory function for dependency injection
def create_inventory_client(config: BaseServiceSettings) -> InventoryServiceClient:
"""Create inventory service client instance"""
return InventoryServiceClient(config)
# Convenience function for quick access (requires config to be passed)
async def get_inventory_client(config: BaseServiceSettings) -> InventoryServiceClient:
"""Get inventory service client instance"""
return create_inventory_client(config)
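A short usage sketch of the shared client from a consuming service; the settings import path, the 0.6 confidence cutoff, and the minimal ingredient payload are placeholders rather than the confirmed integration.
```python
# Usage sketch (assumed wiring): classify products during onboarding, then
# create the reasonably confident suggestions via the shared client.
from typing import List

from shared.clients.inventory_client import create_inventory_client
from app.core.config import settings  # any BaseServiceSettings subclass (path assumed)

async def classify_and_create(tenant_id: str, product_names: List[str]):
    client = create_inventory_client(settings)
    result = await client.classify_products_batch(
        [{"product_name": name} for name in product_names], tenant_id
    )
    created = []
    for suggestion in (result or {}).get("suggestions", []):
        # 0.6 is an illustrative confidence cutoff, not a system default
        if suggestion.get("confidence_score", 0) >= 0.6:
            item = await client.create_ingredient(
                {"name": suggestion.get("suggested_name"), "is_active": True},
                tenant_id,
            )
            if item:
                created.append(item)
    return created
```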