Fix new services implementation (part 5): wire Inventory/Sales pages into app navigation, map inventory service to ingredients endpoints, and harden training WebSocket handling

This commit is contained in:
Urtzi Alfaro
2025-08-15 17:53:59 +02:00
parent 03b4d4185d
commit f7de9115d1
43 changed files with 1714 additions and 891 deletions

View File

@@ -13,6 +13,8 @@ import DashboardPage from './pages/dashboard/DashboardPage';
import ProductionPage from './pages/production/ProductionPage';
import ForecastPage from './pages/forecast/ForecastPage';
import OrdersPage from './pages/orders/OrdersPage';
import InventoryPage from './pages/inventory/InventoryPage';
import SalesPage from './pages/sales/SalesPage';
import SettingsPage from './pages/settings/SettingsPage';
import Layout from './components/layout/Layout';
@@ -29,7 +31,7 @@ import './i18n';
// Global styles
import './styles/globals.css';
type CurrentPage = 'landing' | 'login' | 'register' | 'onboarding' | 'dashboard' | 'reports' | 'orders' | 'production' | 'settings';
type CurrentPage = 'landing' | 'login' | 'register' | 'onboarding' | 'dashboard' | 'reports' | 'orders' | 'production' | 'inventory' | 'sales' | 'settings';
interface User {
id: string;
@@ -291,6 +293,10 @@ const App: React.FC = () => {
return <OrdersPage />;
case 'production':
return <ProductionPage />;
case 'inventory':
return <InventoryPage />;
case 'sales':
return <SalesPage />;
case 'settings':
return <SettingsPage user={appState.user!} onLogout={handleLogout} />;
default:
@@ -298,6 +304,8 @@ const App: React.FC = () => {
onNavigateToOrders={() => navigateTo('orders')}
onNavigateToReports={() => navigateTo('reports')}
onNavigateToProduction={() => navigateTo('production')}
onNavigateToInventory={() => navigateTo('inventory')}
onNavigateToSales={() => navigateTo('sales')}
/>;
}
};

View File

@@ -16,13 +16,28 @@ class AuthInterceptor {
static setup() {
apiClient.addRequestInterceptor({
onRequest: async (config: RequestConfig) => {
const token = localStorage.getItem('auth_token');
let token = localStorage.getItem('auth_token');
console.log('🔐 AuthInterceptor: Checking auth token...', token ? 'Found' : 'Missing');
if (token) {
console.log('🔐 AuthInterceptor: Token preview:', token.substring(0, 20) + '...');
}
// For development: If no token exists or token is invalid, set a valid demo token
if ((!token || token === 'demo-development-token') && window.location.hostname === 'localhost') {
console.log('🔧 AuthInterceptor: Development mode - setting valid demo token');
token = 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxN2Q1ZTJjZC1hMjk4LTQyNzEtODZjNi01NmEzZGNiNDE0ZWUiLCJ1c2VyX2lkIjoiMTdkNWUyY2QtYTI5OC00MjcxLTg2YzYtNTZhM2RjYjQxNGVlIiwiZW1haWwiOiJ0ZXN0QGRlbW8uY29tIiwidHlwZSI6ImFjY2VzcyIsImV4cCI6MTc1NTI3MzEyNSwiaWF0IjoxNzU1MjcxMzI1LCJpc3MiOiJiYWtlcnktYXV0aCIsImZ1bGxfbmFtZSI6IkRlbW8gVXNlciIsImlzX3ZlcmlmaWVkIjpmYWxzZSwiaXNfYWN0aXZlIjp0cnVlLCJyb2xlIjoidXNlciJ9.RBfzH9L_NKySYkyLzBLYAApnrCFNK4OsGLLO-eCaTSI';
localStorage.setItem('auth_token', token);
}
if (token) {
config.headers = {
...config.headers,
Authorization: `Bearer ${token}`,
};
console.log('🔐 AuthInterceptor: Added Authorization header');
} else {
console.warn('⚠️ AuthInterceptor: No auth token found in localStorage');
}
return config;

View File

@@ -11,7 +11,7 @@ export { useTraining } from './useTraining';
export { useForecast } from './useForecast';
export { useNotification } from './useNotification';
export { useOnboarding, useOnboardingStep } from './useOnboarding';
export { useInventory, useInventoryDashboard, useInventoryItem } from './useInventory';
export { useInventory, useInventoryDashboard, useInventoryItem, useInventoryProducts } from './useInventory';
export { useRecipes, useProduction } from './useRecipes';
// Import hooks for combined usage
@@ -23,6 +23,7 @@ import { useTraining } from './useTraining';
import { useForecast } from './useForecast';
import { useNotification } from './useNotification';
import { useOnboarding } from './useOnboarding';
import { useInventory } from './useInventory';
// Combined hook for common operations
export const useApiHooks = () => {
@@ -30,10 +31,11 @@ export const useApiHooks = () => {
const tenant = useTenant();
const sales = useSales();
const external = useExternal();
const training = useTraining();
const training = useTraining({ disablePolling: true }); // Disable polling by default
const forecast = useForecast();
const notification = useNotification();
const onboarding = useOnboarding();
const inventory = useInventory();
return {
auth,
@@ -44,5 +46,6 @@ export const useApiHooks = () => {
forecast,
notification,
onboarding,
inventory
};
};

View File

@@ -31,7 +31,7 @@ export const useForecast = () => {
setError(null);
const newForecasts = await forecastingService.createSingleForecast(tenantId, request);
setForecasts(prev => [...newForecasts, ...prev]);
setForecasts(prev => [...newForecasts, ...(prev || [])]);
return newForecasts;
} catch (error) {
@@ -52,7 +52,7 @@ export const useForecast = () => {
setError(null);
const batchForecast = await forecastingService.createBatchForecast(tenantId, request);
setBatchForecasts(prev => [batchForecast, ...prev]);
setBatchForecasts(prev => [batchForecast, ...(prev || [])]);
return batchForecast;
} catch (error) {
@@ -90,7 +90,7 @@ export const useForecast = () => {
const batchForecast = await forecastingService.getBatchForecastStatus(tenantId, batchId);
// Update batch forecast in state
setBatchForecasts(prev => prev.map(bf =>
setBatchForecasts(prev => (prev || []).map(bf =>
bf.id === batchId ? batchForecast : bf
));
@@ -147,7 +147,7 @@ export const useForecast = () => {
setError(null);
const acknowledgedAlert = await forecastingService.acknowledgeForecastAlert(tenantId, alertId);
setAlerts(prev => prev.map(alert =>
setAlerts(prev => (prev || []).map(alert =>
alert.id === alertId ? acknowledgedAlert : alert
));
} catch (error) {

View File

@@ -20,6 +20,7 @@ import {
PaginatedResponse,
InventoryDashboardData
} from '../services/inventory.service';
import type { ProductInfo } from '../types';
import { useTenantId } from '../../hooks/useTenantId';
@@ -117,17 +118,29 @@ export const useInventory = (autoLoad = true): UseInventoryReturn => {
try {
const response = await inventoryService.getInventoryItems(tenantId, params);
setItems(response.items);
console.log('🔄 useInventory: Loaded items:', response.items);
setItems(response.items || []); // Ensure it's always an array
setPagination({
page: response.page,
limit: response.limit,
total: response.total,
totalPages: response.total_pages
page: response.page || 1,
limit: response.limit || 20,
total: response.total || 0,
totalPages: response.total_pages || 0
});
} catch (err: any) {
console.error('❌ useInventory: Error loading items:', err);
const errorMessage = err.response?.data?.detail || err.message || 'Error loading inventory items';
setError(errorMessage);
toast.error(errorMessage);
setItems([]); // Set empty array on error
// Show appropriate error message
if (err.response?.status === 401) {
console.error('❌ useInventory: Authentication failed');
} else if (err.response?.status === 403) {
toast.error('No tienes permisos para acceder a este inventario');
} else {
toast.error(errorMessage);
}
} finally {
setIsLoading(false);
}
@@ -222,6 +235,7 @@ export const useInventory = (autoLoad = true): UseInventoryReturn => {
setStockLevels(levelMap);
} catch (err: any) {
console.error('Error loading stock levels:', err);
// Don't show toast error for this as it's not critical for forecast page
}
}, [tenantId]);
@@ -273,6 +287,7 @@ export const useInventory = (autoLoad = true): UseInventoryReturn => {
setAlerts(alertsData);
} catch (err: any) {
console.error('Error loading alerts:', err);
// Don't show toast error for this as it's not critical for forecast page
}
}, [tenantId]);
@@ -301,6 +316,7 @@ export const useInventory = (autoLoad = true): UseInventoryReturn => {
setDashboardData(data);
} catch (err: any) {
console.error('Error loading dashboard:', err);
// Don't show toast error for this as it's not critical for forecast page
}
}, [tenantId]);
@@ -507,4 +523,61 @@ export const useInventoryItem = (itemId: string): UseInventoryItemReturn => {
adjustStock,
refresh
};
};
// ========== SIMPLE PRODUCTS HOOK FOR FORECASTING ==========

/**
 * Lightweight hook exposing the inventory product catalogue for forecasting.
 * Wraps the two read-only inventory service calls with shared
 * loading/error state.
 */
export const useInventoryProducts = () => {
  const [isLoading, setIsLoading] = useState(false);
  const [error, setError] = useState<string | null>(null);

  /**
   * Fetch the product list for a tenant (used to populate forecast pickers).
   * Re-throws on failure after recording the error message.
   */
  const getProductsList = useCallback(async (tenantId: string): Promise<ProductInfo[]> => {
    setIsLoading(true);
    setError(null);
    try {
      return await inventoryService.getProductsList(tenantId);
    } catch (err) {
      setError(err instanceof Error ? err.message : 'Failed to get products list');
      throw err;
    } finally {
      setIsLoading(false);
    }
  }, []);

  /**
   * Fetch a single product by its inventory id.
   * Re-throws on failure after recording the error message.
   */
  const getProductById = useCallback(async (tenantId: string, productId: string): Promise<ProductInfo | null> => {
    setIsLoading(true);
    setError(null);
    try {
      return await inventoryService.getProductById(tenantId, productId);
    } catch (err) {
      setError(err instanceof Error ? err.message : 'Failed to get product');
      throw err;
    } finally {
      setIsLoading(false);
    }
  }, []);

  return {
    // State
    isLoading,
    error,
    // Actions
    getProductsList,
    getProductById,
  };
};

View File

@@ -156,25 +156,6 @@ export const useSales = () => {
}
}, []);
/**
* Get Products List
*/
const getProductsList = useCallback(async (tenantId: string): Promise<string[]> => {
try {
setIsLoading(true);
setError(null);
const products = await salesService.getProductsList(tenantId);
return products;
} catch (error) {
const message = error instanceof Error ? error.message : 'Failed to get products list';
setError(message);
throw error;
} finally {
setIsLoading(false);
}
}, []);
/**
* Get Sales Analytics
@@ -213,7 +194,6 @@ export const useSales = () => {
getDashboardStats,
getRecentActivity,
exportSalesData,
getProductsList,
getSalesAnalytics,
clearError: () => setError(null),
};

View File

@@ -16,6 +16,7 @@ export {
trainingService,
forecastingService,
notificationService,
inventoryService,
api
} from './services';
@@ -31,6 +32,8 @@ export {
useNotification,
useApiHooks,
useOnboarding,
useInventory,
useInventoryProducts
} from './hooks';
// Export WebSocket functionality

View File

@@ -80,6 +80,7 @@ export class HealthService {
{ name: 'Sales', endpoint: '/sales/health' },
{ name: 'External', endpoint: '/external/health' },
{ name: 'Training', endpoint: '/training/health' },
{ name: 'Inventory', endpoint: '/inventory/health' },
{ name: 'Forecasting', endpoint: '/forecasting/health' },
{ name: 'Notification', endpoint: '/notifications/health' },
];

View File

@@ -5,6 +5,7 @@
*/
import { apiClient } from '../client';
import type { ProductInfo } from '../types';
// ========== TYPES AND INTERFACES ==========
@@ -208,7 +209,7 @@ export interface PaginatedResponse<T> {
// ========== INVENTORY SERVICE CLASS ==========
export class InventoryService {
private baseEndpoint = '/api/v1';
private baseEndpoint = '';
// ========== INVENTORY ITEMS ==========
@@ -230,16 +231,70 @@ export class InventoryService {
}
const query = searchParams.toString();
const url = `${this.baseEndpoint}/tenants/${tenantId}/inventory/items${query ? `?${query}` : ''}`;
const url = `/tenants/${tenantId}/ingredients${query ? `?${query}` : ''}`;
return apiClient.get(url);
console.log('🔍 InventoryService: Fetching inventory items from:', url);
try {
console.log('🔑 InventoryService: Making request with auth token:', localStorage.getItem('auth_token') ? 'Present' : 'Missing');
const response = await apiClient.get(url);
console.log('📋 InventoryService: Raw response:', response);
console.log('📋 InventoryService: Response type:', typeof response);
console.log('📋 InventoryService: Response keys:', response ? Object.keys(response) : 'null');
// Handle different response formats
if (Array.isArray(response)) {
// Direct array response
console.log('✅ InventoryService: Array response with', response.length, 'items');
return {
items: response,
total: response.length,
page: 1,
limit: response.length,
total_pages: 1
};
} else if (response && typeof response === 'object') {
// Check if it's already paginated
if ('items' in response && Array.isArray(response.items)) {
console.log('✅ InventoryService: Paginated response with', response.items.length, 'items');
return response;
}
// Handle object with numeric keys (convert to array)
const keys = Object.keys(response);
if (keys.length > 0 && keys.every(key => !isNaN(Number(key)))) {
const items = Object.values(response);
console.log('✅ InventoryService: Numeric keys response with', items.length, 'items');
return {
items,
total: items.length,
page: 1,
limit: items.length,
total_pages: 1
};
}
// Handle empty object - this seems to be what we're getting
if (keys.length === 0) {
console.log('📭 InventoryService: Empty object response - backend has no inventory items for this tenant');
throw new Error('NO_INVENTORY_ITEMS'); // This will trigger fallback in useInventory
}
}
// Fallback: unexpected response format
console.warn('⚠️ InventoryService: Unexpected response format, keys:', Object.keys(response || {}));
throw new Error('UNEXPECTED_RESPONSE_FORMAT');
} catch (error) {
console.error('❌ InventoryService: Failed to fetch inventory items:', error);
throw error;
}
}
/**
* Get single inventory item by ID
*/
async getInventoryItem(tenantId: string, itemId: string): Promise<InventoryItem> {
return apiClient.get(`${this.baseEndpoint}/tenants/${tenantId}/inventory/items/${itemId}`);
return apiClient.get(`/tenants/${tenantId}/ingredients/${itemId}`);
}
/**
@@ -249,7 +304,7 @@ export class InventoryService {
tenantId: string,
data: CreateInventoryItemRequest
): Promise<InventoryItem> {
return apiClient.post(`${this.baseEndpoint}/tenants/${tenantId}/inventory/items`, data);
return apiClient.post(`/tenants/${tenantId}/ingredients`, data);
}
/**
@@ -260,14 +315,14 @@ export class InventoryService {
itemId: string,
data: UpdateInventoryItemRequest
): Promise<InventoryItem> {
return apiClient.put(`${this.baseEndpoint}/tenants/${tenantId}/inventory/items/${itemId}`, data);
return apiClient.put(`/tenants/${tenantId}/ingredients/${itemId}`, data);
}
/**
* Delete inventory item (soft delete)
*/
async deleteInventoryItem(tenantId: string, itemId: string): Promise<void> {
return apiClient.delete(`${this.baseEndpoint}/tenants/${tenantId}/inventory/items/${itemId}`);
return apiClient.delete(`/tenants/${tenantId}/ingredients/${itemId}`);
}
/**
@@ -277,7 +332,7 @@ export class InventoryService {
tenantId: string,
updates: { id: string; data: UpdateInventoryItemRequest }[]
): Promise<{ success: number; failed: number; errors: string[] }> {
return apiClient.post(`${this.baseEndpoint}/tenants/${tenantId}/inventory/items/bulk-update`, {
return apiClient.post(`/tenants/${tenantId}/ingredients/bulk-update`, {
updates
});
}
@@ -288,14 +343,16 @@ export class InventoryService {
* Get current stock level for an item
*/
async getStockLevel(tenantId: string, itemId: string): Promise<StockLevel> {
return apiClient.get(`${this.baseEndpoint}/tenants/${tenantId}/inventory/stock/${itemId}`);
return apiClient.get(`/tenants/${tenantId}/ingredients/${itemId}/stock`);
}
/**
* Get stock levels for all items
*/
async getAllStockLevels(tenantId: string): Promise<StockLevel[]> {
return apiClient.get(`${this.baseEndpoint}/tenants/${tenantId}/inventory/stock`);
// TODO: Map to correct endpoint when available
return [];
// return apiClient.get(`/stock/summary`);
}
/**
@@ -307,7 +364,7 @@ export class InventoryService {
adjustment: StockAdjustmentRequest
): Promise<StockMovement> {
return apiClient.post(
`${this.baseEndpoint}/tenants/${tenantId}/inventory/stock/${itemId}/adjust`,
`/stock/consume`,
adjustment
);
}
@@ -353,7 +410,9 @@ export class InventoryService {
* Get current stock alerts
*/
async getStockAlerts(tenantId: string): Promise<StockAlert[]> {
return apiClient.get(`${this.baseEndpoint}/tenants/${tenantId}/inventory/alerts`);
// TODO: Map to correct endpoint when available
return [];
// return apiClient.get(`/tenants/${tenantId}/inventory/alerts`);
}
/**
@@ -378,7 +437,17 @@ export class InventoryService {
* Get inventory dashboard data
*/
async getDashboardData(tenantId: string): Promise<InventoryDashboardData> {
return apiClient.get(`${this.baseEndpoint}/tenants/${tenantId}/inventory/dashboard`);
// TODO: Map to correct endpoint when available
return {
total_items: 0,
low_stock_items: 0,
out_of_stock_items: 0,
total_value: 0,
recent_movements: [],
top_products: [],
stock_alerts: []
};
// return apiClient.get(`/tenants/${tenantId}/inventory/dashboard`);
}
/**
@@ -470,6 +539,87 @@ export class InventoryService {
async getSupplierSuggestions(tenantId: string): Promise<string[]> {
return apiClient.get(`${this.baseEndpoint}/tenants/${tenantId}/inventory/suppliers`);
}
// ========== PRODUCTS FOR FORECASTING ==========

/**
 * Get Products List with IDs for Forecasting.
 *
 * Normalizes the ingredients endpoint payload (plain array, or an object
 * keyed by numeric indices) into ProductInfo records. Entries lacking an
 * id or a name are dropped. Resolves to [] on any error so the caller
 * (the forecasting dashboard) can apply its own fallback.
 */
async getProductsList(tenantId: string): Promise<ProductInfo[]> {
  try {
    const response = await apiClient.get(`/tenants/${tenantId}/ingredients`, {
      params: { limit: 100 }, // Get all products
    });
    console.log('🔍 Inventory Products API Response:', response);

    // Normalize the payload into a plain array of raw product records.
    let rawProducts: any[];
    if (Array.isArray(response)) {
      rawProducts = response;
    } else if (response && typeof response === 'object') {
      const keys = Object.keys(response);
      const hasNumericKeys = keys.length > 0 && keys.every(key => !isNaN(Number(key)));
      if (!hasNumericKeys) {
        console.warn('⚠️ Response is object but not with numeric keys:', response);
        return [];
      }
      rawProducts = Object.values(response);
    } else {
      console.warn('⚠️ Response is not array or object:', response);
      return [];
    }

    // Map raw records to ProductInfo, keeping only entries with both id and name.
    const products: ProductInfo[] = rawProducts
      .map((product: any) => ({
        inventory_product_id: product.id || product.inventory_product_id,
        name: product.name || product.product_name || `Product ${product.id || ''}`,
        category: product.category,
        // Add additional fields if available from inventory
        current_stock: product.current_stock,
        unit: product.unit,
        cost_per_unit: product.cost_per_unit
      }))
      .filter(product => product.inventory_product_id && product.name);

    console.log('📋 Processed inventory products:', products);
    return products;
  } catch (error) {
    console.error('❌ Failed to fetch inventory products:', error);
    // Return empty array on error - let dashboard handle fallback
    return [];
  }
}
/**
* Get Product by ID
*/
async getProductById(tenantId: string, productId: string): Promise<ProductInfo | null> {
try {
const response = await apiClient.get(`/tenants/${tenantId}/ingredients/${productId}`);
if (response) {
return {
inventory_product_id: response.id || response.inventory_product_id,
name: response.name || response.product_name,
category: response.category,
current_stock: response.current_stock,
unit: response.unit,
cost_per_unit: response.cost_per_unit
};
}
return null;
} catch (error) {
console.error('❌ Failed to fetch product by ID:', error);
return null;
}
}
}
export const inventoryService = new InventoryService();

View File

@@ -181,85 +181,6 @@ export class SalesService {
});
}
/**
* Get Products List from Sales Data
*
* Fetches the tenant's sold products and extracts a plain list of names.
* Handles three payload shapes: a plain array, an object with numeric
* keys (converted to an array), or anything else (treated as empty).
* NOTE(review): on request failure this silently returns a hardcoded
* fallback list instead of surfacing the error — callers cannot tell
* real data from the fallback; confirm this is intended.
*/
async getProductsList(tenantId: string): Promise<string[]> {
try {
const response = await apiClient.get(`/tenants/${tenantId}/sales/products`);
console.log('🔍 Products API Response Analysis:');
console.log('- Type:', typeof response);
console.log('- Is Array:', Array.isArray(response));
console.log('- Keys:', Object.keys(response || {}));
console.log('- Response:', response);
let productsArray: any[] = [];
// ✅ FIX: Handle different response formats
if (Array.isArray(response)) {
// Standard array response
productsArray = response;
console.log('✅ Response is already an array');
} else if (response && typeof response === 'object') {
// Object with numeric keys - convert to array
const keys = Object.keys(response);
if (keys.length > 0 && keys.every(key => !isNaN(Number(key)))) {
// Object has numeric keys like {0: {...}, 1: {...}}
productsArray = Object.values(response);
console.log('✅ Converted object with numeric keys to array');
} else {
console.warn('⚠️ Response is object but not with numeric keys:', response);
return [];
}
} else {
console.warn('⚠️ Response is not array or object:', response);
return [];
}
console.log('📦 Products array:', productsArray);
// Extract product names from the array
// Each entry may be a bare string or an object carrying the name under
// product_name / name / productName; anything else maps to null.
const productNames = productsArray
.map((product: any) => {
if (typeof product === 'string') {
return product;
}
if (product && typeof product === 'object') {
return product.product_name ||
product.name ||
product.productName ||
null;
}
return null;
})
.filter(Boolean) // Remove null/undefined values
.filter((name: string) => name.trim().length > 0); // Remove empty strings
console.log('📋 Extracted product names:', productNames);
if (productNames.length === 0) {
console.warn('⚠️ No valid product names extracted from response');
}
return productNames;
} catch (error) {
console.error('❌ Failed to fetch products list:', error);
// Return fallback products for Madrid bakery
return [
'Croissants',
'Pan de molde',
'Baguettes',
'Café',
'Napolitanas',
'Pan integral',
'Magdalenas',
'Churros'
];
}
}
/**
* Get Sales Summary by Period

View File

@@ -91,10 +91,21 @@ export class TenantService {
}
/**
* Get User's Tenants - Get tenants where user is owner
*
* Resolves the current user via /users/me, then fetches the tenants owned
* by that user id. Resolves to [] on any failure so callers can degrade
* gracefully instead of crashing the app shell.
* NOTE(review): failures are only logged, never surfaced — confirm that a
* silent empty tenant list is the desired UX.
*/
async getUserTenants(): Promise<TenantInfo[]> {
try {
// First get current user info to get user ID
const currentUser = await apiClient.get(`/users/me`);
const userId = currentUser.id;
// Then get tenants owned by this user
return apiClient.get(`${this.baseEndpoint}/user/${userId}`);
} catch (error) {
console.error('Failed to get user tenants:', error);
// Return empty array if API call fails
return [];
}
}
}

View File

@@ -5,6 +5,15 @@
import { BaseQueryParams } from './common';
/**
 * Product record shared by sales and inventory for forecasting.
 * Only id and name are guaranteed; the rest are optional metadata.
 */
export interface ProductInfo {
// Inventory id used by forecasting endpoints to reference the product.
inventory_product_id: string;
name: string;
category?: string;
// Optional sales-derived metadata — presumably populated only when the
// record originates from sales data; TODO confirm against callers.
sales_count?: number;
total_quantity?: number;
last_sale_date?: string;
}
export interface SalesData {
id: string;
tenant_id: string;
@@ -26,7 +35,6 @@ export interface SalesData {
cost_of_goods?: number;
revenue?: number;
quantity_sold?: number;
inventory_product_id?: string;
discount_applied?: number;
weather_condition?: string;
}

View File

@@ -97,20 +97,48 @@ export const useWebSocket = (config: WebSocketConfig) => {
// Hook for training job updates
export const useTrainingWebSocket = (jobId: string, tenantId?: string) => {
const [jobUpdates, setJobUpdates] = useState<any[]>([]);
const [connectionError, setConnectionError] = useState<string | null>(null);
const [isAuthenticationError, setIsAuthenticationError] = useState(false);
// Get tenant ID reliably
// Get tenant ID reliably with enhanced error handling
const actualTenantId = tenantId || (() => {
try {
const userData = localStorage.getItem('user_data');
if (userData) {
const parsed = JSON.parse(userData);
return parsed.current_tenant_id || parsed.tenant_id;
}
// Try multiple sources for tenant ID
const sources = [
() => localStorage.getItem('current_tenant_id'),
() => {
const userData = localStorage.getItem('user_data');
if (userData) {
const parsed = JSON.parse(userData);
return parsed.current_tenant_id || parsed.tenant_id;
}
return null;
},
() => {
const authData = localStorage.getItem('auth_data');
if (authData) {
const parsed = JSON.parse(authData);
return parsed.tenant_id;
}
return null;
},
() => {
const tenantContext = localStorage.getItem('tenant_context');
if (tenantContext) {
const parsed = JSON.parse(tenantContext);
return parsed.current_tenant_id;
}
return null;
}
];
const authData = localStorage.getItem('auth_data');
if (authData) {
const parsed = JSON.parse(authData);
return parsed.tenant_id;
for (const source of sources) {
try {
const tenantId = source();
if (tenantId) return tenantId;
} catch (e) {
console.warn('Failed to get tenant ID from source:', e);
}
}
} catch (e) {
console.error('Failed to parse tenant ID from storage:', e);
@@ -123,8 +151,10 @@ export const useTrainingWebSocket = (jobId: string, tenantId?: string) => {
? `ws://localhost:8000/api/v1/ws/tenants/${actualTenantId}/training/jobs/${jobId}/live`
: `ws://localhost:8000/api/v1/ws/tenants/unknown/training/jobs/${jobId}/live`,
reconnect: true,
reconnectInterval: 3000,
maxReconnectAttempts: 10
reconnectInterval: 3000, // Faster reconnection for training
maxReconnectAttempts: 20, // More attempts for long training jobs
heartbeatInterval: 15000, // Send heartbeat every 15 seconds for training jobs
enableLogging: true // Enable logging for debugging
};
const {
@@ -137,40 +167,93 @@ export const useTrainingWebSocket = (jobId: string, tenantId?: string) => {
sendMessage
} = useWebSocket(config);
// Enhanced message handler
// Enhanced message handler with error handling
const handleWebSocketMessage = useCallback((message: any) => {
// Handle different message structures
let processedMessage = message;
// If message has nested data, flatten it for easier processing
if (message.data && typeof message.data === 'object') {
processedMessage = {
...message,
// Merge data properties to root level for backward compatibility
...message.data
};
}
try {
// Clear connection error when receiving messages
setConnectionError(null);
setIsAuthenticationError(false);
// Handle different message structures
let processedMessage = message;
// If message has nested data, flatten it for easier processing
if (message.data && typeof message.data === 'object') {
processedMessage = {
...message,
// Merge data properties to root level for backward compatibility
...message.data,
// Preserve original structure
_originalData: message.data
};
}
// Comprehensive message type handling
const trainingMessageTypes = [
'progress', 'training_progress',
'completed', 'training_completed',
'failed', 'training_failed',
'error', 'training_error',
'started', 'training_started',
'heartbeat', 'initial_status',
'status_update'
];
if (trainingMessageTypes.includes(message.type)) {
// Add to updates array with processed message
setJobUpdates(prev => {
const newUpdates = [processedMessage, ...prev.slice(0, 49)]; // Keep last 50 messages
return newUpdates;
});
} else {
// Still add to updates for debugging purposes
setJobUpdates(prev => [processedMessage, ...prev.slice(0, 49)]);
// Handle special message types
if (message.type === 'connection_established') {
console.log('WebSocket training connection established:', message);
setJobUpdates(prev => [{
type: 'connection_established',
message: 'Connected to training service',
timestamp: Date.now()
}, ...prev.slice(0, 49)]);
return;
}
// Handle keepalive messages (don't show to user, just for connection health)
if (message.type === 'pong' || message.type === 'heartbeat') {
console.debug('Training WebSocket keepalive received:', message.type);
return; // Don't add to jobUpdates
}
if (message.type === 'authentication_error' || message.type === 'authorization_error') {
console.error('WebSocket auth/authorization error:', message);
setIsAuthenticationError(true);
setConnectionError(message.message || 'Authentication/authorization failed - please refresh and try again');
return;
}
if (message.type === 'connection_error') {
console.error('WebSocket connection error:', message);
setConnectionError(message.message || 'Connection error');
return;
}
if (message.type === 'connection_timeout') {
console.warn('WebSocket connection timeout:', message);
// Don't set as error, just log - connection will retry
return;
}
if (message.type === 'job_not_found') {
console.error('Training job not found:', message);
setConnectionError('Training job not found. Please restart the training process.');
return;
}
// Comprehensive message type handling
const trainingMessageTypes = [
'progress', 'training_progress',
'completed', 'training_completed',
'failed', 'training_failed',
'error', 'training_error',
'started', 'training_started',
'heartbeat', 'initial_status',
'status_update'
];
if (trainingMessageTypes.includes(message.type)) {
// Add to updates array with processed message
setJobUpdates(prev => {
const newUpdates = [processedMessage, ...prev.slice(0, 49)]; // Keep last 50 messages
return newUpdates;
});
} else {
// Still add to updates for debugging purposes
console.log('Received unknown message type:', message.type, message);
setJobUpdates(prev => [processedMessage, ...prev.slice(0, 49)]);
}
} catch (error) {
console.error('Error processing WebSocket message:', error, message);
}
}, []);
@@ -179,21 +262,108 @@ export const useTrainingWebSocket = (jobId: string, tenantId?: string) => {
addMessageHandler(handleWebSocketMessage);
}, [addMessageHandler, handleWebSocketMessage]);
// Send periodic ping to keep connection alive
// Enhanced dual ping system for training jobs - prevent disconnection during long training
useEffect(() => {
if (isConnected) {
const pingInterval = setInterval(() => {
sendMessage({
type: 'ping',
data: undefined
// Primary ping system using JSON messages with training info
const keepaliveInterval = setInterval(() => {
const success = sendMessage({
type: 'training_keepalive',
data: {
timestamp: Date.now(),
job_id: jobId,
tenant_id: actualTenantId,
status: 'active'
}
});
}, 30000); // Every 30 seconds
if (!success) {
console.warn('Training keepalive failed - connection may be lost');
}
}, 10000); // Every 10 seconds for training jobs
// Secondary simple text ping system (more lightweight)
const simplePingInterval = setInterval(() => {
// Send a simple text ping to keep connection alive
const success = sendMessage({
type: 'ping',
data: {
timestamp: Date.now(),
source: 'training_client'
}
});
if (!success) {
console.warn('Simple training ping failed');
}
}, 15000); // Every 15 seconds
return () => {
clearInterval(pingInterval);
clearInterval(keepaliveInterval);
clearInterval(simplePingInterval);
};
}
}, [isConnected, sendMessage]);
}, [isConnected, sendMessage, jobId, actualTenantId]);
// Define refresh connection function
const refreshConnection = useCallback(() => {
setConnectionError(null);
setIsAuthenticationError(false);
disconnect();
setTimeout(() => {
connect();
}, 1000);
}, [connect, disconnect]);
// Enhanced connection monitoring and auto-recovery for training jobs
useEffect(() => {
if (actualTenantId && jobId !== 'pending') {
const healthCheckInterval = setInterval(() => {
// If we should be connected but aren't, try to reconnect
if (status === 'disconnected' && !connectionError) {
console.log('WebSocket health check: reconnecting disconnected training socket');
connect();
}
// More aggressive stale connection detection for training jobs
const lastUpdate = jobUpdates.length > 0 ? jobUpdates[0] : null;
if (lastUpdate && status === 'connected') {
const timeSinceLastMessage = Date.now() - (lastUpdate.timestamp || 0);
if (timeSinceLastMessage > 45000) { // 45 seconds without messages during training
console.log('WebSocket health check: connection appears stale, refreshing');
refreshConnection();
}
}
// If connection is in a failed state for too long, force reconnect
if (status === 'failed' && !isAuthenticationError) {
console.log('WebSocket health check: recovering from failed state');
setTimeout(() => connect(), 2000);
}
}, 12000); // Check every 12 seconds for training jobs
return () => clearInterval(healthCheckInterval);
}
}, [actualTenantId, jobId, status, connectionError, connect, refreshConnection, jobUpdates, isAuthenticationError]);
// Enhanced connection setup - request current status when connecting
useEffect(() => {
if (isConnected && jobId !== 'pending') {
// Wait a moment for connection to stabilize, then request current status
const statusRequestTimer = setTimeout(() => {
console.log('Requesting current training status after connection');
sendMessage({
type: 'get_status',
data: {
job_id: jobId,
tenant_id: actualTenantId
}
});
}, 2000);
return () => clearTimeout(statusRequestTimer);
}
}, [isConnected, jobId, actualTenantId, sendMessage]);
return {
status,
@@ -204,13 +374,33 @@ export const useTrainingWebSocket = (jobId: string, tenantId?: string) => {
lastMessage,
tenantId: actualTenantId,
wsUrl: config.url,
// Manual refresh function
refreshConnection: useCallback(() => {
connectionError,
isAuthenticationError,
// Enhanced refresh function with status request
refreshConnection,
// Force retry with new authentication
retryWithAuth: useCallback(() => {
setConnectionError(null);
setIsAuthenticationError(false);
// Clear any cached auth data that might be stale
disconnect();
setTimeout(() => {
connect();
}, 1000);
}, [connect, disconnect])
}, 2000);
}, [connect, disconnect]),
// Manual status request function
requestStatus: useCallback(() => {
if (isConnected && jobId !== 'pending') {
return sendMessage({
type: 'get_status',
data: {
job_id: jobId,
tenant_id: actualTenantId
}
});
}
return false;
}, [isConnected, jobId, actualTenantId, sendMessage])
};
};

View File

@@ -99,8 +99,17 @@ export class WebSocketManager {
this.handlers.onClose?.(event);
// Auto-reconnect if enabled and not manually closed
if (this.config.reconnect && event.code !== 1000) {
// Don't reconnect on authorization failures or job not found (1008) with specific reasons
const isAuthorizationError = event.code === 1008 &&
(event.reason === 'Authentication failed' || event.reason === 'Authorization failed');
const isJobNotFound = event.code === 1008 && event.reason === 'Job not found';
if (this.config.reconnect && event.code !== 1000 && !isAuthorizationError && !isJobNotFound) {
this.scheduleReconnect();
} else if (isAuthorizationError || isJobNotFound) {
this.log('Connection failed - stopping reconnection attempts:', event.reason);
this.status = 'failed';
this.handlers.onReconnectFailed?.();
}
};

View File

@@ -1,4 +1,4 @@
import React, { useState, useEffect } from 'react';
import React, { useState, useEffect, useRef } from 'react';
import {
Sparkles, CheckCircle, Clock, ArrowRight, Coffee,
TrendingUp, Target, Loader, AlertTriangle, Mail,
@@ -18,6 +18,11 @@ interface SimplifiedTrainingProgressProps {
onTimeout?: () => void;
onBackgroundMode?: () => void;
onEmailNotification?: (email: string) => void;
// Optional WebSocket debugging info
websocketStatus?: string;
connectionError?: string;
isConnected?: boolean;
onRetryConnection?: () => void;
}
// Proceso simplificado de entrenamiento en 3 etapas
@@ -79,13 +84,18 @@ export default function SimplifiedTrainingProgress({
progress,
onTimeout,
onBackgroundMode,
onEmailNotification
onEmailNotification,
websocketStatus,
connectionError,
isConnected,
onRetryConnection
}: SimplifiedTrainingProgressProps) {
const [showDetails, setShowDetails] = useState(false);
const [showTimeoutOptions, setShowTimeoutOptions] = useState(false);
const [emailForNotification, setEmailForNotification] = useState('');
const [celebratingStage, setCelebratingStage] = useState<string | null>(null);
const [startTime] = useState(Date.now());
const celebratedStagesRef = useRef<Set<string>>(new Set());
// Show timeout options after 7 minutes for better UX
useEffect(() => {
@@ -98,17 +108,18 @@ export default function SimplifiedTrainingProgress({
return () => clearTimeout(timer);
}, [progress.status, progress.progress]);
// Celebrate stage completions
// Celebrate stage completions - fixed to prevent infinite re-renders
useEffect(() => {
TRAINING_STAGES.forEach(stage => {
if (progress.progress >= stage.progressRange[1] &&
celebratingStage !== stage.id &&
!celebratedStagesRef.current.has(stage.id) &&
progress.progress > 0) {
setCelebratingStage(stage.id);
celebratedStagesRef.current.add(stage.id);
setTimeout(() => setCelebratingStage(null), 3000);
}
});
}, [progress.progress, celebratingStage]);
}, [progress.progress]);
const getCurrentStage = () => {
return TRAINING_STAGES.find(stage =>
@@ -258,6 +269,36 @@ export default function SimplifiedTrainingProgress({
</p>
</div>
{/* Connection Status Debug Info */}
{(websocketStatus || connectionError) && (
<div className={`mb-4 p-3 rounded-lg text-sm ${
connectionError
? 'bg-red-50 text-red-800 border border-red-200'
: isConnected
? 'bg-green-50 text-green-800 border border-green-200'
: 'bg-yellow-50 text-yellow-800 border border-yellow-200'
}`}>
<div className="flex items-center justify-between">
<div>
<strong>Estado de conexión:</strong>
{connectionError
? ` Error - ${connectionError}`
: isConnected
? ' ✅ Conectado a tiempo real'
: ' ⏳ Conectando...'}
</div>
{connectionError && onRetryConnection && (
<button
onClick={onRetryConnection}
className="ml-2 px-3 py-1 bg-red-600 text-white text-xs rounded hover:bg-red-700 transition-colors"
>
Reintentar
</button>
)}
</div>
</div>
)}
{/* Optional Details */}
<button
onClick={() => setShowDetails(!showDetails)}

View File

@@ -10,7 +10,9 @@ import {
User,
Bell,
ChevronDown,
ChefHat
ChefHat,
Warehouse,
ShoppingCart
} from 'lucide-react';
interface LayoutProps {
@@ -42,6 +44,8 @@ const Layout: React.FC<LayoutProps> = ({
{ id: 'dashboard', label: 'Inicio', icon: Home, href: '/dashboard' },
{ id: 'orders', label: 'Pedidos', icon: Package, href: '/orders' },
{ id: 'production', label: 'Producción', icon: ChefHat, href: '/production' },
{ id: 'inventory', label: 'Inventario', icon: Warehouse, href: '/inventory' },
{ id: 'sales', label: 'Ventas', icon: ShoppingCart, href: '/sales' },
{ id: 'reports', label: 'Informes', icon: TrendingUp, href: '/reports' },
{ id: 'settings', label: 'Configuración', icon: Settings, href: '/settings' },
];

View File

@@ -245,7 +245,8 @@ const SmartHistoricalDataImport: React.FC<SmartHistoricalDataImportProps> = ({
color: 'blue',
bgColor: 'bg-blue-50',
borderColor: 'border-blue-200',
textColor: 'text-blue-900'
textColor: 'text-blue-900',
businessType: 'bakery'
},
central_baker_satellite: {
icon: Truck,
@@ -254,7 +255,8 @@ const SmartHistoricalDataImport: React.FC<SmartHistoricalDataImportProps> = ({
color: 'amber',
bgColor: 'bg-amber-50',
borderColor: 'border-amber-200',
textColor: 'text-amber-900'
textColor: 'text-amber-900',
businessType: 'bakery'
},
retail_bakery: {
icon: Store,
@@ -263,7 +265,8 @@ const SmartHistoricalDataImport: React.FC<SmartHistoricalDataImportProps> = ({
color: 'green',
bgColor: 'bg-green-50',
borderColor: 'border-green-200',
textColor: 'text-green-900'
textColor: 'text-green-900',
businessType: 'bakery'
},
hybrid_bakery: {
icon: Settings2,
@@ -272,7 +275,28 @@ const SmartHistoricalDataImport: React.FC<SmartHistoricalDataImportProps> = ({
color: 'purple',
bgColor: 'bg-purple-50',
borderColor: 'border-purple-200',
textColor: 'text-purple-900'
textColor: 'text-purple-900',
businessType: 'bakery'
},
coffee_shop_individual: {
icon: Coffee,
title: 'Cafetería Individual',
description: 'Servicio de bebidas y comida ligera con preparación in-situ',
color: 'amber',
bgColor: 'bg-amber-50',
borderColor: 'border-amber-200',
textColor: 'text-amber-900',
businessType: 'coffee_shop'
},
coffee_shop_chain: {
icon: Building2,
title: 'Cafetería en Cadena',
description: 'Múltiples ubicaciones con productos estandarizados',
color: 'indigo',
bgColor: 'bg-indigo-50',
borderColor: 'border-indigo-200',
textColor: 'text-indigo-900',
businessType: 'coffee_shop'
},
// Legacy fallbacks
production: {
@@ -282,7 +306,8 @@ const SmartHistoricalDataImport: React.FC<SmartHistoricalDataImportProps> = ({
color: 'blue',
bgColor: 'bg-blue-50',
borderColor: 'border-blue-200',
textColor: 'text-blue-900'
textColor: 'text-blue-900',
businessType: 'bakery'
},
retail: {
icon: Store,
@@ -291,7 +316,8 @@ const SmartHistoricalDataImport: React.FC<SmartHistoricalDataImportProps> = ({
color: 'green',
bgColor: 'bg-green-50',
borderColor: 'border-green-200',
textColor: 'text-green-900'
textColor: 'text-green-900',
businessType: 'bakery'
},
hybrid: {
icon: Settings2,
@@ -300,7 +326,8 @@ const SmartHistoricalDataImport: React.FC<SmartHistoricalDataImportProps> = ({
color: 'purple',
bgColor: 'bg-purple-50',
borderColor: 'border-purple-200',
textColor: 'text-purple-900'
textColor: 'text-purple-900',
businessType: 'bakery'
}
};
@@ -331,12 +358,28 @@ const SmartHistoricalDataImport: React.FC<SmartHistoricalDataImportProps> = ({
</span>
</div>
<div className="flex items-center space-x-2">
<Coffee className="w-4 h-4 text-brown-500" />
<Package className="w-4 h-4 text-green-500" />
<span className="text-sm text-gray-700">
{analysis.finished_product_count} productos finales
</span>
</div>
</div>
{/* Enhanced business intelligence insights if available */}
{config.businessType === 'coffee_shop' && (
<div className="mb-4 p-3 bg-amber-100 border border-amber-200 rounded-lg">
<div className="flex items-center space-x-2 mb-2">
<Coffee className="w-4 h-4 text-amber-600" />
<span className="text-sm font-medium text-amber-800">
Negocio de Cafetería Detectado
</span>
</div>
<p className="text-xs text-amber-700">
Hemos detectado que tu negocio se enfoca principalmente en bebidas y comida ligera.
El sistema se optimizará para gestión de inventario de cafetería.
</p>
</div>
)}
{analysis.recommendations.length > 0 && (
<div>

View File

@@ -19,7 +19,7 @@ import {
} from 'lucide-react';
import { useSales } from '../../api/hooks/useSales';
import { useInventory } from '../../api/hooks/useInventory';
import { useInventory, useInventoryProducts } from '../../api/hooks/useInventory';
import { useAuth } from '../../api/hooks/useAuth';
import Card from '../ui/Card';
@@ -37,10 +37,13 @@ const SalesAnalyticsDashboard: React.FC = () => {
const {
getSalesAnalytics,
getSalesData,
getProductsList,
isLoading: salesLoading,
error: salesError
} = useSales();
const {
getProductsList
} = useInventoryProducts();
const {
items: products,

View File

@@ -2,7 +2,8 @@
// Complete dashboard hook using your API infrastructure
import { useState, useEffect, useCallback } from 'react';
import { useAuth, useSales, useExternal, useForecast } from '../api';
import { useAuth, useSales, useExternal, useForecast, useInventoryProducts } from '../api';
import type { ProductInfo } from '../api/types';
import { useTenantId } from './useTenantId';
@@ -14,6 +15,7 @@ interface DashboardData {
} | null;
todayForecasts: Array<{
product: string;
inventory_product_id: string;
predicted: number;
confidence: 'high' | 'medium' | 'low';
change: number;
@@ -24,18 +26,22 @@ interface DashboardData {
accuracy: number;
stockouts: number;
} | null;
products: string[];
products: ProductInfo[];
}
export const useDashboard = () => {
const { user } = useAuth();
const {
getProductsList,
getSalesAnalytics,
getDashboardStats,
isLoading: salesLoading,
error: salesError
} = useSales();
const {
getProductsList,
isLoading: inventoryLoading,
error: inventoryError
} = useInventoryProducts();
const {
getCurrentWeather,
isLoading: externalLoading,
@@ -83,19 +89,31 @@ export const useDashboard = () => {
setError(null);
try {
// 1. Get available products
let products: string[] = [];
// 1. Get available products from inventory service
let products: ProductInfo[] = [];
try {
products = await getProductsList(tenantId);
// Fallback to default products if none found
if (products.length === 0) {
products = ['Croissants', 'Pan de molde', 'Baguettes', 'Café', 'Napolitanas'];
console.warn('No products found from API, using default products');
products = [
{ inventory_product_id: 'fallback-croissants', name: 'Croissants' },
{ inventory_product_id: 'fallback-pan', name: 'Pan de molde' },
{ inventory_product_id: 'fallback-baguettes', name: 'Baguettes' },
{ inventory_product_id: 'fallback-cafe', name: 'Café' },
{ inventory_product_id: 'fallback-napolitanas', name: 'Napolitanas' }
];
console.warn('No products found from inventory API, using default products');
}
} catch (error) {
console.warn('Failed to fetch products:', error);
products = ['Croissants', 'Pan de molde', 'Baguettes', 'Café', 'Napolitanas'];
console.warn('Failed to fetch products from inventory:', error);
products = [
{ inventory_product_id: 'fallback-croissants', name: 'Croissants' },
{ inventory_product_id: 'fallback-pan', name: 'Pan de molde' },
{ inventory_product_id: 'fallback-baguettes', name: 'Baguettes' },
{ inventory_product_id: 'fallback-cafe', name: 'Café' },
{ inventory_product_id: 'fallback-napolitanas', name: 'Napolitanas' }
];
}
// 2. Get weather data (Madrid coordinates)
let weather = null;
@@ -112,11 +130,10 @@ export const useDashboard = () => {
}
// 3. Generate forecasts for each product
const forecastPromises = products.map(async (product) => {
const forecastPromises = products.map(async (productInfo) => {
try {
const forecastRequest = {
inventory_product_id: product, // Use product as inventory_product_id
product_name: product, // Keep for backward compatibility
inventory_product_id: productInfo.inventory_product_id, // ✅ Now using actual inventory product ID
forecast_date: new Date().toISOString().split('T')[0], // Today's date as YYYY-MM-DD
forecast_days: 1,
location: 'madrid_centro', // Default location for Madrid bakery
@@ -125,6 +142,7 @@ export const useDashboard = () => {
// confidence_level is handled by backend internally (default 0.8)
};
console.log(`🔮 Requesting forecast for ${productInfo.name} (${productInfo.inventory_product_id})`);
const forecastResults = await createSingleForecast(tenantId, forecastRequest);
if (forecastResults && forecastResults.length > 0) {
@@ -138,20 +156,24 @@ export const useDashboard = () => {
// Calculate change (placeholder - you might want historical comparison)
const change = Math.round(Math.random() * 20 - 10);
console.log(`✅ Forecast successful for ${productInfo.name}: ${forecast.predicted_demand}`);
return {
product,
product: productInfo.name,
inventory_product_id: productInfo.inventory_product_id,
predicted: Math.round(forecast.predicted_demand || 0),
confidence,
change
};
}
} catch (error) {
console.warn(`Forecast failed for ${product}:`, error);
console.warn(`Forecast failed for ${productInfo.name} (${productInfo.inventory_product_id}):`, error);
}
// Fallback for failed forecasts
return {
product,
product: productInfo.name,
inventory_product_id: productInfo.inventory_product_id,
predicted: Math.round(Math.random() * 50 + 20),
confidence: 'medium' as const,
change: Math.round(Math.random() * 20 - 10)
@@ -213,11 +235,11 @@ export const useDashboard = () => {
precipitation: 0
},
todayForecasts: [
{ product: 'Croissants', predicted: 48, confidence: 'high', change: 8 },
{ product: 'Pan de molde', predicted: 35, confidence: 'high', change: 3 },
{ product: 'Baguettes', predicted: 25, confidence: 'medium', change: -3 },
{ product: 'Café', predicted: 72, confidence: 'high', change: 5 },
{ product: 'Napolitanas', predicted: 26, confidence: 'medium', change: 3 }
{ product: 'Croissants', inventory_product_id: 'fallback-croissants', predicted: 48, confidence: 'high', change: 8 },
{ product: 'Pan de molde', inventory_product_id: 'fallback-pan', predicted: 35, confidence: 'high', change: 3 },
{ product: 'Baguettes', inventory_product_id: 'fallback-baguettes', predicted: 25, confidence: 'medium', change: -3 },
{ product: 'Café', inventory_product_id: 'fallback-cafe', predicted: 72, confidence: 'high', change: 5 },
{ product: 'Napolitanas', inventory_product_id: 'fallback-napolitanas', predicted: 26, confidence: 'medium', change: 3 }
],
metrics: {
totalSales: 1247,
@@ -225,7 +247,13 @@ export const useDashboard = () => {
accuracy: 87.2,
stockouts: 2
},
products: ['Croissants', 'Pan de molde', 'Baguettes', 'Café', 'Napolitanas']
products: [
{ inventory_product_id: 'fallback-croissants', name: 'Croissants' },
{ inventory_product_id: 'fallback-pan', name: 'Pan de molde' },
{ inventory_product_id: 'fallback-baguettes', name: 'Baguettes' },
{ inventory_product_id: 'fallback-cafe', name: 'Café' },
{ inventory_product_id: 'fallback-napolitanas', name: 'Napolitanas' }
]
});
} finally {
setIsLoading(false);
@@ -241,8 +269,8 @@ export const useDashboard = () => {
return {
...dashboardData,
isLoading: isLoading || salesLoading || externalLoading || forecastLoading,
error: error || salesError || externalError || forecastError,
isLoading: isLoading || salesLoading || inventoryLoading || externalLoading || forecastLoading,
error: error || salesError || inventoryError || externalError || forecastError,
reload: () => tenantId ? loadDashboardData(tenantId) : Promise.resolve(),
clearError: () => setError(null)
};

View File

@@ -1,6 +1,6 @@
// Real API hook for Order Suggestions using backend data
import { useState, useCallback, useEffect } from 'react';
import { useSales, useExternal, useForecast } from '../api';
import { useSales, useExternal, useForecast, useInventoryProducts } from '../api';
import { useTenantId } from './useTenantId';
import type { DailyOrderItem, WeeklyOrderItem } from '../components/simple/OrderSuggestions';
@@ -42,10 +42,12 @@ export const useOrderSuggestions = () => {
console.log('🏢 OrderSuggestions: Tenant info:', { tenantId, tenantLoading, tenantError });
const {
getProductsList,
getSalesAnalytics,
getDashboardStats
} = useSales();
const {
getProductsList
} = useInventoryProducts();
const {
getCurrentWeather
} = useExternal();

View File

@@ -144,11 +144,8 @@ export const useTenantId = () => {
const apiTenantId = await fetchTenantIdFromAPI();
if (!apiTenantId) {
console.log('❌ TenantId: No tenant found, using fallback');
// Use the tenant ID from the logs as fallback
const fallbackTenantId = 'bd5261b9-dc52-4d5c-b378-faaf440d9b58';
storeTenantId(fallbackTenantId);
setError(null);
console.log('❌ TenantId: No tenant found for this user');
setError('No tenant found for this user. Please complete onboarding or contact support.');
}
setIsLoading(false);

View File

@@ -141,7 +141,7 @@ const RegisterPage: React.FC<RegisterPageProps> = ({ onLogin, onNavigateToLogin
toast.success('¡Pago procesado correctamente!');
setFormData(prev => ({ ...prev, paymentCompleted: true }));
setPaymentStep('completed');
// Skip intermediate page and proceed directly to registration
onPaymentSuccess();
} catch (error) {
@@ -299,14 +299,11 @@ const RegisterPage: React.FC<RegisterPageProps> = ({ onLogin, onNavigateToLogin
// Move to payment step, or bypass if in development mode
if (bypassPayment) {
// Development bypass: simulate payment completion
// Development bypass: simulate payment completion and proceed directly to registration
setFormData(prev => ({ ...prev, paymentCompleted: true }));
setPaymentStep('completed');
toast.success('🚀 Modo desarrollo: Pago omitido');
// Proceed directly to registration
setTimeout(() => {
handleRegistrationComplete();
}, 1500);
// Proceed directly to registration without intermediate page
handleRegistrationComplete();
} else {
setPaymentStep('payment');
}

View File

@@ -1,6 +1,10 @@
import React, { useState, useEffect } from 'react';
import { TrendingUp, TrendingDown, Calendar, Cloud, AlertTriangle, Info } from 'lucide-react';
import { TrendingUp, TrendingDown, Calendar, Cloud, AlertTriangle, Info, RefreshCw } from 'lucide-react';
import { LineChart, Line, XAxis, YAxis, CartesianGrid, Tooltip, ResponsiveContainer } from 'recharts';
import { useForecast } from '../../api/hooks/useForecast';
import { useInventory } from '../../api/hooks/useInventory';
import { useTenantId } from '../../hooks/useTenantId';
import type { ForecastResponse } from '../../api/types/forecasting';
interface ForecastData {
date: string;
@@ -9,6 +13,9 @@ interface ForecastData {
confidence: 'high' | 'medium' | 'low';
factors: string[];
weatherImpact?: string;
inventory_product_id?: string;
confidence_lower?: number;
confidence_upper?: number;
}
interface WeatherAlert {
@@ -20,95 +27,270 @@ interface WeatherAlert {
const ForecastPage: React.FC = () => {
const [selectedDate, setSelectedDate] = useState(new Date().toISOString().split('T')[0]);
const [selectedProduct, setSelectedProduct] = useState('all');
const [forecasts, setForecasts] = useState<ForecastData[]>([]);
const [forecastData, setForecastData] = useState<ForecastData[]>([]);
const [weatherAlert, setWeatherAlert] = useState<WeatherAlert | null>(null);
const [isLoading, setIsLoading] = useState(true);
const [isGenerating, setIsGenerating] = useState(false);
// Hooks
const { tenantId } = useTenantId();
const {
forecasts,
isLoading: forecastLoading,
error: forecastError,
createSingleForecast,
getForecasts,
getForecastAlerts,
exportForecasts
} = useForecast();
const {
items: inventoryItems,
isLoading: inventoryLoading,
loadItems
} = useInventory(false); // Disable auto-load, we'll load manually
const products = [
'Croissants', 'Pan de molde', 'Baguettes', 'Napolitanas',
'Café', 'Magdalenas', 'Donuts', 'Bocadillos'
];
// Debug logging
if (process.env.NODE_ENV === 'development') {
console.log('ForecastPage - inventoryItems:', inventoryItems);
console.log('ForecastPage - inventoryLoading:', inventoryLoading);
console.log('ForecastPage - tenantId:', tenantId);
}
// Sample forecast data for the next 7 days
const sampleForecastData = [
{ date: '2024-11-04', croissants: 48, pan: 35, cafe: 72 },
{ date: '2024-11-05', croissants: 52, pan: 38, cafe: 78 },
{ date: '2024-11-06', croissants: 45, pan: 32, cafe: 65 },
{ date: '2024-11-07', croissants: 41, pan: 29, cafe: 58 },
{ date: '2024-11-08', croissants: 56, pan: 42, cafe: 82 },
{ date: '2024-11-09', croissants: 61, pan: 45, cafe: 89 },
{ date: '2024-11-10', croissants: 38, pan: 28, cafe: 55 },
];
// Derived state
const isLoading = forecastLoading || inventoryLoading;
const products = (inventoryItems || []).map(item => ({
id: item.id,
name: item.name || 'Unknown Product'
}));
// Sample forecast data for the next 7 days - will be populated by real data
const [sampleForecastData, setSampleForecastData] = useState<any[]>(() => {
// Generate 7 days starting from today
const data = [];
for (let i = 0; i < 7; i++) {
const date = new Date();
date.setDate(date.getDate() + i);
data.push({
date: date.toISOString().split('T')[0],
croissants: 0,
pan: 0,
cafe: 0
});
}
return data;
});
// Load inventory items on component mount
useEffect(() => {
const loadForecasts = async () => {
setIsLoading(true);
if (tenantId) {
loadItems();
}
}, [tenantId, loadItems]);
// Transform API forecasts to our local format
const transformForecastResponse = (forecast: ForecastResponse): ForecastData => {
// Find product name from inventory items
const inventoryItem = (inventoryItems || []).find(item => item.id === forecast.inventory_product_id);
const productName = inventoryItem?.name || 'Unknown Product';
// Determine confidence level based on confidence_level number
let confidence: 'high' | 'medium' | 'low' = 'medium';
if (forecast.confidence_level) {
if (forecast.confidence_level >= 0.8) confidence = 'high';
else if (forecast.confidence_level >= 0.6) confidence = 'medium';
else confidence = 'low';
}
// Extract factors from features_used or provide defaults
const factors = [];
if (forecast.features_used) {
if (forecast.features_used.is_weekend === false) factors.push('Día laboral');
else if (forecast.features_used.is_weekend === true) factors.push('Fin de semana');
if (forecast.features_used.is_holiday === false) factors.push('Sin eventos especiales');
else if (forecast.features_used.is_holiday === true) factors.push('Día festivo');
if (forecast.features_used.weather_description) factors.push(`Clima: ${forecast.features_used.weather_description}`);
else factors.push('Clima estable');
} else {
factors.push('Día laboral', 'Clima estable', 'Sin eventos especiales');
}
// Determine weather impact
let weatherImpact = 'Sin impacto significativo';
if (forecast.features_used?.temperature) {
const temp = forecast.features_used.temperature;
if (temp < 10) weatherImpact = 'Temperatura baja - posible aumento en bebidas calientes';
else if (temp > 25) weatherImpact = 'Temperatura alta - posible reducción en productos horneados';
}
return {
date: forecast.forecast_date.split('T')[0], // Convert to YYYY-MM-DD
product: productName,
predicted: Math.round(forecast.predicted_demand),
confidence,
factors,
weatherImpact,
inventory_product_id: forecast.inventory_product_id,
confidence_lower: forecast.confidence_lower,
confidence_upper: forecast.confidence_upper,
};
};
// Generate forecasts for available products
const generateForecasts = async () => {
if (!tenantId || !inventoryItems || inventoryItems.length === 0) return;
setIsGenerating(true);
try {
// Generate forecasts for top 3 products for the next 7 days
const productsToForecast = inventoryItems.slice(0, 3);
const chartData = [];
// Generate data for the next 7 days
for (let dayOffset = 0; dayOffset < 7; dayOffset++) {
const forecastDate = new Date();
forecastDate.setDate(forecastDate.getDate() + dayOffset);
const dateStr = forecastDate.toISOString().split('T')[0];
const dayData = {
date: dateStr,
croissants: 0,
pan: 0,
cafe: 0
};
// Generate forecasts for each product for this day
const dayForecasts = await Promise.all(
productsToForecast.map(async (item) => {
try {
const forecastResponses = await createSingleForecast(tenantId, {
inventory_product_id: item.id,
forecast_date: dateStr,
forecast_days: 1,
location: 'Madrid, Spain',
include_external_factors: true,
confidence_intervals: true,
});
return forecastResponses.map(transformForecastResponse);
} catch (error) {
console.error(`Failed to generate forecast for ${item.name} on ${dateStr}:`, error);
return [];
}
})
);
// Process forecasts for this day
const flatDayForecasts = dayForecasts.flat();
flatDayForecasts.forEach((forecast) => {
const key = forecast.product.toLowerCase();
if (key.includes('croissant')) dayData.croissants = forecast.predicted;
else if (key.includes('pan')) dayData.pan = forecast.predicted;
else if (key.includes('cafe')) dayData.cafe = forecast.predicted;
});
chartData.push(dayData);
// Store forecasts for selected date display
if (dateStr === selectedDate) {
setForecastData(flatDayForecasts);
}
}
// Update chart with 7 days of data
setSampleForecastData(chartData);
// Set a sample weather alert
setWeatherAlert({
type: 'rain',
impact: 'Condiciones climáticas estables para el día seleccionado',
recommendation: 'Mantener la producción según las predicciones'
});
} catch (error) {
console.error('Error generating forecasts:', error);
} finally {
setIsGenerating(false);
}
};
// Load existing forecasts when component mounts or date changes
useEffect(() => {
const loadExistingForecasts = async () => {
if (!tenantId) return;
try {
// Simulate API call
await new Promise(resolve => setTimeout(resolve, 1000));
// Try to get existing forecasts first
const existingForecasts = await getForecasts(tenantId);
// Mock weather alert
setWeatherAlert({
type: 'rain',
impact: 'Se esperan lluvias moderadas mañana',
recommendation: 'Reduce la producción de productos frescos en un 20%'
});
// Mock forecast data
const mockForecasts: ForecastData[] = [
{
date: selectedDate,
product: 'Croissants',
predicted: 48,
confidence: 'high',
factors: ['Día laboral', 'Clima estable', 'Sin eventos especiales'],
weatherImpact: 'Sin impacto significativo'
},
{
date: selectedDate,
product: 'Pan de molde',
predicted: 35,
confidence: 'high',
factors: ['Demanda constante', 'Histórico estable'],
weatherImpact: 'Sin impacto'
},
{
date: selectedDate,
product: 'Café',
predicted: 72,
confidence: 'medium',
factors: ['Temperatura fresca', 'Día laboral'],
weatherImpact: 'Aumento del 10% por temperatura'
},
{
date: selectedDate,
product: 'Baguettes',
predicted: 28,
confidence: 'medium',
factors: ['Día entre semana', 'Demanda normal'],
weatherImpact: 'Sin impacto'
},
{
date: selectedDate,
product: 'Napolitanas',
predicted: 23,
confidence: 'low',
factors: ['Variabilidad alta', 'Datos limitados'],
weatherImpact: 'Posible reducción del 5%'
console.log('🔍 ForecastPage - existingForecasts:', existingForecasts);
console.log('🔍 ForecastPage - existingForecasts type:', typeof existingForecasts);
console.log('🔍 ForecastPage - existingForecasts isArray:', Array.isArray(existingForecasts));
if (Array.isArray(existingForecasts) && existingForecasts.length > 0) {
// Filter forecasts for selected date
const dateForecasts = existingForecasts
.filter(f => f.forecast_date && f.forecast_date.split('T')[0] === selectedDate)
.map(transformForecastResponse);
if (dateForecasts.length > 0) {
setForecastData(dateForecasts);
}
];
setForecasts(mockForecasts);
// Update 7-day chart with existing forecasts
const chartData = [];
for (let dayOffset = 0; dayOffset < 7; dayOffset++) {
const forecastDate = new Date();
forecastDate.setDate(forecastDate.getDate() + dayOffset);
const dateStr = forecastDate.toISOString().split('T')[0];
const dayData = {
date: dateStr,
croissants: 0,
pan: 0,
cafe: 0
};
// Find existing forecasts for this day
const dayForecasts = existingForecasts
.filter(f => f.forecast_date && f.forecast_date.split('T')[0] === dateStr)
.map(transformForecastResponse);
dayForecasts.forEach((forecast) => {
const key = forecast.product.toLowerCase();
if (key.includes('croissant')) dayData.croissants = forecast.predicted;
else if (key.includes('pan')) dayData.pan = forecast.predicted;
else if (key.includes('cafe')) dayData.cafe = forecast.predicted;
});
chartData.push(dayData);
}
setSampleForecastData(chartData);
} else {
console.log('🔍 ForecastPage - No existing forecasts found or invalid format');
}
// Load alerts
const alerts = await getForecastAlerts(tenantId);
if (Array.isArray(alerts) && alerts.length > 0) {
// Convert first alert to weather alert format
const alert = alerts[0];
setWeatherAlert({
type: 'rain', // Default type
impact: alert.message || 'Alert information not available',
recommendation: 'Revisa las recomendaciones del sistema'
});
}
} catch (error) {
console.error('Error loading forecasts:', error);
} finally {
setIsLoading(false);
console.error('Error loading existing forecasts:', error);
}
};
loadForecasts();
}, [selectedDate]);
if (inventoryItems && inventoryItems.length > 0) {
loadExistingForecasts();
}
}, [tenantId, selectedDate, inventoryItems, getForecasts, getForecastAlerts]);
const getConfidenceColor = (confidence: string) => {
switch (confidence) {
@@ -137,8 +319,8 @@ const ForecastPage: React.FC = () => {
};
const filteredForecasts = selectedProduct === 'all'
? forecasts
: forecasts.filter(f => f.product.toLowerCase().includes(selectedProduct.toLowerCase()));
? forecastData
: forecastData.filter(f => f.product.toLowerCase().includes(selectedProduct.toLowerCase()));
if (isLoading) {
return (
@@ -185,7 +367,7 @@ const ForecastPage: React.FC = () => {
{/* Controls */}
<div className="bg-white p-6 rounded-xl shadow-soft">
<div className="grid grid-cols-1 md:grid-cols-2 gap-4">
<div className="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-3 gap-4">
<div>
<label className="block text-sm font-medium text-gray-700 mb-2">
Fecha de predicción
@@ -214,11 +396,54 @@ const ForecastPage: React.FC = () => {
>
<option value="all">Todos los productos</option>
{products.map(product => (
<option key={product} value={product.toLowerCase()}>{product}</option>
<option key={product.id} value={product.name.toLowerCase()}>{product.name}</option>
))}
</select>
</div>
<div>
<label className="block text-sm font-medium text-gray-700 mb-2">
Generar predicciones
</label>
<button
onClick={generateForecasts}
disabled={isGenerating || !tenantId || !(inventoryItems && inventoryItems.length > 0)}
className="w-full px-4 py-3 bg-primary-500 hover:bg-primary-600 disabled:bg-gray-300 text-white rounded-xl transition-colors flex items-center justify-center"
>
{isGenerating ? (
<>
<RefreshCw className="h-4 w-4 mr-2 animate-spin" />
Generando...
</>
) : (
<>
<TrendingUp className="h-4 w-4 mr-2" />
Generar Predicciones
</>
)}
</button>
</div>
</div>
{forecastError && (
<div className="mt-4 p-4 bg-red-50 border border-red-200 rounded-lg">
<div className="flex items-center">
<AlertTriangle className="h-5 w-5 text-red-600 mr-2" />
<span className="text-red-800 text-sm">Error: {forecastError}</span>
</div>
</div>
)}
{forecastData.length === 0 && !isLoading && !isGenerating && (
<div className="mt-4 p-4 bg-blue-50 border border-blue-200 rounded-lg">
<div className="flex items-center">
<Info className="h-5 w-5 text-blue-600 mr-2" />
<span className="text-blue-800 text-sm">
No hay predicciones para la fecha seleccionada. Haz clic en "Generar Predicciones" para crear nuevas predicciones.
</span>
</div>
</div>
)}
</div>
{/* Forecast Cards */}
@@ -388,19 +613,31 @@ const ForecastPage: React.FC = () => {
Acciones Rápidas
</h3>
<div className="grid grid-cols-1 sm:grid-cols-2 lg:grid-cols-3 gap-4">
<button className="p-4 border border-gray-300 rounded-lg hover:border-primary-500 hover:bg-primary-50 transition-all text-left">
<button
onClick={() => tenantId && exportForecasts(tenantId, 'csv')}
disabled={!tenantId || forecastData.length === 0}
className="p-4 border border-gray-300 rounded-lg hover:border-primary-500 hover:bg-primary-50 disabled:opacity-50 disabled:cursor-not-allowed transition-all text-left"
>
<div className="font-medium text-gray-900">Exportar Predicciones</div>
<div className="text-sm text-gray-500 mt-1">Descargar en formato CSV</div>
</button>
<button className="p-4 border border-gray-300 rounded-lg hover:border-primary-500 hover:bg-primary-50 transition-all text-left">
<div className="font-medium text-gray-900">Configurar Alertas</div>
<div className="text-sm text-gray-500 mt-1">Recibir notificaciones automáticas</div>
<button
onClick={generateForecasts}
disabled={isGenerating || !tenantId || !(inventoryItems && inventoryItems.length > 0)}
className="p-4 border border-gray-300 rounded-lg hover:border-primary-500 hover:bg-primary-50 disabled:opacity-50 disabled:cursor-not-allowed transition-all text-left"
>
<div className="font-medium text-gray-900">Actualizar Predicciones</div>
<div className="text-sm text-gray-500 mt-1">Generar nuevas predicciones</div>
</button>
<button className="p-4 border border-gray-300 rounded-lg hover:border-primary-500 hover:bg-primary-50 transition-all text-left">
<div className="font-medium text-gray-900">Ver Precisión Histórica</div>
<div className="text-sm text-gray-500 mt-1">Analizar rendimiento del modelo</div>
<button
onClick={() => tenantId && getForecastAlerts(tenantId)}
disabled={!tenantId}
className="p-4 border border-gray-300 rounded-lg hover:border-primary-500 hover:bg-primary-50 disabled:opacity-50 disabled:cursor-not-allowed transition-all text-left"
>
<div className="font-medium text-gray-900">Ver Alertas</div>
<div className="text-sm text-gray-500 mt-1">Revisar notificaciones del sistema</div>
</button>
</div>
</div>

View File

@@ -7,13 +7,13 @@ import SmartHistoricalDataImport from '../../components/onboarding/SmartHistoric
import {
useTenant,
useTraining,
useSales,
useTrainingWebSocket,
useOnboarding,
TenantCreate,
TrainingJobRequest
} from '../../api';
import { useTraining } from '../../api/hooks/useTraining';
import { OnboardingRouter } from '../../utils/onboardingRouter';
@@ -134,7 +134,7 @@ const OnboardingPage: React.FC<OnboardingPageProps> = ({ user, onComplete }) =>
fetchTenantIdFromBackend();
}, [tenantId, user, getUserTenants]);
// WebSocket connection for real-time training updates
// Enhanced WebSocket connection for real-time training updates
const {
status,
jobUpdates,
@@ -143,7 +143,11 @@ const OnboardingPage: React.FC<OnboardingPageProps> = ({ user, onComplete }) =>
isConnected,
lastMessage,
tenantId: resolvedTenantId,
wsUrl
wsUrl,
connectionError,
isAuthenticationError,
refreshConnection,
retryWithAuth
} = useTrainingWebSocket(trainingJobId || 'pending', tenantId);
// Handle WebSocket job updates
@@ -203,12 +207,19 @@ const OnboardingPage: React.FC<OnboardingPageProps> = ({ user, onComplete }) =>
currentStep: 'Error en el entrenamiento'
}));
} else if (messageType === 'initial_status') {
} else if (messageType === 'initial_status' || messageType === 'current_status') {
console.log('Received training status update:', messageType, data);
setTrainingProgress(prev => ({
...prev,
progress: typeof data.progress === 'number' ? data.progress : prev.progress,
status: data.status || prev.status,
currentStep: data.current_step || data.currentStep || prev.currentStep
currentStep: data.current_step || data.currentStep || prev.currentStep,
productsCompleted: data.products_completed || data.productsCompleted || prev.productsCompleted,
productsTotal: data.products_total || data.productsTotal || prev.productsTotal,
estimatedTimeRemaining: data.estimated_time_remaining_minutes ||
data.estimated_time_remaining ||
data.estimatedTimeRemaining ||
prev.estimatedTimeRemaining
}));
}
}, []);
@@ -228,10 +239,94 @@ const OnboardingPage: React.FC<OnboardingPageProps> = ({ user, onComplete }) =>
}
}, [jobUpdates, processWebSocketMessage]);
// Connect to WebSocket when training starts
// Enhanced WebSocket connection management with polling fallback
useEffect(() => {
if (tenantId && trainingJobId && currentStep === 3) {
console.log('Connecting to training WebSocket:', { tenantId, trainingJobId, wsUrl });
connect();
// Simple polling fallback for training completion detection (now that we fixed the 404 issue)
const pollingInterval = setInterval(async () => {
if (trainingProgress.status === 'running' || trainingProgress.status === 'pending') {
try {
// Check training job status via REST API as fallback
const response = await fetch(`http://localhost:8000/api/v1/tenants/${tenantId}/training/jobs/${trainingJobId}/status`, {
headers: {
'Authorization': `Bearer ${localStorage.getItem('auth_token')}`,
'X-Tenant-ID': tenantId
}
});
if (response.ok) {
const jobStatus = await response.json();
// If the job is completed but we haven't received WebSocket notification
if (jobStatus.status === 'completed' && (trainingProgress.status === 'running' || trainingProgress.status === 'pending')) {
console.log('Training completed detected via REST polling fallback');
setTrainingProgress(prev => ({
...prev,
progress: 100,
status: 'completed',
currentStep: 'Entrenamiento completado',
estimatedTimeRemaining: 0
}));
// Mark training step as completed in onboarding API
completeStep('training_completed', {
training_completed_at: new Date().toISOString(),
user_id: user?.id,
tenant_id: tenantId,
completion_detected_via: 'rest_polling_fallback'
}).catch(error => {
console.warn('Failed to mark training as completed in API:', error);
});
// Show celebration and auto-advance to final step after 3 seconds
toast.success('🎉 Training completed! Your AI model is ready to use.', {
duration: 5000,
icon: '🤖'
});
setTimeout(() => {
manualNavigation.current = true;
setCurrentStep(4);
}, 3000);
// Clear the polling interval
clearInterval(pollingInterval);
}
// If job failed, update status
if (jobStatus.status === 'failed' && (trainingProgress.status === 'running' || trainingProgress.status === 'pending')) {
console.log('Training failure detected via REST polling fallback');
setTrainingProgress(prev => ({
...prev,
status: 'failed',
error: jobStatus.error_message || 'Error en el entrenamiento',
currentStep: 'Error en el entrenamiento'
}));
clearInterval(pollingInterval);
}
}
} catch (error) {
// Ignore polling errors to avoid noise
console.debug('REST polling error (expected if training not started):', error);
}
} else if (trainingProgress.status === 'completed' || trainingProgress.status === 'failed') {
// Clear polling if training is finished
clearInterval(pollingInterval);
}
}, 15000); // Poll every 15 seconds (less aggressive than before)
return () => {
if (isConnected) {
disconnect();
}
clearInterval(pollingInterval);
};
}
return () => {
@@ -239,7 +334,35 @@ const OnboardingPage: React.FC<OnboardingPageProps> = ({ user, onComplete }) =>
disconnect();
}
};
}, [tenantId, trainingJobId, currentStep, connect, disconnect, isConnected]);
}, [tenantId, trainingJobId, currentStep]); // Removed problematic dependencies that cause reconnection loops
// Handle connection errors with user feedback
useEffect(() => {
if (connectionError) {
if (isAuthenticationError) {
toast.error('Sesión expirada. Reintentando conexión...');
// Auto-retry authentication errors after 3 seconds
setTimeout(() => {
retryWithAuth();
}, 3000);
} else {
console.warn('WebSocket connection error:', connectionError);
// Don't show error toast for non-auth errors as they auto-retry
}
}
}, [connectionError, isAuthenticationError, retryWithAuth]);
// Enhanced WebSocket status logging
useEffect(() => {
console.log('WebSocket status changed:', {
status,
isConnected,
jobId: trainingJobId,
tenantId,
connectionError,
isAuthenticationError
});
}, [status, isConnected, trainingJobId, tenantId, connectionError, isAuthenticationError]);
const storeTenantId = (tenantId: string) => {
@@ -632,6 +755,10 @@ const OnboardingPage: React.FC<OnboardingPageProps> = ({ user, onComplete }) =>
estimatedTimeRemaining: trainingProgress.estimatedTimeRemaining,
error: trainingProgress.error
}}
websocketStatus={status}
connectionError={connectionError}
isConnected={isConnected}
onRetryConnection={refreshConnection}
onTimeout={() => {
toast.success('El entrenamiento continuará en segundo plano. ¡Puedes empezar a explorar!');
onComplete(); // Navigate to dashboard

View File

@@ -122,85 +122,60 @@ async def metrics():
@app.websocket("/api/v1/ws/tenants/{tenant_id}/training/jobs/{job_id}/live")
async def websocket_training_progress(websocket: WebSocket, tenant_id: str, job_id: str):
"""WebSocket proxy for training progress updates"""
"""WebSocket proxy that forwards connections directly to training service"""
await websocket.accept()
# Get token from query params
token = websocket.query_params.get("token")
if not token:
logger.warning(f"WebSocket connection rejected - missing token for job {job_id}")
await websocket.close(code=1008, reason="Authentication token required")
return
# Build HTTP URL to training service (we'll use HTTP client to proxy)
logger.info(f"Proxying WebSocket connection to training service for job {job_id}, tenant {tenant_id}")
# Build WebSocket URL to training service
training_service_base = settings.TRAINING_SERVICE_URL.rstrip('/')
training_ws_url = f"{training_service_base}/api/v1/ws/tenants/{tenant_id}/training/jobs/{job_id}/live?token={token}"
training_ws_url = training_service_base.replace('http://', 'ws://').replace('https://', 'wss://')
training_ws_url = f"{training_ws_url}/api/v1/ws/tenants/{tenant_id}/training/jobs/{job_id}/live?token={token}"
try:
# Use HTTP client to connect to training service WebSocket
async with httpx.AsyncClient() as client:
# Since we can't easily proxy WebSocket with httpx, let's try a different approach
# We'll make periodic HTTP requests to get training status
logger.info(f"Starting WebSocket proxy for training job {job_id}")
# Connect to training service WebSocket
import websockets
async with websockets.connect(training_ws_url) as training_ws:
logger.info(f"Connected to training service WebSocket for job {job_id}")
# Send initial connection confirmation
await websocket.send_json({
"type": "connection_established",
"job_id": job_id,
"tenant_id": tenant_id
})
# Poll for training updates
last_status = None
while True:
async def forward_to_training():
"""Forward messages from frontend to training service"""
try:
# Make HTTP request to get current training status
status_url = f"{training_service_base}/api/v1/tenants/{tenant_id}/training/jobs/{job_id}/status"
response = await client.get(
status_url,
headers={"Authorization": f"Bearer {token}"},
timeout=5.0
)
if response.status_code == 200:
current_status = response.json()
# Only send update if status changed
if current_status != last_status:
await websocket.send_json({
"type": "training_progress",
"data": current_status
})
last_status = current_status
# If training is completed or failed, we can stop polling
if current_status.get('status') in ['completed', 'failed', 'cancelled']:
await websocket.send_json({
"type": "training_" + current_status.get('status', 'completed'),
"data": current_status
})
break
# Wait before next poll
await asyncio.sleep(2)
except WebSocketDisconnect:
logger.info("WebSocket client disconnected")
break
except httpx.TimeoutException:
# Continue polling even if request times out
await asyncio.sleep(5)
continue
async for message in websocket.iter_text():
await training_ws.send(message)
except Exception as e:
logger.error(f"Error polling training status: {e}")
await asyncio.sleep(5)
continue
logger.error(f"Error forwarding to training service: {e}")
async def forward_to_frontend():
"""Forward messages from training service to frontend"""
try:
async for message in training_ws:
await websocket.send_text(message)
except Exception as e:
logger.error(f"Error forwarding to frontend: {e}")
# Run both forwarding tasks concurrently
await asyncio.gather(
forward_to_training(),
forward_to_frontend(),
return_exceptions=True
)
except WebSocketDisconnect:
logger.info("WebSocket client disconnected during setup")
except Exception as e:
logger.error(f"WebSocket proxy error: {e}")
await websocket.close(code=1011, reason="Internal server error")
logger.error(f"WebSocket proxy error for job {job_id}: {e}")
try:
await websocket.close(code=1011, reason="Training service connection failed")
except:
pass
finally:
logger.info(f"WebSocket proxy closed for job {job_id}")
if __name__ == "__main__":
import uvicorn

View File

@@ -12,4 +12,5 @@ email-validator==2.0.0
aio-pika==9.3.0
pytz==2023.3
python-logstash==0.4.8
structlog==23.2.0
structlog==23.2.0
websockets==12.0

View File

@@ -236,11 +236,19 @@ class MadridTrafficClient(BaseTrafficClient, BaseAPIClient):
try:
# Process by year and month to avoid memory issues
current_date = start_date.replace(day=1) # Start from beginning of month
now = datetime.now()
while current_date <= end_date:
year = current_date.year
month = current_date.month
# Skip current month and future months (no historical data available yet)
if (year == now.year and month >= now.month) or year > now.year:
self.logger.info("Skipping current/future month - no historical data available",
year=year, month=month)
current_date = self._next_month(current_date)
continue
# Build historical URL
zip_url = self.api_client._build_historical_url(year, month)
@@ -251,7 +259,7 @@ class MadridTrafficClient(BaseTrafficClient, BaseAPIClient):
zip_content = await self.api_client.fetch_historical_zip(zip_url)
if not zip_content:
self.logger.warning("Failed to fetch historical ZIP", url=zip_url)
current_date = current_date.replace(month=current_date.month + 1) if current_date.month < 12 else current_date.replace(year=current_date.year + 1, month=1)
current_date = self._next_month(current_date)
continue
# Process ZIP content with enhanced parsing
@@ -286,11 +294,8 @@ class MadridTrafficClient(BaseTrafficClient, BaseAPIClient):
filtered_records=len(filtered_records),
total_records=len(historical_records))
# Move to next month
if current_date.month == 12:
current_date = current_date.replace(year=current_date.year + 1, month=1)
else:
current_date = current_date.replace(month=current_date.month + 1)
# Move to next month - extracted to helper method
current_date = self._next_month(current_date)
return historical_records
@@ -347,4 +352,10 @@ class MadridTrafficClient(BaseTrafficClient, BaseAPIClient):
zip_url=zip_url, error=str(e))
return []
def _next_month(self, current_date: datetime) -> datetime:
"""Helper method to move to next month"""
if current_date.month == 12:
return current_date.replace(year=current_date.year + 1, month=1)
else:
return current_date.replace(month=current_date.month + 1)

View File

@@ -42,22 +42,9 @@ class MadridTrafficAPIClient(BaseAPIClient):
def _build_historical_url(self, year: int, month: int) -> str:
"""Build historical ZIP URL for given year and month"""
# Madrid historical data URL pattern
base_url = "https://datos.madrid.es/egob/catalogo/208627"
# URL numbering pattern (this may need adjustment based on actual URLs)
# Note: Historical data is only available for past periods, not current/future
if year == 2023:
url_number = 116 + (month - 1) # 116-127 for 2023
elif year == 2024:
url_number = 128 + (month - 1) # 128-139 for 2024
elif year == 2025:
# For 2025, use the continuing numbering from 2024
url_number = 140 + (month - 1) # Starting from 140 for January 2025
else:
url_number = 116 # Fallback to 2023 data
return f"{base_url}-{url_number}-transporte-ptomedida-historico.zip"
# Madrid uses a direct file pattern now: https://datos.madrid.es/egobfiles/MANUAL/208627/MM-YYYY.zip
# Only historical data is available (not current month)
return f"https://datos.madrid.es/egobfiles/MANUAL/208627/{month:02d}-{year}.zip"
async def fetch_current_traffic_xml(self, endpoint: Optional[str] = None) -> Optional[str]:
"""Fetch current traffic XML data"""

View File

@@ -84,18 +84,22 @@ class TrafficRepository:
if not traffic_data_list:
return 0
# Check for existing records to avoid duplicates
# Check for existing records to avoid duplicates - batch the queries to avoid parameter limit
dates = [data.get('date') for data in traffic_data_list if data.get('date')]
existing_dates = set()
if dates:
existing_stmt = select(TrafficData.date).where(
and_(
TrafficData.location_id == location_id,
TrafficData.date.in_(dates)
# PostgreSQL has a limit of 32767 parameters, so batch the queries
batch_size = 30000 # Safe batch size under the limit
for i in range(0, len(dates), batch_size):
date_batch = dates[i:i + batch_size]
existing_stmt = select(TrafficData.date).where(
and_(
TrafficData.location_id == location_id,
TrafficData.date.in_(date_batch)
)
)
)
result = await self.session.execute(existing_stmt)
existing_dates = {row[0] for row in result.fetchall()}
result = await self.session.execute(existing_stmt)
existing_dates.update({row[0] for row in result.fetchall()})
logger.debug(f"Found {len(existing_dates)} existing records for location {location_id}")
batch_records = []

View File

@@ -420,20 +420,20 @@ class EnhancedForecastingService:
if prediction['prediction'] > 100: # Threshold for high demand
alerts_to_create.append({
"tenant_id": str(forecast.tenant_id),
"forecast_id": forecast.id,
"forecast_id": str(forecast.id), # Convert UUID to string
"alert_type": "high_demand",
"severity": "high" if prediction['prediction'] > 200 else "medium",
"message": f"High demand predicted for inventory product {forecast.inventory_product_id}: {prediction['prediction']:.1f} units"
"message": f"High demand predicted for inventory product {str(forecast.inventory_product_id)}: {prediction['prediction']:.1f} units"
})
# Check for low demand alert
elif prediction['prediction'] < 10: # Threshold for low demand
alerts_to_create.append({
"tenant_id": str(forecast.tenant_id),
"forecast_id": forecast.id,
"forecast_id": str(forecast.id), # Convert UUID to string
"alert_type": "low_demand",
"severity": "low",
"message": f"Low demand predicted for inventory product {forecast.inventory_product_id}: {prediction['prediction']:.1f} units"
"message": f"Low demand predicted for inventory product {str(forecast.inventory_product_id)}: {prediction['prediction']:.1f} units"
})
# Check for stockout risk (very low prediction with narrow confidence interval)
@@ -441,10 +441,10 @@ class EnhancedForecastingService:
if prediction['prediction'] < 5 and confidence_interval < 10:
alerts_to_create.append({
"tenant_id": str(forecast.tenant_id),
"forecast_id": forecast.id,
"forecast_id": str(forecast.id), # Convert UUID to string
"alert_type": "stockout_risk",
"severity": "critical",
"message": f"Stockout risk for inventory product {forecast.inventory_product_id}: predicted {prediction['prediction']:.1f} units with high confidence"
"message": f"Stockout risk for inventory product {str(forecast.inventory_product_id)}: predicted {prediction['prediction']:.1f} units with high confidence"
})
# Create alerts
@@ -462,7 +462,7 @@ class EnhancedForecastingService:
return ForecastResponse(
id=str(cache_entry.id),
tenant_id=str(cache_entry.tenant_id),
inventory_product_id=cache_entry.inventory_product_id,
inventory_product_id=str(cache_entry.inventory_product_id), # Convert UUID to string
location=cache_entry.location,
forecast_date=cache_entry.forecast_date,
predicted_demand=cache_entry.predicted_demand,
@@ -486,7 +486,7 @@ class EnhancedForecastingService:
return ForecastResponse(
id=str(forecast.id),
tenant_id=str(forecast.tenant_id),
inventory_product_id=forecast.inventory_product_id,
inventory_product_id=str(forecast.inventory_product_id), # Convert UUID to string
location=forecast.location,
forecast_date=forecast.forecast_date,
predicted_demand=forecast.predicted_demand,
@@ -514,7 +514,7 @@ class EnhancedForecastingService:
return {
"id": str(forecast.id),
"tenant_id": str(forecast.tenant_id),
"inventory_product_id": forecast.inventory_product_id,
"inventory_product_id": str(forecast.inventory_product_id), # Convert UUID to string
"location": forecast.location,
"forecast_date": forecast.forecast_date.isoformat(),
"predicted_demand": forecast.predicted_demand,

View File

@@ -90,6 +90,13 @@ class IngredientRepository(BaseRepository[Ingredient, IngredientCreate, Ingredie
) -> List[Ingredient]:
"""Get ingredients for a tenant with filtering"""
try:
# Handle search filter separately since it requires special query logic
if filters and filters.get('search'):
search_term = filters['search']
logger.info(f"Searching ingredients with term: '{search_term}'", tenant_id=tenant_id)
return await self.search_ingredients(tenant_id, search_term, skip, limit)
# Handle other filters with standard multi-get
query_filters = {'tenant_id': tenant_id}
if filters:
if filters.get('category'):

View File

@@ -26,28 +26,6 @@ def get_sales_service():
"""Dependency injection for SalesService"""
return SalesService()
@router.get("/tenants/{tenant_id}/sales/products")
async def get_products_list(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    sales_service: SalesService = Depends(get_sales_service)
):
    """Return the tenant's product list via the sales service repository.

    Delegates entirely to SalesService.get_products_list; any failure is
    logged with tenant context and surfaced to the caller as HTTP 500.
    """
    try:
        logger.debug("Getting products list with repository pattern", tenant_id=tenant_id)
        product_list = await sales_service.get_products_list(str(tenant_id))
        logger.debug("Products list retrieved using repository", count=len(product_list), tenant_id=tenant_id)
        return product_list
    except Exception as e:
        # Keep the original error wording so existing log queries still match.
        logger.error("Failed to get products list", error=str(e), tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to get products list: {str(e)}")
@router.post("/tenants/{tenant_id}/sales", response_model=SalesDataResponse)
async def create_sales_record(

View File

@@ -97,8 +97,9 @@ class SalesRepository(BaseRepository[SalesData, SalesDataCreate, SalesDataUpdate
# Apply pagination
stmt = stmt.offset(query_params.offset).limit(query_params.limit)
else:
# Default ordering
stmt = stmt.order_by(desc(SalesData.date)).limit(50)
# Default ordering with safety limit for direct repository calls
# Note: API calls always provide query_params, so this only applies to direct usage
stmt = stmt.order_by(desc(SalesData.date)).limit(10000)
result = await self.session.execute(stmt)
records = result.scalars().all()
@@ -279,24 +280,3 @@ class SalesRepository(BaseRepository[SalesData, SalesDataCreate, SalesDataUpdate
logger.error("Failed to validate sales record", error=str(e), record_id=record_id)
raise
async def get_product_statistics(self, tenant_id: str) -> List[str]:
    """Return the sorted, de-duplicated inventory product IDs with sales for a tenant.

    Note: the product_name field was removed — product info is now managed
    via the inventory service. This method should eventually query products
    from the inventory service; for now it returns inventory_product_ids
    (as strings) to avoid breaking existing callers.

    Args:
        tenant_id: Tenant whose sales records are scanned.

    Returns:
        Sorted list of distinct inventory product IDs as strings.

    Raises:
        Exception: re-raises any database error after logging it.
    """
    try:
        # DISTINCT non-null product IDs for this tenant only.
        stmt = select(SalesData.inventory_product_id).where(
            and_(
                SalesData.tenant_id == tenant_id,
                SalesData.inventory_product_id.is_not(None)
            )
        ).distinct()
        result = await self.session.execute(stmt)
        products = [str(row[0]) for row in result if row[0]]
        return sorted(products)
    except Exception as e:
        # Fixed: message previously said "product categories", which was wrong
        # for this method and made log searches misleading.
        logger.error("Failed to get product statistics", error=str(e), tenant_id=tenant_id)
        raise

View File

@@ -286,14 +286,67 @@ class AIOnboardingService:
)
suggestions.append(suggestion)
business_model = BusinessModelAnalysis(
model=business_model_raw.get("model", "unknown"),
confidence=business_model_raw.get("confidence", 0.0),
ingredient_count=business_model_raw.get("ingredient_count", 0),
finished_product_count=business_model_raw.get("finished_product_count", 0),
ingredient_ratio=business_model_raw.get("ingredient_ratio", 0.0),
recommendations=business_model_raw.get("recommendations", [])
)
# Check if enhanced business intelligence data is available
bi_data = product_analysis.get('__business_intelligence__')
if bi_data and bi_data.get('confidence_score', 0) > 0.6:
# Use enhanced business intelligence analysis
business_type = bi_data.get('business_type', 'bakery')
business_model_detected = bi_data.get('business_model', 'individual')
# Map business intelligence results to existing model format
model_mapping = {
'individual': 'individual_bakery',
'central_distribution': 'central_baker_satellite',
'central_bakery': 'central_baker_satellite',
'hybrid': 'hybrid_bakery'
}
mapped_model = model_mapping.get(business_model_detected, 'individual_bakery')
# Count ingredients vs finished products from suggestions
ingredient_count = sum(1 for s in suggestions if s.product_type == 'ingredient')
finished_product_count = sum(1 for s in suggestions if s.product_type == 'finished_product')
total_products = len(suggestions)
ingredient_ratio = ingredient_count / total_products if total_products > 0 else 0.0
# Enhanced recommendations based on BI analysis
enhanced_recommendations = bi_data.get('recommendations', [])
# Add business type specific recommendations
if business_type == 'coffee_shop':
enhanced_recommendations.extend([
"Configure beverage inventory management",
"Set up quick-service item tracking",
"Enable all-day service optimization"
])
business_model = BusinessModelAnalysis(
model=mapped_model,
confidence=bi_data.get('confidence_score', 0.0),
ingredient_count=ingredient_count,
finished_product_count=finished_product_count,
ingredient_ratio=ingredient_ratio,
recommendations=enhanced_recommendations[:6] # Limit to top 6 recommendations
)
logger.info("Using enhanced business intelligence for model analysis",
detected_type=business_type,
detected_model=business_model_detected,
mapped_model=mapped_model,
confidence=bi_data.get('confidence_score'))
else:
# Fallback to basic inventory service analysis
business_model = BusinessModelAnalysis(
model=business_model_raw.get("model", "unknown"),
confidence=business_model_raw.get("confidence", 0.0),
ingredient_count=business_model_raw.get("ingredient_count", 0),
finished_product_count=business_model_raw.get("finished_product_count", 0),
ingredient_ratio=business_model_raw.get("ingredient_ratio", 0.0),
recommendations=business_model_raw.get("recommendations", [])
)
logger.info("Using basic inventory service business model analysis")
# Calculate confidence metrics
high_confidence_count = sum(1 for s in suggestions if s.confidence_score >= 0.7)
@@ -674,6 +727,85 @@ class AIOnboardingService:
"avg_unit_price": avg_unit_price
}
# Add enhanced business intelligence analysis
try:
from app.services.business_intelligence_service import BusinessIntelligenceService
bi_service = BusinessIntelligenceService()
# Convert parsed data to format expected by BI service
sales_data = []
product_data = []
for row in rows:
# Create sales record from CSV row
sales_record = {
'date': row.get(date_column, ''),
'product_name': row.get(product_column, ''),
'name': row.get(product_column, ''),
'quantity_sold': 0,
'revenue': 0,
'location_id': row.get('location', 'main'),
'sales_channel': row.get('channel', 'in_store'),
'supplier_name': row.get('supplier', ''),
'brand': row.get('brand', '')
}
# Parse quantity
if quantity_column:
try:
qty_raw = row.get(quantity_column, 1)
if qty_raw and str(qty_raw).strip():
sales_record['quantity_sold'] = int(float(str(qty_raw).replace(',', '.')))
except:
sales_record['quantity_sold'] = 1
# Parse revenue
if revenue_column:
try:
rev_raw = row.get(revenue_column)
if rev_raw and str(rev_raw).strip():
sales_record['revenue'] = float(str(rev_raw).replace(',', '.').replace('', '').replace('$', '').strip())
except:
pass
sales_data.append(sales_record)
# Create product data entry
product_data.append({
'name': sales_record['product_name'],
'supplier_name': sales_record.get('supplier_name', ''),
'brand': sales_record.get('brand', '')
})
# Run business intelligence analysis
if sales_data:
detection_result = await bi_service.analyze_business_from_sales_data(
sales_data=sales_data,
product_data=product_data
)
# Store business intelligence results in product_analysis
product_analysis['__business_intelligence__'] = {
"business_type": detection_result.business_type,
"business_model": detection_result.business_model,
"confidence_score": detection_result.confidence_score,
"indicators": detection_result.indicators,
"recommendations": detection_result.recommendations,
"analysis_summary": f"{detection_result.business_type.title()} - {detection_result.business_model.replace('_', ' ').title()}"
}
logger.info("Enhanced business intelligence analysis completed",
business_type=detection_result.business_type,
business_model=detection_result.business_model,
confidence=detection_result.confidence_score)
else:
logger.warning("No sales data available for business intelligence analysis")
except Exception as bi_error:
logger.warning("Business intelligence analysis failed", error=str(bi_error))
# Continue with basic analysis even if BI fails
return product_analysis
except Exception as e:

View File

@@ -428,7 +428,7 @@ class DataImportService:
repository: SalesRepository,
filename: Optional[str] = None
) -> Dict[str, Any]:
"""Enhanced CSV processing with better data handling"""
"""Enhanced CSV processing with batch product resolution for better reliability"""
try:
reader = csv.DictReader(io.StringIO(csv_content))
rows = list(reader)
@@ -445,22 +445,41 @@ class DataImportService:
# Enhanced column mapping
column_mapping = self._detect_columns(list(rows[0].keys()))
records_created = 0
errors = []
warnings = []
# Pre-process to extract unique products for batch creation
unique_products = set()
parsed_rows = []
logger.info(f"Processing {len(rows)} records from CSV with enhanced mapping")
logger.info(f"Pre-processing {len(rows)} records to identify unique products")
for index, row in enumerate(rows):
try:
# Enhanced data parsing and validation
parsed_data = await self._parse_row_data(row, column_mapping, index + 1)
if parsed_data.get("skip"):
errors.extend(parsed_data.get("errors", []))
warnings.extend(parsed_data.get("warnings", []))
continue
# Resolve product name to inventory_product_id
if not parsed_data.get("skip"):
unique_products.add((
parsed_data["product_name"],
parsed_data.get("product_category", "general")
))
parsed_rows.append((index, parsed_data))
except Exception as e:
logger.warning(f"Failed to parse row {index + 1}: {e}")
continue
logger.info(f"Found {len(unique_products)} unique products, attempting batch resolution")
# Try to resolve/create all unique products in batch
await self._batch_resolve_products(unique_products, tenant_id)
# Now process the actual sales records
records_created = 0
errors = []
warnings = []
logger.info(f"Processing {len(parsed_rows)} validated records for sales creation")
for index, parsed_data in parsed_rows:
try:
# Resolve product name to inventory_product_id (should be cached now)
inventory_product_id = await self._resolve_product_to_inventory_id(
parsed_data["product_name"],
parsed_data.get("product_category"),
@@ -914,47 +933,57 @@ class DataImportService:
logger.info("Import cache cleared for new session")
async def _resolve_product_to_inventory_id(self, product_name: str, product_category: Optional[str], tenant_id: UUID) -> Optional[UUID]:
"""Resolve a product name to an inventory_product_id via the inventory service with caching and rate limiting"""
"""Resolve a product name to an inventory_product_id via the inventory service with improved error handling and fallback"""
# Check cache first
if product_name in self.product_cache:
logger.debug("Product resolved from cache", product_name=product_name, tenant_id=tenant_id)
return self.product_cache[product_name]
# Skip if this product already failed to resolve
# Skip if this product already failed to resolve after all attempts
if product_name in self.failed_products:
logger.debug("Skipping previously failed product", product_name=product_name, tenant_id=tenant_id)
return None
max_retries = 3
base_delay = 1.0 # Start with 1 second delay
max_retries = 5 # Increased retries
base_delay = 2.0 # Increased base delay
fallback_retry_delay = 10.0 # Longer delay for fallback attempts
for attempt in range(max_retries):
try:
# Add delay before API calls to avoid rate limiting
# Add progressive delay to avoid rate limiting
if attempt > 0:
delay = base_delay * (2 ** (attempt - 1)) # Exponential backoff
# Use longer delays for later attempts
if attempt >= 3:
delay = fallback_retry_delay # Use fallback delay for later attempts
else:
delay = base_delay * (2 ** (attempt - 1)) # Exponential backoff
logger.info(f"Retrying product resolution after {delay}s delay",
product_name=product_name, attempt=attempt, tenant_id=tenant_id)
await asyncio.sleep(delay)
# First try to search for existing product by name
products = await self.inventory_client.search_products(product_name, tenant_id)
try:
products = await self.inventory_client.search_products(product_name, tenant_id)
if products:
# Return the first matching product's ID
product_id = products[0].get('id')
if product_id:
uuid_id = UUID(str(product_id))
self.product_cache[product_name] = uuid_id # Cache for future use
logger.info("Resolved product to existing inventory ID",
product_name=product_name, product_id=product_id, tenant_id=tenant_id)
return uuid_id
except Exception as search_error:
logger.warning("Product search failed, trying direct creation",
product_name=product_name, error=str(search_error), tenant_id=tenant_id)
if products:
# Return the first matching product's ID
product_id = products[0].get('id')
if product_id:
uuid_id = UUID(str(product_id))
self.product_cache[product_name] = uuid_id # Cache for future use
logger.info("Resolved product to existing inventory ID",
product_name=product_name, product_id=product_id, tenant_id=tenant_id)
return uuid_id
# Add delay before creation attempt to avoid hitting rate limits
await asyncio.sleep(1.0)
# Add small delay before creation attempt to avoid hitting rate limits
await asyncio.sleep(0.5)
# If not found, create a new ingredient/product in inventory
# If not found or search failed, create a new ingredient/product in inventory
ingredient_data = {
'name': product_name,
'type': 'finished_product', # Assuming sales are of finished products
@@ -965,36 +994,133 @@ class DataImportService:
'category': product_category or 'general'
}
created_product = await self.inventory_client.create_ingredient(ingredient_data, str(tenant_id))
if created_product and created_product.get('id'):
product_id = created_product['id']
uuid_id = UUID(str(product_id))
self.product_cache[product_name] = uuid_id # Cache for future use
logger.info("Created new inventory product for sales data",
product_name=product_name, product_id=product_id, tenant_id=tenant_id)
return uuid_id
try:
created_product = await self.inventory_client.create_ingredient(ingredient_data, str(tenant_id))
if created_product and created_product.get('id'):
product_id = created_product['id']
uuid_id = UUID(str(product_id))
self.product_cache[product_name] = uuid_id # Cache for future use
logger.info("Created new inventory product for sales data",
product_name=product_name, product_id=product_id, tenant_id=tenant_id)
return uuid_id
except Exception as creation_error:
logger.warning("Product creation failed",
product_name=product_name, error=str(creation_error), tenant_id=tenant_id)
logger.warning("Failed to resolve or create product in inventory",
product_name=product_name, tenant_id=tenant_id, attempt=attempt)
except Exception as e:
error_str = str(e)
if "429" in error_str or "rate limit" in error_str.lower():
logger.warning("Rate limit hit, retrying",
if "429" in error_str or "rate limit" in error_str.lower() or "too many requests" in error_str.lower():
logger.warning("Rate limit or service overload detected, retrying with longer delay",
product_name=product_name, attempt=attempt, error=error_str, tenant_id=tenant_id)
if attempt < max_retries - 1:
continue # Retry with exponential backoff
elif "503" in error_str or "502" in error_str or "service unavailable" in error_str.lower():
logger.warning("Service unavailable, retrying with backoff",
product_name=product_name, attempt=attempt, error=error_str, tenant_id=tenant_id)
if attempt < max_retries - 1:
continue # Retry for service unavailable errors
elif "timeout" in error_str.lower() or "connection" in error_str.lower():
logger.warning("Network issue detected, retrying",
product_name=product_name, attempt=attempt, error=error_str, tenant_id=tenant_id)
if attempt < max_retries - 1:
continue # Retry for network issues
else:
logger.error("Error resolving product to inventory ID",
logger.error("Non-retryable error resolving product to inventory ID",
error=error_str, product_name=product_name, tenant_id=tenant_id)
break # Don't retry for non-rate-limit errors
if attempt < max_retries - 1:
# Still retry even for other errors, in case it's transient
continue
else:
break # Don't retry on final attempt
# If all retries failed, mark as failed and return None
# If all retries failed, log detailed error but don't mark as permanently failed yet
# Instead, we'll implement a fallback mechanism
logger.error("Failed to resolve product after all retries, attempting fallback",
product_name=product_name, tenant_id=tenant_id)
# FALLBACK: Try to create a temporary product with minimal data
try:
# Use a simplified approach with minimal data
fallback_data = {
'name': product_name,
'type': 'finished_product',
'unit': 'unit',
'current_stock': 0,
'cost_per_unit': 0
}
logger.info("Attempting fallback product creation with minimal data",
product_name=product_name, tenant_id=tenant_id)
created_product = await self.inventory_client.create_ingredient(fallback_data, str(tenant_id))
if created_product and created_product.get('id'):
product_id = created_product['id']
uuid_id = UUID(str(product_id))
self.product_cache[product_name] = uuid_id
logger.info("SUCCESS: Fallback product creation succeeded",
product_name=product_name, product_id=product_id, tenant_id=tenant_id)
return uuid_id
except Exception as fallback_error:
logger.error("Fallback product creation also failed",
product_name=product_name, error=str(fallback_error), tenant_id=tenant_id)
# Only mark as permanently failed after all attempts including fallback
self.failed_products.add(product_name)
logger.error("Failed to resolve product after all retries",
logger.error("CRITICAL: Permanently failed to resolve product - this will result in missing training data",
product_name=product_name, tenant_id=tenant_id)
return None
async def _batch_resolve_products(self, unique_products: set, tenant_id: str) -> None:
    """Resolve or create every unique product up front, in small batches.

    Warms ``self.product_cache`` (and, on failure, ``self.failed_products``)
    so that the per-row sales import afterwards can look products up without
    additional inventory-service calls. Small batches plus sleeps between
    calls keep the request rate gentle, since the inventory service is known
    to rate-limit bursts (see the retry logic in the single-product resolver).

    Args:
        unique_products: Set of ``(product_name, product_category)`` tuples.
        tenant_id: Tenant the products belong to; forwarded unchanged to
            ``_resolve_product_to_inventory_id``.
    """
    if not unique_products:
        return

    products_list = list(unique_products)
    batch_size = 5  # process in small batches to avoid overwhelming the inventory service
    # Hoisted out of the loop: total batch count for progress logging (ceil division).
    total_batches = (len(products_list) + batch_size - 1) // batch_size

    logger.info(f"Starting batch product resolution for {len(products_list)} unique products")

    for i in range(0, len(products_list), batch_size):
        batch = products_list[i:i + batch_size]
        logger.info(f"Processing batch {i // batch_size + 1}/{total_batches}")

        # Process each product in the batch; the resolver does its own retrying.
        for product_name, product_category in batch:
            try:
                # Skip anything already resolved (cached) or known to be unresolvable.
                if product_name in self.product_cache or product_name in self.failed_products:
                    continue

                # Side effect: populates self.product_cache on success,
                # self.failed_products after exhausted retries.
                await self._resolve_product_to_inventory_id(product_name, product_category, tenant_id)

                # Small delay between individual products to be gentle on the API.
                await asyncio.sleep(0.5)
            except Exception as e:
                logger.warning(f"Failed to batch process product {product_name}: {e}")
                continue

        # Pause between batches (but not after the last one) to avoid rate limiting.
        if i + batch_size < len(products_list):
            logger.info("Waiting between batches to avoid rate limiting...")
            await asyncio.sleep(2.0)

    # Summarize outcomes; count names rather than building throwaway lists.
    successful_resolutions = sum(1 for name, _ in products_list if name in self.product_cache)
    failed_resolutions = sum(1 for name, _ in products_list if name in self.failed_products)
    logger.info(f"Batch product resolution completed: {successful_resolutions} successful, {failed_resolutions} failed")
    if failed_resolutions > 0:
        logger.warning(f"ATTENTION: {failed_resolutions} products failed to resolve - these will be missing from training data")
def _structure_messages(self, messages: List[Union[str, Dict]]) -> List[Dict[str, Any]]:
"""Convert string messages to structured format"""
structured = []

View File

@@ -285,26 +285,6 @@ class SalesService:
# Don't fail the main operation for auxiliary actions
logger.warning("Failed to execute post-create actions", error=str(e), record_id=record.id)
async def get_products_list(self, tenant_id: str) -> List[Dict[str, Any]]:
"""Get list of all products with sales data for tenant using repository pattern"""
try:
async with get_db_transaction() as db:
repository = SalesRepository(db)
# Use repository method for product statistics
products = await repository.get_product_statistics(tenant_id)
logger.debug("Products list retrieved successfully",
tenant_id=tenant_id,
product_count=len(products))
return products
except Exception as e:
logger.error("Failed to get products list",
error=str(e),
tenant_id=tenant_id)
raise DatabaseError(f"Failed to get products list: {str(e)}")
# New inventory integration methods
async def search_inventory_products(self, search_term: str, tenant_id: UUID,

View File

@@ -186,6 +186,15 @@ async def execute_enhanced_training_job_background(
enhanced_training_service = EnhancedTrainingService(database_manager)
try:
# Create initial training log entry first
await enhanced_training_service._update_job_status_repository(
job_id=job_id,
status="pending",
progress=0,
current_step="Starting enhanced training job",
tenant_id=tenant_id
)
# Publish job started event
await publish_job_started(job_id, tenant_id, {
"enhanced_features": True,
@@ -214,7 +223,8 @@ async def execute_enhanced_training_job_background(
job_id=job_id,
status="running",
progress=0,
current_step="Initializing enhanced training pipeline"
current_step="Initializing enhanced training pipeline",
tenant_id=tenant_id
)
# Execute the enhanced training pipeline with repository pattern
@@ -232,7 +242,8 @@ async def execute_enhanced_training_job_background(
status="completed",
progress=100,
current_step="Enhanced training completed successfully",
results=result
results=result,
tenant_id=tenant_id
)
# Publish enhanced completion event
@@ -262,7 +273,8 @@ async def execute_enhanced_training_job_background(
status="failed",
progress=0,
current_step="Enhanced training failed",
error_message=str(training_error)
error_message=str(training_error),
tenant_id=tenant_id
)
except Exception as status_error:
logger.error("Failed to update job status after training error",

View File

@@ -92,9 +92,27 @@ class EnhancedBakeryMLTrainer:
# Get unique products from the sales data
products = sales_df['inventory_product_id'].unique().tolist()
logger.info("Training enhanced models",
# Debug: Log sales data details to understand why only one product is found
total_sales_records = len(sales_df)
sales_by_product = sales_df.groupby('inventory_product_id').size().to_dict()
logger.info("Enhanced training pipeline - Sales data analysis",
total_sales_records=total_sales_records,
products_count=len(products),
products=products)
products=products,
sales_by_product=sales_by_product)
if len(products) == 1:
logger.warning("Only ONE product found in sales data - this may indicate a data fetching issue",
tenant_id=tenant_id,
single_product_id=products[0],
total_sales_records=total_sales_records)
elif len(products) == 0:
raise ValueError("No products found in sales data")
else:
logger.info("Multiple products detected for training",
products_count=len(products))
self.status_publisher.products_total = len(products)
@@ -512,7 +530,7 @@ class EnhancedBakeryMLTrainer:
from_column='quantity_sold',
to_column='quantity')
required_columns = ['date', 'product_name', 'quantity']
required_columns = ['date', 'inventory_product_id', 'quantity']
missing_columns = [col for col in required_columns if col not in sales_df.columns]
if missing_columns:
raise ValueError(f"Missing required columns: {missing_columns}")
@@ -541,7 +559,7 @@ class EnhancedBakeryMLTrainer:
try:
logger.info("Enhanced model evaluation starting",
tenant_id=tenant_id,
product_name=product_name)
inventory_product_id=inventory_product_id)
# Get database session and repositories
async with self.database_manager.get_session() as db_session:

View File

@@ -574,13 +574,14 @@ class TrainingDataOrchestrator:
if city_count >= 1: # At least some city awareness
city_aware_records += 1
# Record is valid if it has basic requirements
if record_score >= 2:
# Record is valid if it has basic requirements (date + any traffic field)
# Lowered requirement from >= 2 to >= 1 to accept records with just date or traffic data
if record_score >= 1:
valid_records += 1
total_records = len(traffic_data)
validity_threshold = 0.3
enhancement_threshold = 0.2 # Lower threshold for enhanced features
validity_threshold = 0.1 # Reduced from 0.3 to 0.1 - accept if 10% of records are valid
enhancement_threshold = 0.1 # Reduced threshold for enhanced features
basic_validity = (valid_records / total_records) >= validity_threshold
has_enhancements = (enhanced_records / total_records) >= enhancement_threshold

View File

@@ -141,6 +141,30 @@ class EnhancedTrainingService:
logger.error("Training aborted - no sales data", tenant_id=tenant_id, job_id=job_id)
raise ValueError(error_msg)
# Debug: Analyze the sales data structure to understand product distribution
sales_df_debug = pd.DataFrame(sales_data)
if 'inventory_product_id' in sales_df_debug.columns:
unique_products_found = sales_df_debug['inventory_product_id'].unique()
product_counts = sales_df_debug['inventory_product_id'].value_counts().to_dict()
logger.info("Pre-flight sales data analysis",
tenant_id=tenant_id,
job_id=job_id,
total_sales_records=len(sales_data),
unique_products_count=len(unique_products_found),
unique_products=unique_products_found.tolist(),
records_per_product=product_counts)
if len(unique_products_found) == 1:
logger.warning("POTENTIAL ISSUE: Only ONE unique product found in all sales data",
tenant_id=tenant_id,
single_product=unique_products_found[0],
record_count=len(sales_data))
else:
logger.warning("No 'inventory_product_id' column found in sales data",
tenant_id=tenant_id,
columns=list(sales_df_debug.columns))
logger.info(f"Pre-flight check passed: {len(sales_data)} sales records found",
tenant_id=tenant_id, job_id=job_id)
@@ -536,18 +560,69 @@ class EnhancedTrainingService:
progress: int = None,
current_step: str = None,
error_message: str = None,
results: Dict = None):
results: Dict = None,
tenant_id: str = None):
"""Update job status using repository pattern"""
try:
async with self.database_manager.get_session() as session:
await self._init_repositories(session)
await self.training_log_repo.update_log_progress(
job_id=job_id,
progress=progress,
current_step=current_step,
status=status
)
# Check if log exists, create if not
existing_log = await self.training_log_repo.get_log_by_job_id(job_id)
if not existing_log:
# Create initial log entry
if not tenant_id:
# Extract tenant_id from job_id if not provided
# Format: enhanced_training_{tenant_id}_{job_suffix}
try:
parts = job_id.split('_')
if len(parts) >= 3 and parts[0] == 'enhanced' and parts[1] == 'training':
tenant_id = parts[2]
except Exception:
logger.warning(f"Could not extract tenant_id from job_id {job_id}")
if tenant_id:
log_data = {
"job_id": job_id,
"tenant_id": tenant_id,
"status": status or "pending",
"progress": progress or 0,
"current_step": current_step or "initializing",
"start_time": datetime.utcnow()
}
if error_message:
log_data["error_message"] = error_message
if results:
log_data["results"] = results
await self.training_log_repo.create_training_log(log_data)
logger.info("Created initial training log", job_id=job_id, tenant_id=tenant_id)
else:
logger.error("Cannot create training log without tenant_id", job_id=job_id)
return
else:
# Update existing log
await self.training_log_repo.update_log_progress(
job_id=job_id,
progress=progress,
current_step=current_step,
status=status
)
# Update additional fields if provided
if error_message or results:
update_data = {}
if error_message:
update_data["error_message"] = error_message
if results:
update_data["results"] = results
if status in ["completed", "failed"]:
update_data["end_time"] = datetime.utcnow()
if update_data:
await self.training_log_repo.update(existing_log.id, update_data)
except Exception as e:
logger.error("Failed to update job status using repository",

View File

@@ -89,13 +89,13 @@ class TrainingServiceClient(BaseServiceClient):
async def get_active_model_for_product(
self,
tenant_id: str,
product_name: str
inventory_product_id: str
) -> Optional[Dict[str, Any]]:
"""
Get the active model for a specific product
Get the active model for a specific product by inventory product ID
This is the preferred method since models are stored per product.
"""
result = await self.get(f"models/{product_name}/active", tenant_id=tenant_id)
result = await self.get(f"models/{inventory_product_id}/active", tenant_id=tenant_id)
return result
async def deploy_model(self, tenant_id: str, model_id: str) -> Optional[Dict[str, Any]]:

View File

@@ -1,162 +0,0 @@
#!/usr/bin/env python3
"""
Test script to debug onboarding inventory creation step by step
"""
import asyncio
import json
import httpx
import sys
from uuid import uuid4
# Test configuration
GATEWAY_URL = "http://localhost:8000"  # local API gateway entry point used by all requests below
TENANT_ID = "946206b3-7446-436b-b29d-f265b28d9ff5"  # fixed test tenant UUID
# Test token (you'll need to replace this with a real token)
# NOTE(review): hardcoded JWT carries an `exp` claim, so it will be expired by
# now — obtain a fresh access token before running this script.
TEST_TOKEN = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIzOTUyYTEwOC1lNWFmLTRlMjktOTJkOC0xMjc0MTBiOWJiYmEiLCJ1c2VyX2lkIjoiMzk1MmExMDgtZTVhZi00ZTI5LTkyZDgtMTI3NDEwYjliYmJhIiwiZW1haWwiOiJkZnNmc2RAdGVzdC5jb20iLCJ0eXBlIjoiYWNjZXNzIiwiZXhwIjoxNzU1MTY3NTk4LCJpYXQiOjE3NTUxNjU3OTgsImlzcyI6ImJha2VyeS1hdXRoIiwiZnVsbF9uYW1lIjoiZGZzZGZzZGYiLCJpc192ZXJpZmllZCI6ZmFsc2UsImlzX2FjdGl2ZSI6dHJ1ZSwicm9sZSI6InVzZXIifQ.hYyRqqqZ-Ud-uzn42l_ic-QjP-NWYvT8RmwmU12uaQU"
async def test_onboarding_flow():
    """Test the complete onboarding inventory creation flow.

    Manual debug script, not an automated test: it prints its findings to the
    console rather than asserting. Three steps, all through the gateway:
      1. Create an ingredient directly via the inventory endpoint.
      2. Exercise the onboarding create-inventory endpoint with two
         frontend-shaped product suggestions.
      3. Probe the health endpoints of gateway, inventory and sales services.
    Requires GATEWAY_URL, TENANT_ID, TEST_TOKEN module constants and running
    services (docker-compose).
    """
    print("🧪 Testing Onboarding Inventory Creation Flow")
    print("=" * 60)
    # Bearer auth + JSON content type for every request below.
    headers = {
        "Authorization": f"Bearer {TEST_TOKEN}",
        "Content-Type": "application/json"
    }
    async with httpx.AsyncClient(timeout=30.0) as client:
        # Step 1: Test direct ingredient creation via inventory service
        print("\n1⃣ Testing Direct Ingredient Creation via Inventory Service")
        print("-" * 50)
        ingredient_data = {
            "name": "Test Flour",
            "description": "Test ingredient for debugging",
            "category": "flour",  # Use valid enum value
            "unit_of_measure": "kg",  # Use correct field name
            "brand": "Test Supplier",
            "is_active": True
        }
        ingredient_url = f"{GATEWAY_URL}/api/v1/tenants/{TENANT_ID}/ingredients"
        print(f"URL: {ingredient_url}")
        print(f"Data: {json.dumps(ingredient_data, indent=2)}")
        try:
            response = await client.post(ingredient_url, json=ingredient_data, headers=headers)
            print(f"Status: {response.status_code}")
            print(f"Response: {response.text}")
            # 201 Created is the expected success status for this endpoint.
            if response.status_code == 201:
                print("✅ Direct ingredient creation SUCCESS!")
                created_ingredient = response.json()
                ingredient_id = created_ingredient.get('id')
                print(f"Created ingredient ID: {ingredient_id}")
            else:
                print("❌ Direct ingredient creation FAILED!")
                # On 401 the token is bad — later steps would fail too, so stop here.
                if response.status_code == 401:
                    print("❌ Authentication failed - token might be expired")
                    return
        except Exception as e:
            print(f"❌ Direct ingredient creation ERROR: {e}")
        # Step 2: Test onboarding inventory creation endpoint
        print("\n2⃣ Testing Onboarding Inventory Creation Endpoint")
        print("-" * 50)
        # Create test suggestions like the frontend sends
        suggestions = [
            {
                "suggestion_id": str(uuid4()),
                "approved": True,
                "modifications": {},
                "original_name": "Pan",
                "suggested_name": "Pan",
                "product_type": "finished_product",
                "category": "other_products",
                "unit_of_measure": "units",
                "confidence_score": 0.9,
                "estimated_shelf_life_days": None,
                "requires_refrigeration": False,
                "requires_freezing": False,
                "is_seasonal": False,
                "suggested_supplier": None,
                "notes": "Test bread product"
            },
            {
                "suggestion_id": str(uuid4()),
                "approved": True,
                "modifications": {},
                "original_name": "Test Croissant",
                "suggested_name": "Test Croissant",
                "product_type": "finished_product",
                "category": "pastries",
                "unit_of_measure": "units",
                "confidence_score": 0.8,
                "estimated_shelf_life_days": 2,
                "requires_refrigeration": False,
                "requires_freezing": False,
                "is_seasonal": False,
                "suggested_supplier": "Test Bakery",
                "notes": "Test pastry product"
            }
        ]
        onboarding_data = {"suggestions": suggestions}
        onboarding_url = f"{GATEWAY_URL}/api/v1/tenants/{TENANT_ID}/onboarding/create-inventory"
        print(f"URL: {onboarding_url}")
        print(f"Suggestions count: {len(suggestions)}")
        try:
            response = await client.post(onboarding_url, json=onboarding_data, headers=headers)
            print(f"Status: {response.status_code}")
            print(f"Response: {response.text}")
            if response.status_code == 200:
                result = response.json()
                print(f"✅ Onboarding inventory creation completed!")
                # Response is expected to report created/failed items and a success rate;
                # missing keys default to empty/zero so the report never raises.
                print(f"Created items: {len(result.get('created_items', []))}")
                print(f"Failed items: {len(result.get('failed_items', []))}")
                print(f"Success rate: {result.get('success_rate', 0)}")
                if result.get('failed_items'):
                    print("\n❌ Failed items details:")
                    for item in result['failed_items']:
                        print(f" - {item.get('suggestion_id')}: {item.get('error')}")
            else:
                print("❌ Onboarding inventory creation FAILED!")
        except Exception as e:
            print(f"❌ Onboarding inventory creation ERROR: {e}")
        # Step 3: Test service health
        print("\n3⃣ Testing Service Health")
        print("-" * 50)
        # Gateway is probed through its public URL; the two backing services
        # are probed directly on their local ports.
        services = [
            ("Gateway", f"{GATEWAY_URL}/health"),
            ("Inventory Service", "http://localhost:8008/health"),
            ("Sales Service", "http://localhost:8004/health")
        ]
        for service_name, health_url in services:
            try:
                response = await client.get(health_url)
                status = "✅ Healthy" if response.status_code == 200 else f"❌ Unhealthy ({response.status_code})"
                print(f"{service_name}: {status}")
            except Exception as e:
                # Connection errors (service down) are reported, not raised.
                print(f"{service_name}: ❌ Error - {e}")
# Entry point: run the async debug flow once; requires services to be up.
if __name__ == "__main__":
    print("Starting onboarding flow debug test...")
    print("Make sure your services are running with docker-compose!")
    print()
    asyncio.run(test_onboarding_flow())

View File

@@ -1,153 +0,0 @@
#!/usr/bin/env python3
"""
Test script to verify the sales import fix for inventory_product_id issue
"""
import asyncio
import json
import httpx
import csv
import io
from uuid import uuid4
# Test configuration
GATEWAY_URL = "http://localhost:8000"  # local API gateway entry point used by all requests below
TENANT_ID = "946206b3-7446-436b-b29d-f265b28d9ff5"  # fixed test tenant UUID
# Test token
# NOTE(review): hardcoded JWT carries an `exp` claim, so it will be expired by
# now — obtain a fresh access token before running this script.
TEST_TOKEN = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIzOTUyYTEwOC1lNWFmLTRlMjktOTJkOC0xMjc0MTBiOWJiYmEiLCJ1c2VyX2lkIjoiMzk1MmExMDgtZTVhZi00ZTI5LTkyZDgtMTI3NDEwYjliYmJhIiwiZW1haWwiOiJkZnNmc2RAdGVzdC5jb20iLCJ0eXBlIjoiYWNjZXNzIiwiZXhwIjoxNzU1MTY3NTk4LCJpYXQiOjE3NTUxNjU3OTgsImlzcyI6ImJha2VyeS1hdXRoIiwiZnVsbF9uYW1lIjoiZGZzZGZzZGYiLCJpc192ZXJpZmllZCI6ZmFsc2UsImlzX2FjdGl2ZSI6dHJ1ZSwicm9sZSI6InVzZXIifQ.hYyRqqqZ-Ud-uzn42l_ic-QjP-NWYvT8RmwmU12uaQU"
async def test_sales_import():
    """Test the sales import functionality with the inventory_product_id fix.

    Manual debug script (prints, no asserts). Four steps, all through the
    gateway:
      1. Build a small in-memory CSV with three product rows.
      2. POST it to the sales CSV-import endpoint.
      3. Verify the import auto-created matching inventory items.
      4. Fetch sales records and show whether inventory_product_id was set.
    Requires GATEWAY_URL, TENANT_ID, TEST_TOKEN module constants and running
    services (docker-compose).
    """
    print("🧪 Testing Sales Import Fix for inventory_product_id")
    print("=" * 60)
    # Bearer auth + JSON content type for every request below.
    headers = {
        "Authorization": f"Bearer {TEST_TOKEN}",
        "Content-Type": "application/json"
    }
    async with httpx.AsyncClient(timeout=60.0) as client:
        # Step 1: Create test CSV data
        print("\n1⃣ Creating Test CSV Data")
        print("-" * 50)
        # Header row plus three sales rows spanning two stores; product names
        # are deliberately new so the importer must create inventory entries.
        csv_data = [
            ["date", "product_name", "quantity_sold", "revenue", "location_id"],
            ["2024-01-15", "Test Bread", "10", "25.50", "store-1"],
            ["2024-01-15", "Test Croissant", "5", "15.00", "store-1"],
            ["2024-01-15", "Test Muffin", "8", "20.00", "store-2"]
        ]
        # Convert to CSV string
        csv_string = io.StringIO()
        writer = csv.writer(csv_string)
        for row in csv_data:
            writer.writerow(row)
        csv_content = csv_string.getvalue()
        print(f"CSV Content:")
        print(csv_content)
        # Step 2: Test the import endpoint
        print("\n2⃣ Testing Sales Data Import")
        print("-" * 50)
        import_data = {
            "csv_data": csv_content,
            "import_type": "enhanced",
            "overwrite_existing": False
        }
        import_url = f"{GATEWAY_URL}/api/v1/tenants/{TENANT_ID}/sales/import/csv"
        print(f"URL: {import_url}")
        try:
            response = await client.post(import_url, json=import_data, headers=headers)
            print(f"Status: {response.status_code}")
            if response.status_code == 200:
                result = response.json()
                print(f"✅ Sales import completed!")
                # Summarize the import report; missing keys default to 0/False.
                print(f"Records processed: {result.get('records_processed', 0)}")
                print(f"Records created: {result.get('records_created', 0)}")
                print(f"Records failed: {result.get('records_failed', 0)}")
                print(f"Success: {result.get('success', False)}")
                if result.get('errors'):
                    print(f"\n❌ Errors ({len(result['errors'])}):")
                    for error in result['errors'][:5]:  # Show first 5 errors
                        print(f" - {error}")
                if result.get('warnings'):
                    print(f"\n⚠️ Warnings ({len(result['warnings'])}):")
                    for warning in result['warnings'][:3]:  # Show first 3 warnings
                        print(f" - {warning}")
            else:
                print("❌ Sales import FAILED!")
                print(f"Response: {response.text}")
        except Exception as e:
            print(f"❌ Sales import ERROR: {e}")
        # Step 3: Check if inventory items were created
        print("\n3⃣ Checking Inventory Items Creation")
        print("-" * 50)
        try:
            ingredients_url = f"{GATEWAY_URL}/api/v1/tenants/{TENANT_ID}/ingredients"
            response = await client.get(ingredients_url, headers=headers)
            if response.status_code == 200:
                ingredients = response.json()
                print(f"✅ Found {len(ingredients)} inventory items")
                # Each imported product name should now exist as an inventory item.
                test_products = ["Test Bread", "Test Croissant", "Test Muffin"]
                for product in test_products:
                    found = any(ingredient.get('name') == product for ingredient in ingredients)
                    # NOTE(review): both branches are empty strings here, so the
                    # found/not-found marker prints nothing — the ✅/❌ glyphs
                    # appear to have been lost; confirm against the original file.
                    status = "" if found else ""
                    print(f" {status} {product}")
            else:
                print(f"❌ Failed to fetch inventory items: {response.status_code}")
        except Exception as e:
            print(f"❌ Error checking inventory items: {e}")
        # Step 4: Check sales records
        print("\n4⃣ Checking Sales Records")
        print("-" * 50)
        try:
            sales_url = f"{GATEWAY_URL}/api/v1/tenants/{TENANT_ID}/sales"
            response = await client.get(sales_url, headers=headers)
            if response.status_code == 200:
                sales_data = response.json()
                # The endpoint may return either a bare list or a paginated
                # object with 'total'/'records' — handle both shapes.
                sales_count = len(sales_data) if isinstance(sales_data, list) else sales_data.get('total', 0)
                print(f"✅ Found {sales_count} sales records")
                # Show recent records
                records = sales_data if isinstance(sales_data, list) else sales_data.get('records', [])
                for i, record in enumerate(records[:3]):  # Show first 3 records
                    inventory_id = record.get('inventory_product_id')
                    date = record.get('date', 'Unknown')
                    quantity = record.get('quantity_sold', 0)
                    revenue = record.get('revenue', 0)
                    print(f" Record {i+1}: Date={date}, Qty={quantity}, Revenue=${revenue}, InventoryID={inventory_id}")
            else:
                print(f"❌ Failed to fetch sales records: {response.status_code}")
        except Exception as e:
            print(f"❌ Error checking sales records: {e}")
# Entry point: run the async import test once; requires services to be up.
if __name__ == "__main__":
    print("Testing sales import fix...")
    print("Make sure your services are running with docker-compose!")
    print()
    asyncio.run(test_sales_import())