Fix new services implementation 3
@@ -163,7 +163,8 @@ export const useForecast = () => {
   tenantId: string,
   format: 'csv' | 'excel' | 'json',
   params?: {
-    product_name?: string;
+    inventory_product_id?: string; // Primary way to filter by product
+    product_name?: string; // For backward compatibility
     start_date?: string;
     end_date?: string;
   }

@@ -91,7 +91,8 @@ export class ForecastingService {
   async getForecasts(
     tenantId: string,
     params?: BaseQueryParams & {
-      product_name?: string;
+      inventory_product_id?: string; // Primary way to filter by product
+      product_name?: string; // For backward compatibility - will need inventory service lookup
       start_date?: string;
       end_date?: string;
       model_id?: string;
@@ -158,7 +159,8 @@ export class ForecastingService {
     }

     return forecastsArray.map((forecast: any) => ({
-      product_name: forecast.product_name,
+      inventory_product_id: forecast.inventory_product_id,
+      product_name: forecast.product_name, // Optional - for display
       next_day_prediction: forecast.predicted_demand || 0,
       next_week_avg: forecast.predicted_demand || 0,
       trend_direction: 'stable' as const,
@@ -168,9 +170,10 @@ export class ForecastingService {
     } catch (error) {
       console.error('QuickForecasts API call failed, using fallback data:', error);

-      // Return mock data for common bakery products
+      // Return mock data for common bakery products (using mock inventory_product_ids)
       return [
         {
+          inventory_product_id: 'mock-pan-de-molde-001',
           product_name: 'Pan de Molde',
           next_day_prediction: 25,
           next_week_avg: 175,
@@ -179,6 +182,7 @@ export class ForecastingService {
           last_updated: new Date().toISOString()
         },
         {
+          inventory_product_id: 'mock-baguettes-002',
           product_name: 'Baguettes',
           next_day_prediction: 20,
           next_week_avg: 140,
@@ -187,6 +191,7 @@ export class ForecastingService {
           last_updated: new Date().toISOString()
         },
         {
+          inventory_product_id: 'mock-croissants-003',
           product_name: 'Croissants',
           next_day_prediction: 15,
           next_week_avg: 105,
@@ -195,6 +200,7 @@ export class ForecastingService {
           last_updated: new Date().toISOString()
         },
         {
+          inventory_product_id: 'mock-magdalenas-004',
           product_name: 'Magdalenas',
           next_day_prediction: 12,
           next_week_avg: 84,
@@ -244,7 +250,8 @@ export class ForecastingService {
     tenantId: string,
     format: 'csv' | 'excel' | 'json',
     params?: {
-      product_name?: string;
+      inventory_product_id?: string; // Primary way to filter by product
+      product_name?: string; // For backward compatibility
       start_date?: string;
       end_date?: string;
     }
@@ -272,7 +279,8 @@ export class ForecastingService {
   async getForecastAccuracy(
     tenantId: string,
     params?: {
-      product_name?: string;
+      inventory_product_id?: string; // Primary way to filter by product
+      product_name?: string; // For backward compatibility
       model_id?: string;
       start_date?: string;
       end_date?: string;
@@ -280,7 +288,8 @@ export class ForecastingService {
   ): Promise<{
     overall_accuracy: number;
     product_accuracy: Array<{
-      product_name: string;
+      inventory_product_id: string;
+      product_name?: string; // Optional - for display
       accuracy: number;
       sample_size: number;
     }>;

@@ -139,7 +139,8 @@ export class SalesService {
     params?: {
       start_date?: string;
       end_date?: string;
-      product_names?: string[];
+      inventory_product_ids?: string[]; // Primary way to filter by products
+      product_names?: string[]; // For backward compatibility - will need inventory service lookup
       metrics?: string[];
     }
   ): Promise<any> {

@@ -176,7 +176,8 @@ export interface PurchaseOrderItem {
   price_list_item_id?: string;
   ingredient_id: string;
   product_code?: string;
-  product_name: string;
+  inventory_product_id: string; // Reference to inventory service product
+  product_name?: string; // Optional - for display, populated by frontend from inventory service
   ordered_quantity: number;
   unit_of_measure: string;
   unit_price: number;
@@ -207,7 +208,8 @@ export interface CreatePurchaseOrderRequest {
   items: {
     ingredient_id: string;
     product_code?: string;
-    product_name: string;
+    inventory_product_id: string; // Reference to inventory service product
+    product_name?: string; // Optional - for backward compatibility
    ordered_quantity: number;
     unit_of_measure: string;
     unit_price: number;
@@ -268,7 +270,8 @@ export interface DeliveryItem {
   delivery_id: string;
   purchase_order_item_id: string;
   ingredient_id: string;
-  product_name: string;
+  inventory_product_id: string; // Reference to inventory service product
+  product_name?: string; // Optional - for display, populated by frontend from inventory service
   ordered_quantity: number;
   delivered_quantity: number;
   accepted_quantity: number;

@@ -101,7 +101,8 @@ export class TrainingService {
   async getModels(
     tenantId: string,
     params?: BaseQueryParams & {
-      product_name?: string;
+      inventory_product_id?: string; // Primary way to filter by product
+      product_name?: string; // For backward compatibility - will need inventory service lookup
       is_active?: boolean;
     }
   ): Promise<PaginatedResponse<ModelInfo>> {

@@ -9,8 +9,10 @@ export interface SalesData {
   id: string;
   tenant_id: string;
   date: string;
-  product_name: string;
-  category?: string;
+  inventory_product_id: string; // Reference to inventory service product
+  // Note: product_name now needs to be fetched from inventory service using inventory_product_id
+  product_name?: string; // Optional - for backward compatibility, populated by frontend logic
+  category?: string; // Optional - fetched from inventory service
   quantity: number;
   unit_price: number;
   total_revenue: number;
@@ -55,7 +57,9 @@ export interface SalesDataQuery extends BaseQueryParams {
   tenant_id: string;
   start_date?: string;
   end_date?: string;
-  product_names?: string[];
+  // Note: product_names filtering now requires inventory service integration or use inventory_product_ids
+  product_names?: string[]; // For backward compatibility - will need inventory service lookup
+  inventory_product_ids?: string[]; // Primary way to filter by products
   location_ids?: string[];
   sources?: string[];
   min_quantity?: number;
@@ -64,7 +68,7 @@ export interface SalesDataQuery extends BaseQueryParams {
   max_revenue?: number;
   search_term?: string;
   sales_channel?: string;
-  inventory_product_id?: string;
+  inventory_product_id?: string; // Single product filter
   is_validated?: boolean;
 }
@@ -115,7 +119,8 @@ export interface DashboardStats {
 }

 export interface ProductStats {
-  product_name: string;
+  inventory_product_id: string; // Reference to inventory service product
+  product_name?: string; // Optional - for display, populated by frontend from inventory service
   total_quantity: number;
   total_revenue: number;
   avg_price: number;

@@ -6,7 +6,7 @@
 import { ExternalFactors } from './data';

 export interface SingleForecastRequest {
-  product_name: string;
+  inventory_product_id: string;
   forecast_date: string;
   forecast_days: number;
   location: string;
@@ -16,7 +16,8 @@ export interface SingleForecastRequest {
 }

 export interface BatchForecastRequest {
-  product_names?: string[];
+  inventory_product_ids?: string[]; // Primary way to specify products
+  product_names?: string[]; // For backward compatibility - will need inventory service lookup
   forecast_date: string;
   forecast_days: number;
   location: string;
@@ -28,7 +29,8 @@ export interface BatchForecastRequest {
 export interface ForecastResponse {
   id: string;
   tenant_id: string;
-  product_name: string;
+  inventory_product_id: string;
+  product_name?: string; // Optional - for display, populated by frontend from inventory service
   forecast_date: string;
   predicted_demand: number;
   confidence_lower?: number;
@@ -77,7 +79,8 @@ export interface ForecastAlert {
 }

 export interface QuickForecast {
-  product_name: string;
+  inventory_product_id: string;
+  product_name?: string; // Optional - for display, populated by frontend from inventory service
   next_day_prediction: number;
   next_week_avg: number;
   trend_direction: 'up' | 'down' | 'stable';

@@ -14,7 +14,7 @@ export interface TrainingJobRequest {
 }

 export interface SingleProductTrainingRequest {
-  product_name: string;
+  inventory_product_id: string;
   config?: TrainingJobConfig;
   priority?: number;
 }
@@ -81,11 +81,12 @@ export interface TrainingJobResults {
   total_training_time_seconds: number;
   average_model_accuracy?: number;
   trained_models: TrainedModelInfo[];
-  failed_products?: string[];
+  failed_products?: string[]; // inventory_product_ids of failed products
 }

 export interface TrainedModelInfo {
-  product_name: string;
+  inventory_product_id: string;
+  product_name?: string; // Optional - for display, populated by frontend from inventory service
   model_id: string;
   model_type: string;
   accuracy_metrics: TrainingMetrics;
@@ -107,7 +108,8 @@ export interface TrainingMetrics {
 export interface ModelInfo {
   model_id: string;
   tenant_id: string;
-  product_name: string;
+  inventory_product_id: string;
+  product_name?: string; // Optional - for display, populated by frontend from inventory service
   model_type: string;
   model_path: string;
   version: number;

@@ -33,7 +33,8 @@ interface PurchaseOrderFormProps {
 interface OrderItem {
   ingredient_id: string;
   product_code: string;
-  product_name: string;
+  inventory_product_id: string; // Reference to inventory service product
+  product_name: string; // For backward compatibility and display
   ordered_quantity: number;
   unit_of_measure: string;
   unit_price: number;
@@ -80,6 +81,7 @@ const initialFormData: FormData = {
 const initialOrderItem: OrderItem = {
   ingredient_id: '',
   product_code: '',
+  inventory_product_id: '',
   product_name: '',
   ordered_quantity: 0,
   unit_of_measure: '',
@@ -123,7 +125,8 @@ const PurchaseOrderForm: React.FC<PurchaseOrderFormProps> = ({
         items: order.items?.map(item => ({
           ingredient_id: item.ingredient_id,
           product_code: item.product_code || '',
-          product_name: item.product_name,
+          inventory_product_id: item.inventory_product_id,
+          product_name: item.product_name || '',
           ordered_quantity: item.ordered_quantity,
           unit_of_measure: item.unit_of_measure,
           unit_price: item.unit_price,
@@ -193,6 +196,7 @@ const PurchaseOrderForm: React.FC<PurchaseOrderFormProps> = ({
     const ingredient = ingredients.find(ing => ing.id === ingredientId);
     if (ingredient) {
       handleItemChange(index, 'ingredient_id', ingredientId);
+      handleItemChange(index, 'inventory_product_id', ingredient.id);
       handleItemChange(index, 'product_name', ingredient.name);
       handleItemChange(index, 'unit_of_measure', ingredient.unit_of_measure);
       handleItemChange(index, 'product_code', ingredient.sku || '');
@@ -279,6 +283,7 @@ const PurchaseOrderForm: React.FC<PurchaseOrderFormProps> = ({
       items: formData.items.map(item => ({
         ingredient_id: item.ingredient_id,
         product_code: item.product_code || undefined,
+        inventory_product_id: item.inventory_product_id,
         product_name: item.product_name,
         ordered_quantity: item.ordered_quantity,
         unit_of_measure: item.unit_of_measure,

@@ -115,7 +115,8 @@ export const useDashboard = () => {
     const forecastPromises = products.map(async (product) => {
       try {
         const forecastRequest = {
-          product_name: product,
+          inventory_product_id: product, // Use product as inventory_product_id
+          product_name: product, // Keep for backward compatibility
           forecast_date: new Date().toISOString().split('T')[0], // Today's date as YYYY-MM-DD
           forecast_days: 1,
           location: 'madrid_centro', // Default location for Madrid bakery

@@ -101,7 +101,9 @@ export const useOrderSuggestions = () => {

     for (const product of dailyProducts) {
       // Find forecast for this product
-      const forecast = quickForecasts.find(f => f.product_name === product);
+      const forecast = quickForecasts.find(f =>
+        f.product_name === product || f.inventory_product_id === product
+      );

       if (forecast) {
         // Calculate suggested quantity based on prediction

@@ -4,7 +4,8 @@ import { createSlice, createAsyncThunk, PayloadAction } from '@reduxjs/toolkit';
 export interface Forecast {
   id: string;
   tenant_id: string;
-  product_name: string;
+  inventory_product_id: string; // Reference to inventory service product
+  product_name?: string; // Optional - for display, populated by frontend from inventory service
   location: string;
   forecast_date: string;
   created_at: string;
@@ -41,7 +42,8 @@ export interface ForecastAlert {
   id: string;
   tenant_id: string;
   type: 'high_demand' | 'low_demand' | 'stockout_risk' | 'overproduction';
-  product_name: string;
+  inventory_product_id: string; // Reference to inventory service product
+  product_name?: string; // Optional - for display, populated by frontend from inventory service
   message: string;
   severity: 'low' | 'medium' | 'high';
   created_at: string;
@@ -109,13 +111,15 @@ export const generateForecast = createAsyncThunk(
   'forecast/generate',
   async ({
     tenantId,
-    productName,
+    inventoryProductId,
+    productName, // For backward compatibility
     forecastDate,
     forecastDays = 1,
     location
   }: {
     tenantId: string;
-    productName: string;
+    inventoryProductId?: string;
+    productName?: string; // For backward compatibility
     forecastDate: string;
     forecastDays?: number;
     location: string;
@@ -127,7 +131,7 @@ export const generateForecast = createAsyncThunk(
         'Authorization': `Bearer ${localStorage.getItem('auth_token')}`,
       },
       body: JSON.stringify({
-        product_name: productName,
+        inventory_product_id: inventoryProductId || productName, // Use inventoryProductId or fallback to productName for backward compatibility
        forecast_date: forecastDate,
         forecast_days: forecastDays,
         location,
@@ -146,11 +150,13 @@ export const generateBatchForecast = createAsyncThunk(
   'forecast/generateBatch',
   async ({
     tenantId,
-    products,
+    inventoryProductIds,
+    products, // For backward compatibility
     forecastDays = 7
   }: {
     tenantId: string;
-    products: string[];
+    inventoryProductIds?: string[];
+    products?: string[]; // For backward compatibility
     forecastDays?: number;
   }) => {
     const response = await fetch(`/api/v1/tenants/${tenantId}/forecasts/batch`, {
@@ -160,7 +166,7 @@ export const generateBatchForecast = createAsyncThunk(
         'Authorization': `Bearer ${localStorage.getItem('auth_token')}`,
       },
       body: JSON.stringify({
-        products,
+        inventory_product_ids: inventoryProductIds || products, // Use inventoryProductIds or fallback to products for backward compatibility
         forecast_days: forecastDays,
         batch_name: `Batch_${new Date().toISOString()}`,
       }),
@@ -358,7 +364,7 @@ const forecastSlice = createSlice({
       state.isLoading = false;
       // Convert API forecasts to QuickForecast format
      state.todayForecasts = (action.payload.forecasts || []).map((forecast: Forecast) => ({
-        product: forecast.product_name,
+        product: forecast.product_name || forecast.inventory_product_id, // Use product_name if available, otherwise use ID
         predicted: Math.round(forecast.predicted_demand),
         confidence: forecast.confidence_level > 0.8 ? 'high' :
                     forecast.confidence_level > 0.6 ? 'medium' : 'low',

@@ -56,7 +56,7 @@ async def create_enhanced_single_forecast(

     logger.info("Generating enhanced single forecast",
                 tenant_id=tenant_id,
-                product_name=request.product_name,
+                inventory_product_id=request.inventory_product_id,
                 forecast_date=request.forecast_date.isoformat())

     # Record metrics
@@ -124,13 +124,13 @@ async def create_enhanced_batch_forecast(

     logger.info("Generating enhanced batch forecasts",
                 tenant_id=tenant_id,
-                products_count=len(request.products),
-                forecast_dates_count=len(request.forecast_dates))
+                products_count=len(request.inventory_product_ids),
+                forecast_dates_count=request.forecast_days)

     # Record metrics
     if metrics:
         metrics.increment_counter("enhanced_batch_forecasts_total")
-        metrics.histogram("enhanced_batch_forecast_products_count", len(request.products))
+        metrics.histogram("enhanced_batch_forecast_products_count", len(request.inventory_product_ids))

     # Generate batch forecasts using enhanced service
     batch_result = await enhanced_forecasting_service.generate_batch_forecasts(
@@ -174,7 +174,7 @@ async def create_enhanced_batch_forecast(
 @track_execution_time("enhanced_get_forecasts_duration_seconds", "forecasting-service")
 async def get_enhanced_tenant_forecasts(
     tenant_id: str = Path(..., description="Tenant ID"),
-    product_name: Optional[str] = Query(None, description="Filter by product name"),
+    inventory_product_id: Optional[str] = Query(None, description="Filter by inventory product ID"),
     start_date: Optional[date] = Query(None, description="Start date filter"),
     end_date: Optional[date] = Query(None, description="End date filter"),
     skip: int = Query(0, description="Number of records to skip"),
@@ -203,7 +203,7 @@ async def get_enhanced_tenant_forecasts(
     # Get forecasts using enhanced service
     forecasts = await enhanced_forecasting_service.get_tenant_forecasts(
         tenant_id=tenant_id,
-        product_name=product_name,
+        inventory_product_id=inventory_product_id,
         start_date=start_date,
         end_date=end_date,
         skip=skip,
@@ -218,7 +218,7 @@ async def get_enhanced_tenant_forecasts(
         "forecasts": forecasts,
         "total_returned": len(forecasts),
         "filters": {
-            "product_name": product_name,
+            "inventory_product_id": inventory_product_id,
             "start_date": start_date.isoformat() if start_date else None,
             "end_date": end_date.isoformat() if end_date else None
         },

@@ -59,14 +59,14 @@ async def generate_enhanced_realtime_prediction(

     logger.info("Generating enhanced real-time prediction",
                 tenant_id=tenant_id,
-                product_name=prediction_request.get("product_name"))
+                inventory_product_id=prediction_request.get("inventory_product_id"))

     # Record metrics
     if metrics:
         metrics.increment_counter("enhanced_realtime_predictions_total")

     # Validate required fields
-    required_fields = ["product_name", "model_id", "features"]
+    required_fields = ["inventory_product_id", "model_id", "features"]
     missing_fields = [field for field in required_fields if field not in prediction_request]
     if missing_fields:
         raise HTTPException(
@@ -91,7 +91,7 @@ async def generate_enhanced_realtime_prediction(

     return {
         "tenant_id": tenant_id,
-        "product_name": prediction_request["product_name"],
+        "inventory_product_id": prediction_request["inventory_product_id"],
         "model_id": prediction_request["model_id"],
         "prediction": prediction_result,
         "generated_at": datetime.now().isoformat(),
@@ -205,7 +205,7 @@ async def generate_enhanced_batch_predictions(
 @track_execution_time("enhanced_get_prediction_cache_duration_seconds", "forecasting-service")
 async def get_enhanced_prediction_cache(
     tenant_id: str = Path(..., description="Tenant ID"),
-    product_name: Optional[str] = Query(None, description="Filter by product name"),
+    inventory_product_id: Optional[str] = Query(None, description="Filter by inventory product ID"),
     skip: int = Query(0, description="Number of records to skip"),
     limit: int = Query(100, description="Number of records to return"),
     request_obj: Request = None,
@@ -232,7 +232,7 @@ async def get_enhanced_prediction_cache(
     # Get cached predictions using enhanced service
     cached_predictions = await enhanced_forecasting_service.get_cached_predictions(
         tenant_id=tenant_id,
-        product_name=product_name,
+        inventory_product_id=inventory_product_id,
         skip=skip,
         limit=limit
     )
@@ -245,7 +245,7 @@ async def get_enhanced_prediction_cache(
         "cached_predictions": cached_predictions,
         "total_returned": len(cached_predictions),
         "filters": {
-            "product_name": product_name
+            "inventory_product_id": inventory_product_id
         },
         "pagination": {
             "skip": skip,
@@ -271,7 +271,7 @@ async def get_enhanced_prediction_cache(
 @track_execution_time("enhanced_clear_prediction_cache_duration_seconds", "forecasting-service")
 async def clear_enhanced_prediction_cache(
     tenant_id: str = Path(..., description="Tenant ID"),
-    product_name: Optional[str] = Query(None, description="Clear cache for specific product"),
+    inventory_product_id: Optional[str] = Query(None, description="Clear cache for specific inventory product ID"),
     request_obj: Request = None,
     current_tenant: str = Depends(get_current_tenant_id_dep),
     enhanced_forecasting_service: EnhancedForecastingService = Depends(get_enhanced_forecasting_service)
@@ -296,7 +296,7 @@ async def clear_enhanced_prediction_cache(
     # Clear cache using enhanced service
     cleared_count = await enhanced_forecasting_service.clear_prediction_cache(
         tenant_id=tenant_id,
-        product_name=product_name
+        inventory_product_id=inventory_product_id
     )

     if metrics:
@@ -305,13 +305,13 @@ async def clear_enhanced_prediction_cache(

     logger.info("Enhanced prediction cache cleared",
                 tenant_id=tenant_id,
-                product_name=product_name,
+                inventory_product_id=inventory_product_id,
                 cleared_count=cleared_count)

     return {
         "message": "Prediction cache cleared successfully",
         "tenant_id": tenant_id,
-        "product_name": product_name,
+        "inventory_product_id": inventory_product_id,
         "cleared_count": cleared_count,
         "enhanced_features": True,
         "repository_integration": True

@@ -40,7 +40,7 @@ class BakeryForecaster:
         self.database_manager = database_manager or create_database_manager(settings.DATABASE_URL, "forecasting-service")
         self.predictor = BakeryPredictor(database_manager)

-    async def generate_forecast_with_repository(self, tenant_id: str, product_name: str,
+    async def generate_forecast_with_repository(self, tenant_id: str, inventory_product_id: str,
                                                 forecast_date: date, model_id: str = None) -> Dict[str, Any]:
         """Generate forecast with repository integration"""
         try:
@@ -48,7 +48,7 @@ class BakeryForecaster:
             # Implementation would be added here
             return {
                 "tenant_id": tenant_id,
-                "product_name": product_name,
+                "inventory_product_id": inventory_product_id,
                 "forecast_date": forecast_date.isoformat(),
                 "prediction": 0.0,
                 "confidence_interval": {"lower": 0.0, "upper": 0.0},

@@ -18,7 +18,7 @@ class Forecast(Base):

     id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
     tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)
-    product_name = Column(String(255), nullable=False, index=True)
+    inventory_product_id = Column(UUID(as_uuid=True), nullable=False, index=True)  # Reference to inventory service
     location = Column(String(255), nullable=False, index=True)

     # Forecast period
@@ -53,7 +53,7 @@ class Forecast(Base):
     features_used = Column(JSON)

     def __repr__(self):
-        return f"<Forecast(id={self.id}, product={self.product_name}, date={self.forecast_date})>"
+        return f"<Forecast(id={self.id}, inventory_product_id={self.inventory_product_id}, date={self.forecast_date})>"

 class PredictionBatch(Base):
     """Batch prediction requests"""

@@ -19,7 +19,7 @@ class ModelPerformanceMetric(Base):
     id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
     model_id = Column(UUID(as_uuid=True), nullable=False, index=True)
     tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)
-    product_name = Column(String(255), nullable=False)
+    inventory_product_id = Column(UUID(as_uuid=True), nullable=False)  # Reference to inventory service

     # Performance metrics
     mae = Column(Float)  # Mean Absolute Error
@@ -48,7 +48,7 @@ class PredictionCache(Base):

     # Cached data
     tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)
-    product_name = Column(String(255), nullable=False)
+    inventory_product_id = Column(UUID(as_uuid=True), nullable=False)  # Reference to inventory service
     location = Column(String(255), nullable=False)
     forecast_date = Column(DateTime(timezone=True), nullable=False)

@@ -64,4 +64,4 @@ class PredictionCache(Base):
     hit_count = Column(Integer, default=0)

     def __repr__(self):
-        return f"<PredictionCache(key={self.cache_key}, product={self.product_name})>"
+        return f"<PredictionCache(key={self.cache_key}, inventory_product_id={self.inventory_product_id})>"

@@ -34,21 +34,21 @@ class ForecastingBaseRepository(BaseRepository):
         )
         return await self.get_multi(skip=skip, limit=limit)

-    async def get_by_product_name(
+    async def get_by_inventory_product_id(
         self,
         tenant_id: str,
-        product_name: str,
+        inventory_product_id: str,
         skip: int = 0,
         limit: int = 100
     ) -> List:
-        """Get records by tenant and product"""
-        if hasattr(self.model, 'product_name'):
+        """Get records by tenant and inventory product"""
+        if hasattr(self.model, 'inventory_product_id'):
             return await self.get_multi(
                 skip=skip,
                 limit=limit,
                 filters={
                     "tenant_id": tenant_id,
-                    "product_name": product_name
+                    "inventory_product_id": inventory_product_id
                 },
                 order_by="created_at",
                 order_desc=True
@@ -163,17 +163,17 @@ class ForecastingBaseRepository(BaseRepository):

         # Get records by product if applicable
         product_stats = {}
-        if hasattr(self.model, 'product_name'):
+        if hasattr(self.model, 'inventory_product_id'):
             product_query = text(f"""
-                SELECT product_name, COUNT(*) as count
+                SELECT inventory_product_id, COUNT(*) as count
                 FROM {table_name}
                 WHERE tenant_id = :tenant_id
-                GROUP BY product_name
+                GROUP BY inventory_product_id
                 ORDER BY count DESC
             """)

             result = await self.session.execute(product_query, {"tenant_id": tenant_id})
-            product_stats = {row.product_name: row.count for row in result.fetchall()}
+            product_stats = {row.inventory_product_id: row.count for row in result.fetchall()}

         return {
             "total_records": total_records,
@@ -206,11 +206,11 @@ class ForecastingBaseRepository(BaseRepository):
         if not isinstance(tenant_id, str) or len(tenant_id) < 1:
             errors.append("Invalid tenant_id format")

-        # Validate product_name if present
-        if "product_name" in data and data["product_name"]:
-            product_name = data["product_name"]
-            if not isinstance(product_name, str) or len(product_name) < 1:
-                errors.append("Invalid product_name format")
+        # Validate inventory_product_id if present
+        if "inventory_product_id" in data and data["inventory_product_id"]:
+            inventory_product_id = data["inventory_product_id"]
+            if not isinstance(inventory_product_id, str) or len(inventory_product_id) < 1:
+                errors.append("Invalid inventory_product_id format")

         # Validate dates if present - accept datetime objects, date objects, and date strings
         date_fields = ["forecast_date", "created_at", "evaluation_date", "expires_at"]

@@ -29,7 +29,7 @@ class ForecastRepository(ForecastingBaseRepository):
         # Validate forecast data
         validation_result = self._validate_forecast_data(
             forecast_data,
-            ["tenant_id", "product_name", "location", "forecast_date",
+            ["tenant_id", "inventory_product_id", "location", "forecast_date",
              "predicted_demand", "confidence_lower", "confidence_upper", "model_id"]
         )

@@ -50,7 +50,7 @@ class ForecastRepository(ForecastingBaseRepository):
             logger.info("Forecast created successfully",
                         forecast_id=forecast.id,
                         tenant_id=forecast.tenant_id,
-                        product_name=forecast.product_name,
+                        inventory_product_id=forecast.inventory_product_id,
                         forecast_date=forecast.forecast_date.isoformat())

             return forecast
@@ -60,7 +60,7 @@ class ForecastRepository(ForecastingBaseRepository):
         except Exception as e:
             logger.error("Failed to create forecast",
                          tenant_id=forecast_data.get("tenant_id"),
-                         product_name=forecast_data.get("product_name"),
+                         inventory_product_id=forecast_data.get("inventory_product_id"),
                          error=str(e))
             raise DatabaseError(f"Failed to create forecast: {str(e)}")

@@ -69,15 +69,15 @@ class ForecastRepository(ForecastingBaseRepository):
         tenant_id: str,
         start_date: date,
         end_date: date,
-        product_name: str = None,
+        inventory_product_id: str = None,
         location: str = None
     ) -> List[Forecast]:
         """Get forecasts within a date range"""
         try:
             filters = {"tenant_id": tenant_id}

-            if product_name:
-                filters["product_name"] = product_name
+            if inventory_product_id:
+                filters["inventory_product_id"] = inventory_product_id
             if location:
                 filters["location"] = location

@@ -100,14 +100,14 @@ class ForecastRepository(ForecastingBaseRepository):
     async def get_latest_forecast_for_product(
         self,
         tenant_id: str,
-        product_name: str,
+        inventory_product_id: str,
         location: str = None
     ) -> Optional[Forecast]:
         """Get the most recent forecast for a product"""
         try:
             filters = {
                 "tenant_id": tenant_id,
-                "product_name": product_name
+                "inventory_product_id": inventory_product_id
             }
             if location:
                 filters["location"] = location
@@ -124,7 +124,7 @@ class ForecastRepository(ForecastingBaseRepository):
         except Exception as e:
             logger.error("Failed to get latest forecast for product",
                          tenant_id=tenant_id,
-                         product_name=product_name,
+                         inventory_product_id=inventory_product_id,
                          error=str(e))
             raise DatabaseError(f"Failed to get latest forecast: {str(e)}")

@@ -132,7 +132,7 @@ class ForecastRepository(ForecastingBaseRepository):
         self,
         tenant_id: str,
         forecast_date: date,
-        product_name: str = None
+        inventory_product_id: str = None
     ) -> List[Forecast]:
         """Get all forecasts for a specific date"""
         try:
@@ -154,7 +154,7 @@ class ForecastRepository(ForecastingBaseRepository):
     async def get_forecast_accuracy_metrics(
         self,
         tenant_id: str,
-        product_name: str = None,
+        inventory_product_id: str = None,
         days_back: int = 30
     ) -> Dict[str, Any]:
         """Get forecast accuracy metrics"""
@@ -168,9 +168,9 @@ class ForecastRepository(ForecastingBaseRepository):
                 "cutoff_date": cutoff_date
             }

-            if product_name:
-                conditions.append("product_name = :product_name")
-                params["product_name"] = product_name
+            if inventory_product_id:
+                conditions.append("inventory_product_id = :inventory_product_id")
+                params["inventory_product_id"] = inventory_product_id

             query_text = f"""
                 SELECT
@@ -180,7 +180,7 @@ class ForecastRepository(ForecastingBaseRepository):
                     MAX(predicted_demand) as max_predicted_demand,
                     AVG(confidence_upper - confidence_lower) as avg_confidence_interval,
                     AVG(processing_time_ms) as avg_processing_time_ms,
-                    COUNT(DISTINCT product_name) as unique_products,
+                    COUNT(DISTINCT inventory_product_id) as unique_products,
                     COUNT(DISTINCT model_id) as unique_models
                 FROM forecasts
                 WHERE {' AND '.join(conditions)}
@@ -233,7 +233,7 @@ class ForecastRepository(ForecastingBaseRepository):
     async def get_demand_trends(
         self,
         tenant_id: str,
-        product_name: str,
+        inventory_product_id: str,
         days_back: int = 30
     ) -> Dict[str, Any]:
         """Get demand trends for a product"""
@@ -249,7 +249,7 @@ class ForecastRepository(ForecastingBaseRepository):
                     COUNT(*) as forecast_count
                 FROM forecasts
                 WHERE tenant_id = :tenant_id
-                AND product_name = :product_name
+                AND inventory_product_id = :inventory_product_id
                 AND forecast_date >= :cutoff_date
                 GROUP BY DATE(forecast_date)
                 ORDER BY date DESC
@@ -257,7 +257,7 @@ class ForecastRepository(ForecastingBaseRepository):

             result = await self.session.execute(text(query_text), {
                 "tenant_id": tenant_id,
-                "product_name": product_name,
+                "inventory_product_id": inventory_product_id,
                 "cutoff_date": cutoff_date
             })

@@ -280,7 +280,7 @@ class ForecastRepository(ForecastingBaseRepository):
                 trend_direction = "stable"

             return {
-                "product_name": product_name,
+                "inventory_product_id": inventory_product_id,
                 "period_days": days_back,
                 "trends": trends,
                 "trend_direction": trend_direction,
@@ -290,10 +290,10 @@ class ForecastRepository(ForecastingBaseRepository):
         except Exception as e:
             logger.error("Failed to get demand trends",
                          tenant_id=tenant_id,
-                         product_name=product_name,
+                         inventory_product_id=inventory_product_id,
                          error=str(e))
             return {
-                "product_name": product_name,
+                "inventory_product_id": inventory_product_id,
                 "period_days": days_back,
                 "trends": [],
                 "trend_direction": "unknown",
@@ -311,7 +311,7 @@ class ForecastRepository(ForecastingBaseRepository):
                     COUNT(*) as usage_count,
                     AVG(predicted_demand) as avg_prediction,
                     MAX(forecast_date) as last_used,
-                    COUNT(DISTINCT product_name) as products_covered
+                    COUNT(DISTINCT inventory_product_id) as products_covered
                 FROM forecasts
                 WHERE tenant_id = :tenant_id
                 GROUP BY model_id, algorithm
@@ -403,7 +403,7 @@ class ForecastRepository(ForecastingBaseRepository):
                 # Validate each forecast
                 validation_result = self._validate_forecast_data(
                     forecast_data,
-                    ["tenant_id", "product_name", "location", "forecast_date",
+                    ["tenant_id", "inventory_product_id", "location", "forecast_date",
                      "predicted_demand", "confidence_lower", "confidence_upper", "model_id"]
                 )

@@ -29,7 +29,7 @@ class PerformanceMetricRepository(ForecastingBaseRepository):
         # Validate metric data
         validation_result = self._validate_forecast_data(
             metric_data,
-            ["model_id", "tenant_id", "product_name", "evaluation_date"]
+            ["model_id", "tenant_id", "inventory_product_id", "evaluation_date"]
         )

         if not validation_result["is_valid"]:
@@ -41,7 +41,7 @@ class PerformanceMetricRepository(ForecastingBaseRepository):
                         metric_id=metric.id,
                         model_id=metric.model_id,
                         tenant_id=metric.tenant_id,
-                        product_name=metric.product_name)
+                        inventory_product_id=metric.inventory_product_id)

             return metric

@@ -93,7 +93,7 @@ class PerformanceMetricRepository(ForecastingBaseRepository):
     async def get_performance_trends(
         self,
         tenant_id: str,
-        product_name: str = None,
+        inventory_product_id: str = None,
         days: int = 30
     ) -> Dict[str, Any]:
         """Get performance trends over time"""
@@ -109,14 +109,14 @@ class PerformanceMetricRepository(ForecastingBaseRepository):
                 "start_date": start_date
             }

-            if product_name:
-                conditions.append("product_name = :product_name")
-                params["product_name"] = product_name
+            if inventory_product_id:
+                conditions.append("inventory_product_id = :inventory_product_id")
+                params["inventory_product_id"] = inventory_product_id

             query_text = f"""
                 SELECT
                     DATE(evaluation_date) as date,
-                    product_name,
+                    inventory_product_id,
                     AVG(mae) as avg_mae,
                     AVG(mape) as avg_mape,
                     AVG(rmse) as avg_rmse,
@@ -124,8 +124,8 @@ class PerformanceMetricRepository(ForecastingBaseRepository):
                     COUNT(*) as measurement_count
                 FROM model_performance_metrics
                 WHERE {' AND '.join(conditions)}
-                GROUP BY DATE(evaluation_date), product_name
-                ORDER BY date DESC, product_name
+                GROUP BY DATE(evaluation_date), inventory_product_id
+                ORDER BY date DESC, inventory_product_id
             """

             result = await self.session.execute(text(query_text), params)
@@ -134,7 +134,7 @@ class PerformanceMetricRepository(ForecastingBaseRepository):
             for row in result.fetchall():
                 trends.append({
                     "date": row.date.isoformat() if row.date else None,
-                    "product_name": row.product_name,
+                    "inventory_product_id": row.inventory_product_id,
                     "metrics": {
                         "avg_mae": float(row.avg_mae) if row.avg_mae else None,
                         "avg_mape": float(row.avg_mape) if row.avg_mape else None,
@@ -146,7 +146,7 @@ class PerformanceMetricRepository(ForecastingBaseRepository):

             return {
                 "tenant_id": tenant_id,
-                "product_name": product_name,
+                "inventory_product_id": inventory_product_id,
                 "period_days": days,
                 "trends": trends,
                 "total_measurements": len(trends)
@@ -155,11 +155,11 @@ class PerformanceMetricRepository(ForecastingBaseRepository):
         except Exception as e:
             logger.error("Failed to get performance trends",
                          tenant_id=tenant_id,
-                         product_name=product_name,
+                         inventory_product_id=inventory_product_id,
                          error=str(e))
             return {
                 "tenant_id": tenant_id,
-                "product_name": product_name,
+                "inventory_product_id": inventory_product_id,
                 "period_days": days,
                 "trends": [],
                 "total_measurements": 0

@@ -27,18 +27,18 @@ class PredictionCacheRepository(ForecastingBaseRepository):
     def _generate_cache_key(
         self,
         tenant_id: str,
-        product_name: str,
+        inventory_product_id: str,
         location: str,
         forecast_date: datetime
     ) -> str:
         """Generate cache key for prediction"""
-        key_data = f"{tenant_id}:{product_name}:{location}:{forecast_date.isoformat()}"
+        key_data = f"{tenant_id}:{inventory_product_id}:{location}:{forecast_date.isoformat()}"
         return hashlib.md5(key_data.encode()).hexdigest()

     async def cache_prediction(
         self,
         tenant_id: str,
-        product_name: str,
+        inventory_product_id: str,
         location: str,
         forecast_date: datetime,
         predicted_demand: float,
@@ -49,13 +49,13 @@ class PredictionCacheRepository(ForecastingBaseRepository):
     ) -> PredictionCache:
         """Cache a prediction result"""
         try:
-            cache_key = self._generate_cache_key(tenant_id, product_name, location, forecast_date)
+            cache_key = self._generate_cache_key(tenant_id, inventory_product_id, location, forecast_date)
             expires_at = datetime.utcnow() + timedelta(hours=expires_in_hours)

             cache_data = {
                 "cache_key": cache_key,
                 "tenant_id": tenant_id,
-                "product_name": product_name,
+                "inventory_product_id": inventory_product_id,
                 "location": location,
                 "forecast_date": forecast_date,
                 "predicted_demand": predicted_demand,
@@ -80,20 +80,20 @@ class PredictionCacheRepository(ForecastingBaseRepository):
         except Exception as e:
             logger.error("Failed to cache prediction",
                          tenant_id=tenant_id,
-                         product_name=product_name,
+                         inventory_product_id=inventory_product_id,
                          error=str(e))
             raise DatabaseError(f"Failed to cache prediction: {str(e)}")

     async def get_cached_prediction(
         self,
         tenant_id: str,
-        product_name: str,
+        inventory_product_id: str,
         location: str,
         forecast_date: datetime
     ) -> Optional[PredictionCache]:
         """Get cached prediction if valid"""
         try:
-            cache_key = self._generate_cache_key(tenant_id, product_name, location, forecast_date)
+            cache_key = self._generate_cache_key(tenant_id, inventory_product_id, location, forecast_date)

             cache_entry = await self.get_by_field("cache_key", cache_key)

@@ -119,14 +119,14 @@ class PredictionCacheRepository(ForecastingBaseRepository):
         except Exception as e:
             logger.error("Failed to get cached prediction",
                          tenant_id=tenant_id,
-                         product_name=product_name,
+                         inventory_product_id=inventory_product_id,
                          error=str(e))
             return None

     async def invalidate_cache(
         self,
         tenant_id: str,
-        product_name: str = None,
+        inventory_product_id: str = None,
         location: str = None
     ) -> int:
         """Invalidate cache entries"""
@@ -134,9 +134,9 @@ class PredictionCacheRepository(ForecastingBaseRepository):
             conditions = ["tenant_id = :tenant_id"]
             params = {"tenant_id": tenant_id}

-            if product_name:
-                conditions.append("product_name = :product_name")
-                params["product_name"] = product_name
+            if inventory_product_id:
+                conditions.append("inventory_product_id = :inventory_product_id")
+                params["inventory_product_id"] = inventory_product_id

             if location:
                 conditions.append("location = :location")
@@ -152,7 +152,7 @@ class PredictionCacheRepository(ForecastingBaseRepository):

             logger.info("Cache invalidated",
                         tenant_id=tenant_id,
-                        product_name=product_name,
+                        inventory_product_id=inventory_product_id,
                         location=location,
                         invalidated_count=invalidated_count)

@@ -204,7 +204,7 @@ class PredictionCacheRepository(ForecastingBaseRepository):
                     SUM(hit_count) as total_hits,
                     AVG(hit_count) as avg_hits_per_entry,
                     MAX(hit_count) as max_hits,
-                    COUNT(DISTINCT product_name) as unique_products
+                    COUNT(DISTINCT inventory_product_id) as unique_products
                 FROM prediction_cache
                 {base_filter}
             """)
@@ -268,7 +268,7 @@ class PredictionCacheRepository(ForecastingBaseRepository):

             query_text = f"""
                 SELECT
-                    product_name,
+                    inventory_product_id,
                     location,
                     hit_count,
                     predicted_demand,
@@ -285,7 +285,7 @@ class PredictionCacheRepository(ForecastingBaseRepository):
             popular_predictions = []
             for row in result.fetchall():
                 popular_predictions.append({
-                    "product_name": row.product_name,
+                    "inventory_product_id": row.inventory_product_id,
                     "location": row.location,
                     "hit_count": int(row.hit_count),
                     "predicted_demand": float(row.predicted_demand),

@@ -22,7 +22,8 @@ class AlertType(str, Enum):

 class ForecastRequest(BaseModel):
     """Request schema for generating forecasts"""
-    product_name: str = Field(..., description="Product name")
+    inventory_product_id: str = Field(..., description="Inventory product UUID reference")
+    # product_name: str = Field(..., description="Product name")  # DEPRECATED - use inventory_product_id
     forecast_date: date = Field(..., description="Starting date for forecast")
     forecast_days: int = Field(1, ge=1, le=30, description="Number of days to forecast")
     location: str = Field(..., description="Location identifier")
@@ -40,14 +41,15 @@ class BatchForecastRequest(BaseModel):
     """Request schema for batch forecasting"""
     tenant_id: str = Field(..., description="Tenant ID")
     batch_name: str = Field(..., description="Batch name for tracking")
-    products: List[str] = Field(..., description="List of product names")
+    inventory_product_ids: List[str] = Field(..., description="List of inventory product IDs")
     forecast_days: int = Field(7, ge=1, le=30, description="Number of days to forecast")

 class ForecastResponse(BaseModel):
     """Response schema for forecast results"""
     id: str
     tenant_id: str
-    product_name: str
+    inventory_product_id: str  # Reference to inventory service
+    # product_name: str  # Can be fetched from inventory service if needed for display
     location: str
     forecast_date: datetime

@@ -78,7 +78,7 @@ class EnhancedForecastingService:
             logger.error("Batch forecast generation failed", error=str(e))
             raise

-    async def get_tenant_forecasts(self, tenant_id: str, product_name: str = None,
+    async def get_tenant_forecasts(self, tenant_id: str, inventory_product_id: str = None,
                                    start_date: date = None, end_date: date = None,
                                    skip: int = 0, limit: int = 100) -> List[Dict]:
         """Get tenant forecasts with filtering"""
@@ -149,7 +149,7 @@ class EnhancedForecastingService:
             logger.error("Batch predictions failed", error=str(e))
             raise

-    async def get_cached_predictions(self, tenant_id: str, product_name: str = None,
+    async def get_cached_predictions(self, tenant_id: str, inventory_product_id: str = None,
                                      skip: int = 0, limit: int = 100) -> List[Dict]:
         """Get cached predictions"""
         try:
@@ -159,7 +159,7 @@ class EnhancedForecastingService:
             logger.error("Failed to get cached predictions", error=str(e))
             raise

-    async def clear_prediction_cache(self, tenant_id: str, product_name: str = None) -> int:
+    async def clear_prediction_cache(self, tenant_id: str, inventory_product_id: str = None) -> int:
         """Clear prediction cache"""
         try:
             # Implementation would use repository pattern
@@ -195,7 +195,7 @@ class EnhancedForecastingService:
         try:
             logger.info("Generating enhanced forecast",
                         tenant_id=tenant_id,
-                        product=request.product_name,
+                        inventory_product_id=request.inventory_product_id,
                         date=request.forecast_date.isoformat())

             # Get session and initialize repositories
@@ -204,20 +204,20 @@ class EnhancedForecastingService:

             # Step 1: Check cache first
             cached_prediction = await repos['cache'].get_cached_prediction(
-                tenant_id, request.product_name, request.location, request.forecast_date
+                tenant_id, request.inventory_product_id, request.location, request.forecast_date
             )

             if cached_prediction:
                 logger.debug("Using cached prediction",
                              tenant_id=tenant_id,
-                             product=request.product_name)
+                             inventory_product_id=request.inventory_product_id)
                 return self._create_forecast_response_from_cache(cached_prediction)

             # Step 2: Get model with validation
-            model_data = await self._get_latest_model_with_fallback(tenant_id, request.product_name)
+            model_data = await self._get_latest_model_with_fallback(tenant_id, request.inventory_product_id)

             if not model_data:
-                raise ValueError(f"No valid model available for product: {request.product_name}")
+                raise ValueError(f"No valid model available for product: {request.inventory_product_id}")

             # Step 3: Prepare features with fallbacks
             features = await self._prepare_forecast_features_with_fallbacks(tenant_id, request)
@@ -244,7 +244,7 @@ class EnhancedForecastingService:

             forecast_data = {
                 "tenant_id": tenant_id,
-                "product_name": request.product_name,
+                "inventory_product_id": request.inventory_product_id,
                 "location": request.location,
                 "forecast_date": forecast_datetime,
                 "predicted_demand": adjusted_prediction['prediction'],
@@ -271,7 +271,7 @@ class EnhancedForecastingService:
             # Step 7: Cache the prediction
             await repos['cache'].cache_prediction(
                 tenant_id=tenant_id,
-                product_name=request.product_name,
+                inventory_product_id=request.inventory_product_id,
                 location=request.location,
                 forecast_date=forecast_datetime,
                 predicted_demand=adjusted_prediction['prediction'],
@@ -296,14 +296,14 @@ class EnhancedForecastingService:
             logger.error("Error generating enhanced forecast",
                          error=str(e),
                          tenant_id=tenant_id,
-                         product=request.product_name,
+                         inventory_product_id=request.inventory_product_id,
                          processing_time=processing_time)
             raise

     async def get_forecast_history(
         self,
         tenant_id: str,
-        product_name: Optional[str] = None,
+        inventory_product_id: Optional[str] = None,
         start_date: Optional[date] = None,
         end_date: Optional[date] = None
     ) -> List[Dict[str, Any]]:
@@ -314,7 +314,7 @@ class EnhancedForecastingService:

             if start_date and end_date:
                 forecasts = await repos['forecast'].get_forecasts_by_date_range(
-                    tenant_id, start_date, end_date, product_name
+                    tenant_id, start_date, end_date, inventory_product_id
                 )
             else:
                 # Get recent forecasts (last 30 days)
@@ -374,7 +374,7 @@ class EnhancedForecastingService:
         self,
         tenant_id: str,
         batch_name: str,
-        products: List[str],
+        inventory_product_ids: List[str],
         forecast_days: int = 7
     ) -> Dict[str, Any]:
         """Create batch prediction job using repository"""
@@ -386,7 +386,7 @@ class EnhancedForecastingService:
             batch_data = {
                 "tenant_id": tenant_id,
                 "batch_name": batch_name,
-                "total_products": len(products),
+                "total_products": len(inventory_product_ids),
                 "forecast_days": forecast_days,
                 "status": "pending"
             }
@@ -396,12 +396,12 @@ class EnhancedForecastingService:
             logger.info("Batch prediction created",
                         batch_id=batch.id,
                         tenant_id=tenant_id,
-                        total_products=len(products))
+                        total_products=len(inventory_product_ids))

             return {
                 "batch_id": str(batch.id),
                 "status": batch.status,
-                "total_products": len(products),
+                "total_products": len(inventory_product_ids),
                 "created_at": batch.requested_at.isoformat()
             }
@@ -423,7 +423,7 @@ class EnhancedForecastingService:
                     "forecast_id": forecast.id,
                     "alert_type": "high_demand",
                     "severity": "high" if prediction['prediction'] > 200 else "medium",
-                    "message": f"High demand predicted for {forecast.product_name}: {prediction['prediction']:.1f} units"
+                    "message": f"High demand predicted for inventory product {forecast.inventory_product_id}: {prediction['prediction']:.1f} units"
                 })

             # Check for low demand alert
@@ -433,7 +433,7 @@ class EnhancedForecastingService:
                     "forecast_id": forecast.id,
                     "alert_type": "low_demand",
                     "severity": "low",
-                    "message": f"Low demand predicted for {forecast.product_name}: {prediction['prediction']:.1f} units"
+                    "message": f"Low demand predicted for inventory product {forecast.inventory_product_id}: {prediction['prediction']:.1f} units"
                 })

             # Check for stockout risk (very low prediction with narrow confidence interval)
@@ -444,7 +444,7 @@ class EnhancedForecastingService:
                     "forecast_id": forecast.id,
                     "alert_type": "stockout_risk",
                     "severity": "critical",
-                    "message": f"Stockout risk for {forecast.product_name}: predicted {prediction['prediction']:.1f} units with high confidence"
+                    "message": f"Stockout risk for inventory product {forecast.inventory_product_id}: predicted {prediction['prediction']:.1f} units with high confidence"
                 })

             # Create alerts
@@ -462,7 +462,7 @@ class EnhancedForecastingService:
             return ForecastResponse(
                 id=str(cache_entry.id),
                 tenant_id=str(cache_entry.tenant_id),
-                product_name=cache_entry.product_name,
+                inventory_product_id=cache_entry.inventory_product_id,
                 location=cache_entry.location,
                 forecast_date=cache_entry.forecast_date,
                 predicted_demand=cache_entry.predicted_demand,
@@ -486,7 +486,7 @@ class EnhancedForecastingService:
             return ForecastResponse(
                 id=str(forecast.id),
                 tenant_id=str(forecast.tenant_id),
-                product_name=forecast.product_name,
+                inventory_product_id=forecast.inventory_product_id,
                 location=forecast.location,
                 forecast_date=forecast.forecast_date,
                 predicted_demand=forecast.predicted_demand,
@@ -514,7 +514,7 @@ class EnhancedForecastingService:
             return {
                 "id": str(forecast.id),
                 "tenant_id": str(forecast.tenant_id),
-                "product_name": forecast.product_name,
+                "inventory_product_id": forecast.inventory_product_id,
                 "location": forecast.location,
                 "forecast_date": forecast.forecast_date.isoformat(),
                 "predicted_demand": forecast.predicted_demand,
@@ -527,17 +527,17 @@ class EnhancedForecastingService:
             }

     # Additional helper methods from original service
-    async def _get_latest_model_with_fallback(self, tenant_id: str, product_name: str) -> Optional[Dict[str, Any]]:
+    async def _get_latest_model_with_fallback(self, tenant_id: str, inventory_product_id: str) -> Optional[Dict[str, Any]]:
         """Get the latest trained model with fallback strategies"""
         try:
             model_data = await self.model_client.get_best_model_for_forecasting(
                 tenant_id=tenant_id,
-                product_name=product_name
+                inventory_product_id=inventory_product_id
             )

             if model_data:
                 logger.info("Found specific model for product",
-                            product=product_name,
+                            inventory_product_id=inventory_product_id,
                             model_id=model_data.get('model_id'))
                 return model_data

@@ -62,7 +62,7 @@ class ModelClient:
async def get_best_model_for_forecasting(
self,
tenant_id: str,
product_name: Optional[str] = None
inventory_product_id: Optional[str] = None
) -> Optional[Dict[str, Any]]:
"""
Get the best model for forecasting based on performance metrics
@@ -71,7 +71,7 @@ class ModelClient:
# Get latest model
latest_model = await self.clients.training.get_active_model_for_product(
tenant_id=tenant_id,
product_name=product_name
inventory_product_id=inventory_product_id
)

if not latest_model:
@@ -137,7 +137,7 @@ class ModelClient:
logger.info("Found fallback model for tenant",
tenant_id=tenant_id,
model_id=best_model.get('id', 'unknown'),
product=best_model.get('product_name', 'unknown'))
inventory_product_id=best_model.get('inventory_product_id', 'unknown'))
return best_model

logger.warning("No fallback models available for tenant", tenant_id=tenant_id)

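Taken together, the ModelClient edits preserve its two-step lookup: first a model scoped to the given inventory product, then a tenant-wide fallback chosen by stored performance metrics. A condensed sketch of that control flow; the list_models helper and the accuracy key are assumptions made for illustration, not the training client's actual API.

from typing import Any, Dict, Optional

async def best_model_with_fallback(training_client: Any, tenant_id: str,
                                   inventory_product_id: Optional[str] = None) -> Optional[Dict[str, Any]]:
    # Step 1: prefer a model trained specifically for this inventory product.
    if inventory_product_id:
        model = await training_client.get_active_model_for_product(
            tenant_id=tenant_id,
            inventory_product_id=inventory_product_id,
        )
        if model:
            return model
    # Step 2: otherwise fall back to the tenant's best-scoring model overall.
    candidates = await training_client.list_models(tenant_id=tenant_id)  # assumed helper
    return max(candidates, key=lambda m: m.get("accuracy", 0.0), default=None)
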
@@ -38,7 +38,7 @@ class PredictionService:
async def validate_prediction_request(self, request: Dict[str, Any]) -> Dict[str, Any]:
"""Validate prediction request"""
try:
required_fields = ["product_name", "model_id", "features"]
required_fields = ["inventory_product_id", "model_id", "features"]
missing_fields = [field for field in required_fields if field not in request]

if missing_fields:

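The required-fields swap is the entire behavioral change here: requests are rejected unless they carry inventory_product_id instead of product_name. A self-contained sketch of the check; the returned dict shape is illustrative.

from typing import Any, Dict

REQUIRED_FIELDS = ("inventory_product_id", "model_id", "features")

def check_prediction_request(request: Dict[str, Any]) -> Dict[str, Any]:
    """Return a validity verdict plus whichever required fields are absent."""
    missing = [field for field in REQUIRED_FIELDS if field not in request]
    return {"valid": not missing, "missing_fields": missing}
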
@@ -39,7 +39,7 @@ class SalesRepository(BaseRepository[SalesData, SalesDataCreate, SalesDataUpdate
logger.info(
"Created sales record",
record_id=record.id,
product=record.product_name,
inventory_product_id=record.inventory_product_id,
quantity=record.quantity_sold,
tenant_id=tenant_id
)
@@ -65,10 +65,16 @@ class SalesRepository(BaseRepository[SalesData, SalesDataCreate, SalesDataUpdate
stmt = stmt.where(SalesData.date >= query_params.start_date)
if query_params.end_date:
stmt = stmt.where(SalesData.date <= query_params.end_date)
if query_params.product_name:
stmt = stmt.where(SalesData.product_name.ilike(f"%{query_params.product_name}%"))
if query_params.product_category:
stmt = stmt.where(SalesData.product_category == query_params.product_category)
# Note: product_name queries now require joining with inventory service
# if query_params.product_name:
# # Would need to join with inventory service to filter by product name
# pass
# Note: product_category field was removed - filtering by category now requires inventory service
# if query_params.product_category:
# # Would need to join with inventory service to filter by product category
# pass
if hasattr(query_params, 'inventory_product_id') and query_params.inventory_product_id:
stmt = stmt.where(SalesData.inventory_product_id == query_params.inventory_product_id)
if query_params.location_id:
stmt = stmt.where(SalesData.location_id == query_params.location_id)
if query_params.sales_channel:
@@ -174,7 +180,7 @@ class SalesRepository(BaseRepository[SalesData, SalesDataCreate, SalesDataUpdate

# Top products
top_products_query = select(
SalesData.product_name,
SalesData.inventory_product_id, # Note: was product_name
func.sum(SalesData.revenue).label('revenue'),
func.sum(SalesData.quantity_sold).label('quantity')
).where(SalesData.tenant_id == tenant_id)
@@ -185,7 +191,7 @@ class SalesRepository(BaseRepository[SalesData, SalesDataCreate, SalesDataUpdate
top_products_query = top_products_query.where(SalesData.date <= end_date)

top_products_query = top_products_query.group_by(
SalesData.product_name
SalesData.inventory_product_id # Note: was product_name
).order_by(
desc(func.sum(SalesData.revenue))
).limit(10)
@@ -193,7 +199,7 @@ class SalesRepository(BaseRepository[SalesData, SalesDataCreate, SalesDataUpdate
top_products_result = await self.session.execute(top_products_query)
top_products = [
{
'product_name': row.product_name,
'inventory_product_id': str(row.inventory_product_id), # Note: was product_name
'revenue': float(row.revenue) if row.revenue else 0,
'quantity': row.quantity or 0
}
@@ -239,15 +245,12 @@ class SalesRepository(BaseRepository[SalesData, SalesDataCreate, SalesDataUpdate
async def get_product_categories(self, tenant_id: UUID) -> List[str]:
"""Get distinct product categories for a tenant"""
try:
stmt = select(SalesData.product_category).where(
and_(
SalesData.tenant_id == tenant_id,
SalesData.product_category.is_not(None)
)
).distinct()

result = await self.session.execute(stmt)
categories = [row[0] for row in result if row[0]]
# Note: product_category field was removed - categories now managed via inventory service
# This method should be updated to query categories from inventory service
# For now, return empty list to avoid breaking existing code
logger.warning("get_product_categories called but product_category field was removed",
tenant_id=tenant_id)
categories = []

return sorted(categories)

@@ -279,15 +282,18 @@ class SalesRepository(BaseRepository[SalesData, SalesDataCreate, SalesDataUpdate
async def get_product_statistics(self, tenant_id: str) -> List[Dict[str, Any]]:
"""Get product statistics for tenant"""
try:
stmt = select(SalesData.product_name).where(
# Note: product_name field was removed - product info now managed via inventory service
# This method should be updated to query products from inventory service
# For now, return inventory_product_ids to avoid breaking existing code
stmt = select(SalesData.inventory_product_id).where(
and_(
SalesData.tenant_id == tenant_id,
SalesData.product_name.is_not(None)
SalesData.inventory_product_id.is_not(None)
)
).distinct()

result = await self.session.execute(stmt)
products = [row[0] for row in result if row[0]]
products = [str(row[0]) for row in result if row[0]]

return sorted(products)

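Since sales rows no longer carry product_name, anything rendering the top-products aggregation has to resolve display names through the inventory service after grouping by inventory_product_id. A sketch of that enrichment step; inventory_client and its batch get_products_by_ids lookup are assumed interfaces, not an existing API.

from typing import Any, Dict, List

async def enrich_top_products(inventory_client: Any,
                              top_products: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
    """Attach display names from the inventory service to grouped sales rows."""
    ids = [row["inventory_product_id"] for row in top_products]
    products = await inventory_client.get_products_by_ids(ids)  # assumed batch lookup
    names = {str(p["id"]): p["name"] for p in products}
    for row in top_products:
        row["product_name"] = names.get(row["inventory_product_id"], "unknown")
    return top_products
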
@@ -53,9 +53,10 @@ class SalesDataCreate(SalesDataBase):

class SalesDataUpdate(BaseModel):
"""Schema for updating sales data"""
product_name: Optional[str] = Field(None, min_length=1, max_length=255)
product_category: Optional[str] = Field(None, max_length=100)
product_sku: Optional[str] = Field(None, max_length=100)
# Note: product_name and product_category fields removed - use inventory service for product management
# product_name: Optional[str] = Field(None, min_length=1, max_length=255) # DEPRECATED
# product_category: Optional[str] = Field(None, max_length=100) # DEPRECATED
# product_sku: Optional[str] = Field(None, max_length=100) # DEPRECATED - use inventory service

quantity_sold: Optional[int] = Field(None, gt=0)
unit_price: Optional[Decimal] = Field(None, ge=0)
@@ -98,8 +99,10 @@ class SalesDataQuery(BaseModel):
"""Schema for sales data queries"""
start_date: Optional[datetime] = None
end_date: Optional[datetime] = None
product_name: Optional[str] = None
product_category: Optional[str] = None
# Note: product_name and product_category filtering now requires inventory service integration
# product_name: Optional[str] = None # DEPRECATED - use inventory_product_id or join with inventory service
# product_category: Optional[str] = None # DEPRECATED - use inventory service categories
inventory_product_id: Optional[UUID] = None # Filter by specific inventory product ID
location_id: Optional[str] = None
sales_channel: Optional[str] = None
source: Optional[str] = None
@@ -136,7 +139,8 @@ class SalesAnalytics(BaseModel):

class ProductSalesAnalytics(BaseModel):
"""Product-specific sales analytics"""
product_name: str
inventory_product_id: UUID # Reference to inventory service product
# Note: product_name can be fetched from inventory service using inventory_product_id
total_revenue: Decimal
total_quantity: int
total_transactions: int

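For callers, migrating a query means filtering on the UUID rather than the free-text name. A usage sketch; the import path is an assumption, and the UUID is the same illustrative value the test fixtures use.

from uuid import UUID

from app.schemas.sales import SalesDataQuery  # assumed module path

# Old: SalesDataQuery(product_name="Croissant", limit=10, offset=0)
query = SalesDataQuery(
    inventory_product_id=UUID("550e8400-e29b-41d4-a716-446655440001"),
    limit=10,
    offset=0,
)
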
@@ -1,64 +0,0 @@
"""Add inventory product reference and remove redundant product model

Revision ID: 003
Revises: 002
Create Date: 2025-01-15 11:00:00.000000

"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision = '003'
down_revision = '002'
branch_labels = None
depends_on = None


def upgrade() -> None:
# Add inventory product reference to sales_data table
op.add_column('sales_data', sa.Column('inventory_product_id',
postgresql.UUID(as_uuid=True), nullable=True))

# Add product_type column for caching product type from inventory
op.add_column('sales_data', sa.Column('product_type',
sa.String(20), nullable=True))

# Create indexes for new columns
op.create_index('idx_sales_inventory_product', 'sales_data',
['inventory_product_id', 'tenant_id'])
op.create_index('idx_sales_product_type', 'sales_data',
['product_type', 'tenant_id', 'date'])

# Drop the redundant products table if it exists
op.execute("DROP TABLE IF EXISTS products CASCADE;")


def downgrade() -> None:
# Drop new indexes
op.drop_index('idx_sales_product_type', table_name='sales_data')
op.drop_index('idx_sales_inventory_product', table_name='sales_data')

# Remove new columns
op.drop_column('sales_data', 'product_type')
op.drop_column('sales_data', 'inventory_product_id')

# Recreate products table (basic version)
op.create_table(
'products',
sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('name', sa.String(255), nullable=False),
sa.Column('sku', sa.String(100), nullable=True),
sa.Column('category', sa.String(100), nullable=True),
sa.Column('description', sa.Text(), nullable=True),
sa.Column('is_active', sa.Boolean(), default=True),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.PrimaryKeyConstraint('id')
)

# Recreate basic indexes
op.create_index('idx_products_tenant_name', 'products', ['tenant_id', 'name'], unique=True)
op.create_index('idx_products_tenant_sku', 'products', ['tenant_id', 'sku'])
@@ -1,61 +0,0 @@
"""Remove cached product fields - use only inventory_product_id

Revision ID: 004
Revises: 003
Create Date: 2025-01-15 12:00:00.000000

"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision = '004'
down_revision = '003'
branch_labels = None
depends_on = None


def upgrade() -> None:
# Make inventory_product_id required (NOT NULL)
op.alter_column('sales_data', 'inventory_product_id', nullable=False)

# Remove cached product fields - inventory service is single source of truth
op.drop_column('sales_data', 'product_name')
op.drop_column('sales_data', 'product_category')
op.drop_column('sales_data', 'product_sku')
op.drop_column('sales_data', 'product_type')

# Drop old indexes that referenced removed fields
op.execute("DROP INDEX IF EXISTS idx_sales_tenant_product")
op.execute("DROP INDEX IF EXISTS idx_sales_tenant_category")
op.execute("DROP INDEX IF EXISTS idx_sales_product_date")
op.execute("DROP INDEX IF EXISTS idx_sales_sku_date")
op.execute("DROP INDEX IF EXISTS idx_sales_product_type")

# Create optimized indexes for inventory-only approach
op.create_index('idx_sales_inventory_product_date', 'sales_data',
['inventory_product_id', 'date', 'tenant_id'])
op.create_index('idx_sales_tenant_inventory_product', 'sales_data',
['tenant_id', 'inventory_product_id'])


def downgrade() -> None:
# Drop new indexes
op.drop_index('idx_sales_tenant_inventory_product', table_name='sales_data')
op.drop_index('idx_sales_inventory_product_date', table_name='sales_data')

# Add back cached product fields for downgrade compatibility
op.add_column('sales_data', sa.Column('product_name', sa.String(255), nullable=True))
op.add_column('sales_data', sa.Column('product_category', sa.String(100), nullable=True))
op.add_column('sales_data', sa.Column('product_sku', sa.String(100), nullable=True))
op.add_column('sales_data', sa.Column('product_type', sa.String(20), nullable=True))

# Make inventory_product_id optional again
op.alter_column('sales_data', 'inventory_product_id', nullable=True)

# Recreate old indexes
op.create_index('idx_sales_tenant_product', 'sales_data', ['tenant_id', 'product_name'])
op.create_index('idx_sales_tenant_category', 'sales_data', ['tenant_id', 'product_category'])
op.create_index('idx_sales_product_date', 'sales_data', ['product_name', 'date', 'tenant_id'])
op.create_index('idx_sales_sku_date', 'sales_data', ['product_sku', 'date', 'tenant_id'])
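One wrinkle the deleted upgrade glosses over: alter_column(..., nullable=False) fails if historical rows still carry NULL inventory_product_id, so any replacement migration needs a backfill (or purge) first. A hedged sketch of the safer ordering; deleting orphaned rows is only a placeholder policy, since the real mapping would come from the inventory service.

from alembic import op

def upgrade() -> None:
    # 1. Clear out rows that never received an inventory reference
    #    (placeholder policy; a production migration would backfill instead).
    op.execute("DELETE FROM sales_data WHERE inventory_product_id IS NULL")
    # 2. Only then can the column be tightened to NOT NULL.
    op.alter_column('sales_data', 'inventory_product_id', nullable=False)
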
@@ -91,6 +91,7 @@ def sample_sales_data(sample_tenant_id: UUID) -> SalesDataCreate:
"""Sample sales data for testing"""
return SalesDataCreate(
date=datetime.now(timezone.utc),
inventory_product_id="550e8400-e29b-41d4-a716-446655440000",
product_name="Pan Integral",
product_category="Panadería",
product_sku="PAN001",
@@ -117,6 +118,7 @@ def sample_sales_records(sample_tenant_id: UUID) -> list[dict]:
{
"tenant_id": sample_tenant_id,
"date": base_date,
"inventory_product_id": "550e8400-e29b-41d4-a716-446655440001",
"product_name": "Croissant",
"quantity_sold": 3,
"revenue": Decimal("7.50"),
@@ -126,6 +128,7 @@ def sample_sales_records(sample_tenant_id: UUID) -> list[dict]:
{
"tenant_id": sample_tenant_id,
"date": base_date,
"inventory_product_id": "550e8400-e29b-41d4-a716-446655440002",
"product_name": "Café Americano",
"quantity_sold": 2,
"revenue": Decimal("5.00"),
@@ -135,6 +138,7 @@ def sample_sales_records(sample_tenant_id: UUID) -> list[dict]:
{
"tenant_id": sample_tenant_id,
"date": base_date,
"inventory_product_id": "550e8400-e29b-41d4-a716-446655440003",
"product_name": "Bocadillo Jamón",
"quantity_sold": 1,
"revenue": Decimal("4.50"),
@@ -229,6 +233,7 @@ def performance_test_data(sample_tenant_id: UUID) -> list[dict]:
records.append({
"tenant_id": sample_tenant_id,
"date": base_date,
"inventory_product_id": f"550e8400-e29b-41d4-a716-{i:012x}",
"product_name": f"Test Product {i % 20}",
"quantity_sold": (i % 10) + 1,
"revenue": Decimal(str(((i % 10) + 1) * 2.5)),

@@ -26,7 +26,7 @@ class TestSalesRepository:
assert record is not None
assert record.id is not None
assert record.tenant_id == sample_tenant_id
assert record.product_name == sample_sales_data.product_name
assert record.inventory_product_id == sample_sales_data.inventory_product_id
assert record.quantity_sold == sample_sales_data.quantity_sold
assert record.revenue == sample_sales_data.revenue

@@ -42,7 +42,7 @@ class TestSalesRepository:

assert retrieved_record is not None
assert retrieved_record.id == created_record.id
assert retrieved_record.product_name == created_record.product_name
assert retrieved_record.inventory_product_id == created_record.inventory_product_id

async def test_get_by_tenant(self, populated_db, sample_tenant_id):
"""Test getting records by tenant"""
@@ -57,10 +57,12 @@ class TestSalesRepository:
"""Test getting records by product"""
repository = SalesRepository(populated_db)

records = await repository.get_by_product(sample_tenant_id, "Croissant")
# Get by inventory_product_id instead of product name
test_product_id = "550e8400-e29b-41d4-a716-446655440001"
records = await repository.get_by_inventory_product_id(sample_tenant_id, test_product_id)

assert len(records) == 1
assert records[0].product_name == "Croissant"
assert records[0].inventory_product_id == test_product_id

async def test_update_record(self, test_db_session, sample_tenant_id, sample_sales_data):
"""Test updating a sales record"""
@@ -71,6 +73,7 @@ class TestSalesRepository:

# Update record
update_data = SalesDataUpdate(
inventory_product_id="550e8400-e29b-41d4-a716-446655440999",
product_name="Updated Product",
quantity_sold=10,
revenue=Decimal("25.00")
@@ -78,7 +81,7 @@ class TestSalesRepository:

updated_record = await repository.update(created_record.id, update_data.model_dump(exclude_unset=True))

assert updated_record.product_name == "Updated Product"
assert updated_record.inventory_product_id == "550e8400-e29b-41d4-a716-446655440999"
assert updated_record.quantity_sold == 10
assert updated_record.revenue == Decimal("25.00")

@@ -137,7 +140,7 @@ class TestSalesRepository:
repository = SalesRepository(populated_db)

query = SalesDataQuery(
product_name="Croissant",
inventory_product_id="550e8400-e29b-41d4-a716-446655440001",
limit=10,
offset=0
)
@@ -145,7 +148,7 @@ class TestSalesRepository:
records = await repository.get_by_tenant(sample_tenant_id, query)

assert len(records) == 1
assert records[0].product_name == "Croissant"
assert records[0].inventory_product_id == "550e8400-e29b-41d4-a716-446655440001"

async def test_bulk_create(self, test_db_session, sample_tenant_id):
"""Test bulk creating records"""
@@ -155,6 +158,7 @@ class TestSalesRepository:
bulk_data = [
{
"date": datetime.now(timezone.utc),
"inventory_product_id": f"550e8400-e29b-41d4-a716-{i+100:012x}",
"product_name": f"Product {i}",
"quantity_sold": i + 1,
"revenue": Decimal(str((i + 1) * 2.5)),

@@ -31,7 +31,7 @@ class TestSalesService:
mock_repository = AsyncMock()
mock_record = AsyncMock()
mock_record.id = uuid4()
mock_record.product_name = sample_sales_data.product_name
mock_record.inventory_product_id = sample_sales_data.inventory_product_id
mock_repository.create_sales_record.return_value = mock_record

with patch('app.services.sales_service.SalesRepository', return_value=mock_repository):
@@ -49,6 +49,7 @@ class TestSalesService:
# Create invalid sales data (future date)
invalid_data = SalesDataCreate(
date=datetime(2030, 1, 1, tzinfo=timezone.utc), # Future date
inventory_product_id="550e8400-e29b-41d4-a716-446655440000",
product_name="Test Product",
quantity_sold=1,
revenue=Decimal("5.00")
@@ -61,6 +62,7 @@ class TestSalesService:
"""Test updating a sales record"""
record_id = uuid4()
update_data = SalesDataUpdate(
inventory_product_id="550e8400-e29b-41d4-a716-446655440999",
product_name="Updated Product",
quantity_sold=10
)
@@ -78,7 +80,7 @@ class TestSalesService:

# Mock updated record
mock_updated = AsyncMock()
mock_updated.product_name = "Updated Product"
mock_updated.inventory_product_id = "550e8400-e29b-41d4-a716-446655440999"
mock_repository.update.return_value = mock_updated

with patch('app.services.sales_service.SalesRepository', return_value=mock_repository):
@@ -88,13 +90,13 @@ class TestSalesService:
sample_tenant_id
)

assert result.product_name == "Updated Product"
assert result.inventory_product_id == "550e8400-e29b-41d4-a716-446655440999"
mock_repository.update.assert_called_once()

async def test_update_nonexistent_record(self, sales_service, sample_tenant_id):
"""Test updating a non-existent record"""
record_id = uuid4()
update_data = SalesDataUpdate(product_name="Updated Product")
update_data = SalesDataUpdate(inventory_product_id="550e8400-e29b-41d4-a716-446655440999", product_name="Updated Product")

with patch('app.services.sales_service.get_db_transaction') as mock_get_db:
mock_db = AsyncMock()
@@ -195,7 +197,7 @@ class TestSalesService:

async def test_get_product_sales(self, sales_service, sample_tenant_id):
"""Test getting sales for specific product"""
product_name = "Test Product"
inventory_product_id = "550e8400-e29b-41d4-a716-446655440000"

with patch('app.services.sales_service.get_db_transaction') as mock_get_db:
mock_db = AsyncMock()
@@ -206,7 +208,7 @@ class TestSalesService:
mock_repository.get_by_product.return_value = mock_records

with patch('app.services.sales_service.SalesRepository', return_value=mock_repository):
result = await sales_service.get_product_sales(sample_tenant_id, product_name)
result = await sales_service.get_product_sales(sample_tenant_id, inventory_product_id)

assert len(result) == 2
mock_repository.get_by_product.assert_called_once()
@@ -268,6 +270,7 @@ class TestSalesService:
# Test revenue mismatch detection
sales_data = SalesDataCreate(
date=datetime.now(timezone.utc),
inventory_product_id="550e8400-e29b-41d4-a716-446655440000",
product_name="Test Product",
quantity_sold=5,
unit_price=Decimal("2.00"),

@@ -464,47 +464,47 @@ async def get_orders_by_supplier(
raise HTTPException(status_code=500, detail="Failed to retrieve orders by supplier")


@router.get("/ingredients/{ingredient_id}/history")
async def get_ingredient_purchase_history(
ingredient_id: UUID = Path(..., description="Ingredient ID"),
@router.get("/inventory-products/{inventory_product_id}/history")
async def get_inventory_product_purchase_history(
inventory_product_id: UUID = Path(..., description="Inventory Product ID"),
days_back: int = Query(90, ge=1, le=365, description="Number of days to look back"),
current_user: UserInfo = Depends(get_current_user),
db: Session = Depends(get_db)
):
"""Get purchase history for a specific ingredient"""
"""Get purchase history for a specific inventory product"""
require_permissions(current_user, ["purchase_orders:read"])

try:
service = PurchaseOrderService(db)
history = await service.get_ingredient_purchase_history(
history = await service.get_inventory_product_purchase_history(
tenant_id=current_user.tenant_id,
ingredient_id=ingredient_id,
inventory_product_id=inventory_product_id,
days_back=days_back
)
return history
except Exception as e:
logger.error("Error getting ingredient purchase history", ingredient_id=str(ingredient_id), error=str(e))
raise HTTPException(status_code=500, detail="Failed to retrieve ingredient purchase history")
logger.error("Error getting inventory product purchase history", inventory_product_id=str(inventory_product_id), error=str(e))
raise HTTPException(status_code=500, detail="Failed to retrieve inventory product purchase history")


@router.get("/ingredients/top-purchased")
async def get_top_purchased_ingredients(
@router.get("/inventory-products/top-purchased")
async def get_top_purchased_inventory_products(
days_back: int = Query(30, ge=1, le=365, description="Number of days to look back"),
limit: int = Query(10, ge=1, le=50, description="Number of top ingredients to return"),
limit: int = Query(10, ge=1, le=50, description="Number of top inventory products to return"),
current_user: UserInfo = Depends(get_current_user),
db: Session = Depends(get_db)
):
"""Get most purchased ingredients by value"""
"""Get most purchased inventory products by value"""
require_permissions(current_user, ["purchase_orders:read"])

try:
service = PurchaseOrderService(db)
ingredients = await service.get_top_purchased_ingredients(
products = await service.get_top_purchased_inventory_products(
tenant_id=current_user.tenant_id,
days_back=days_back,
limit=limit
)
return ingredients
return products
except Exception as e:
logger.error("Error getting top purchased ingredients", error=str(e))
raise HTTPException(status_code=500, detail="Failed to retrieve top purchased ingredients")
logger.error("Error getting top purchased inventory products", error=str(e))
raise HTTPException(status_code=500, detail="Failed to retrieve top purchased inventory products")
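API consumers follow the same rename, moving from /ingredients/{id}/history to the inventory-products route. A client-side sketch; the base URL, port, and bearer-token auth are assumptions about the deployment, not part of this change.

import httpx

async def fetch_purchase_history(product_id: str, token: str) -> dict:
    async with httpx.AsyncClient(base_url="http://localhost:8000") as client:  # assumed host
        resp = await client.get(
            f"/inventory-products/{product_id}/history",
            params={"days_back": 90},
            headers={"Authorization": f"Bearer {token}"},  # assumed auth scheme
        )
        resp.raise_for_status()
        return resp.json()
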
@@ -186,9 +186,8 @@ class SupplierPriceList(Base):
supplier_id = Column(UUID(as_uuid=True), ForeignKey('suppliers.id'), nullable=False, index=True)

# Product identification (references inventory service)
ingredient_id = Column(UUID(as_uuid=True), nullable=False, index=True) # Reference to inventory.ingredients
inventory_product_id = Column(UUID(as_uuid=True), nullable=False, index=True) # Reference to inventory products
product_code = Column(String(100), nullable=True) # Supplier's product code
product_name = Column(String(255), nullable=False)

# Pricing information
unit_price = Column(Numeric(10, 4), nullable=False)
@@ -228,7 +227,7 @@ class SupplierPriceList(Base):
# Indexes
__table_args__ = (
Index('ix_price_lists_tenant_supplier', 'tenant_id', 'supplier_id'),
Index('ix_price_lists_ingredient', 'ingredient_id'),
Index('ix_price_lists_inventory_product', 'inventory_product_id'),
Index('ix_price_lists_active', 'is_active'),
Index('ix_price_lists_effective_date', 'effective_date'),
)
@@ -317,9 +316,8 @@ class PurchaseOrderItem(Base):
price_list_item_id = Column(UUID(as_uuid=True), ForeignKey('supplier_price_lists.id'), nullable=True, index=True)

# Product identification
ingredient_id = Column(UUID(as_uuid=True), nullable=False, index=True) # Reference to inventory.ingredients
inventory_product_id = Column(UUID(as_uuid=True), nullable=False, index=True) # Reference to inventory products
product_code = Column(String(100), nullable=True) # Supplier's product code
product_name = Column(String(255), nullable=False)

# Order quantities
ordered_quantity = Column(Integer, nullable=False)
@@ -347,7 +345,7 @@ class PurchaseOrderItem(Base):
# Indexes
__table_args__ = (
Index('ix_po_items_tenant_po', 'tenant_id', 'purchase_order_id'),
Index('ix_po_items_ingredient', 'ingredient_id'),
Index('ix_po_items_inventory_product', 'inventory_product_id'),
)


@@ -421,8 +419,7 @@ class DeliveryItem(Base):
purchase_order_item_id = Column(UUID(as_uuid=True), ForeignKey('purchase_order_items.id'), nullable=False, index=True)

# Product identification
ingredient_id = Column(UUID(as_uuid=True), nullable=False, index=True)
product_name = Column(String(255), nullable=False)
inventory_product_id = Column(UUID(as_uuid=True), nullable=False, index=True)

# Delivery quantities
ordered_quantity = Column(Integer, nullable=False)
@@ -451,7 +448,7 @@ class DeliveryItem(Base):
# Indexes
__table_args__ = (
Index('ix_delivery_items_tenant_delivery', 'tenant_id', 'delivery_id'),
Index('ix_delivery_items_ingredient', 'ingredient_id'),
Index('ix_delivery_items_inventory_product', 'inventory_product_id'),
)

@@ -28,19 +28,19 @@ class PurchaseOrderItemRepository(BaseRepository[PurchaseOrderItem]):
.all()
)

def get_by_ingredient(
def get_by_inventory_product(
self,
tenant_id: UUID,
ingredient_id: UUID,
inventory_product_id: UUID,
limit: int = 20
) -> List[PurchaseOrderItem]:
"""Get recent order items for a specific ingredient"""
"""Get recent order items for a specific inventory product"""
return (
self.db.query(self.model)
.filter(
and_(
self.model.tenant_id == tenant_id,
self.model.ingredient_id == ingredient_id
self.model.inventory_product_id == inventory_product_id
)
)
.order_by(self.model.created_at.desc())
@@ -103,7 +103,7 @@ class PurchaseOrderItemRepository(BaseRepository[PurchaseOrderItem]):
def get_pending_receipt_items(
self,
tenant_id: UUID,
ingredient_id: Optional[UUID] = None
inventory_product_id: Optional[UUID] = None
) -> List[PurchaseOrderItem]:
"""Get items pending receipt (not yet delivered)"""
query = (
@@ -116,8 +116,8 @@ class PurchaseOrderItemRepository(BaseRepository[PurchaseOrderItem]):
)
)

if ingredient_id:
query = query.filter(self.model.ingredient_id == ingredient_id)
if inventory_product_id:
query = query.filter(self.model.inventory_product_id == inventory_product_id)

return query.order_by(self.model.created_at).all()

@@ -134,13 +134,13 @@ class PurchaseOrderItemRepository(BaseRepository[PurchaseOrderItem]):
self.db.refresh(item)
return item

def get_ingredient_purchase_history(
def get_inventory_product_purchase_history(
self,
tenant_id: UUID,
ingredient_id: UUID,
inventory_product_id: UUID,
days_back: int = 90
) -> Dict[str, Any]:
"""Get purchase history and analytics for an ingredient"""
"""Get purchase history and analytics for an inventory product"""
from datetime import timedelta

cutoff_date = datetime.utcnow() - timedelta(days=days_back)
@@ -151,7 +151,7 @@ class PurchaseOrderItemRepository(BaseRepository[PurchaseOrderItem]):
.filter(
and_(
self.model.tenant_id == tenant_id,
self.model.ingredient_id == ingredient_id,
self.model.inventory_product_id == inventory_product_id,
self.model.created_at >= cutoff_date
)
)
@@ -202,22 +202,21 @@ class PurchaseOrderItemRepository(BaseRepository[PurchaseOrderItem]):
"price_trend": price_trend
}

def get_top_purchased_ingredients(
def get_top_purchased_inventory_products(
self,
tenant_id: UUID,
days_back: int = 30,
limit: int = 10
) -> List[Dict[str, Any]]:
"""Get most purchased ingredients by quantity or value"""
"""Get most purchased inventory products by quantity or value"""
from datetime import timedelta

cutoff_date = datetime.utcnow() - timedelta(days=days_back)

# Group by ingredient and calculate totals
# Group by inventory product and calculate totals
results = (
self.db.query(
self.model.ingredient_id,
self.model.product_name,
self.model.inventory_product_id,
self.model.unit_of_measure,
func.sum(self.model.ordered_quantity).label('total_quantity'),
func.sum(self.model.line_total).label('total_amount'),
@@ -231,8 +230,7 @@ class PurchaseOrderItemRepository(BaseRepository[PurchaseOrderItem]):
)
)
.group_by(
self.model.ingredient_id,
self.model.product_name,
self.model.inventory_product_id,
self.model.unit_of_measure
)
.order_by(func.sum(self.model.line_total).desc())
@@ -242,8 +240,7 @@ class PurchaseOrderItemRepository(BaseRepository[PurchaseOrderItem]):

return [
{
"ingredient_id": str(row.ingredient_id),
"product_name": row.product_name,
"inventory_product_id": str(row.inventory_product_id),
"unit_of_measure": row.unit_of_measure,
"total_quantity": int(row.total_quantity),
"total_amount": round(float(row.total_amount), 2),

@@ -186,9 +186,8 @@ class SupplierSummary(BaseModel):

class PurchaseOrderItemCreate(BaseModel):
"""Schema for creating purchase order items"""
ingredient_id: UUID
inventory_product_id: UUID
product_code: Optional[str] = Field(None, max_length=100)
product_name: str = Field(..., min_length=1, max_length=255)
ordered_quantity: int = Field(..., gt=0)
unit_of_measure: str = Field(..., max_length=20)
unit_price: Decimal = Field(..., gt=0)
@@ -210,9 +209,8 @@ class PurchaseOrderItemResponse(BaseModel):
tenant_id: UUID
purchase_order_id: UUID
price_list_item_id: Optional[UUID] = None
ingredient_id: UUID
inventory_product_id: UUID
product_code: Optional[str] = None
product_name: str
ordered_quantity: int
unit_of_measure: str
unit_price: Decimal
@@ -376,8 +374,7 @@ class PurchaseOrderSummary(BaseModel):
class DeliveryItemCreate(BaseModel):
"""Schema for creating delivery items"""
purchase_order_item_id: UUID
ingredient_id: UUID
product_name: str = Field(..., min_length=1, max_length=255)
inventory_product_id: UUID
ordered_quantity: int = Field(..., gt=0)
delivered_quantity: int = Field(..., ge=0)
accepted_quantity: int = Field(..., ge=0)
@@ -400,8 +397,7 @@ class DeliveryItemResponse(BaseModel):
tenant_id: UUID
delivery_id: UUID
purchase_order_item_id: UUID
ingredient_id: UUID
product_name: str
inventory_product_id: UUID
ordered_quantity: int
delivered_quantity: int
accepted_quantity: int

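Constructing a delivery item now carries the inventory reference in place of ingredient_id plus a cached name. A construction sketch, assuming the fields shown above are the full required set and that the schema is importable from the service's schemas module.

from uuid import uuid4

from app.schemas.purchase_orders import DeliveryItemCreate  # assumed module path

item = DeliveryItemCreate(
    purchase_order_item_id=uuid4(),
    inventory_product_id=uuid4(),  # replaces ingredient_id + product_name
    ordered_quantity=10,
    delivered_quantity=10,
    accepted_quantity=9,
)
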
@@ -444,24 +444,24 @@ class PurchaseOrderService:

return to_status in valid_transitions.get(from_status, [])

async def get_ingredient_purchase_history(
async def get_inventory_product_purchase_history(
self,
tenant_id: UUID,
ingredient_id: UUID,
inventory_product_id: UUID,
days_back: int = 90
) -> Dict[str, Any]:
"""Get purchase history for an ingredient"""
return self.item_repository.get_ingredient_purchase_history(
tenant_id, ingredient_id, days_back
"""Get purchase history for an inventory product"""
return self.item_repository.get_inventory_product_purchase_history(
tenant_id, inventory_product_id, days_back
)

async def get_top_purchased_ingredients(
async def get_top_purchased_inventory_products(
self,
tenant_id: UUID,
days_back: int = 30,
limit: int = 10
) -> List[Dict[str, Any]]:
"""Get most purchased ingredients"""
return self.item_repository.get_top_purchased_ingredients(
"""Get most purchased inventory products"""
return self.item_repository.get_top_purchased_inventory_products(
tenant_id, days_back, limit
)
@@ -1,404 +0,0 @@
"""Initial supplier and procurement tables

Revision ID: 001_initial_supplier_tables
Revises:
Create Date: 2024-01-15 10:00:00.000000

"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects.postgresql import UUID, JSONB


# revision identifiers, used by Alembic.
revision = '001_initial_supplier_tables'
down_revision = None
branch_labels = None
depends_on = None


def upgrade() -> None:
# Create suppliers table
op.create_table('suppliers',
sa.Column('id', UUID(as_uuid=True), nullable=False, primary_key=True),
sa.Column('tenant_id', UUID(as_uuid=True), nullable=False),
sa.Column('name', sa.String(255), nullable=False),
sa.Column('supplier_code', sa.String(50), nullable=True),
sa.Column('tax_id', sa.String(50), nullable=True),
sa.Column('registration_number', sa.String(100), nullable=True),
sa.Column('supplier_type', sa.Enum('INGREDIENTS', 'PACKAGING', 'EQUIPMENT', 'SERVICES', 'UTILITIES', 'MULTI', name='suppliertype'), nullable=False),
sa.Column('status', sa.Enum('ACTIVE', 'INACTIVE', 'PENDING_APPROVAL', 'SUSPENDED', 'BLACKLISTED', name='supplierstatus'), nullable=False, default='PENDING_APPROVAL'),
sa.Column('contact_person', sa.String(200), nullable=True),
sa.Column('email', sa.String(254), nullable=True),
sa.Column('phone', sa.String(30), nullable=True),
sa.Column('mobile', sa.String(30), nullable=True),
sa.Column('website', sa.String(255), nullable=True),
sa.Column('address_line1', sa.String(255), nullable=True),
sa.Column('address_line2', sa.String(255), nullable=True),
sa.Column('city', sa.String(100), nullable=True),
sa.Column('state_province', sa.String(100), nullable=True),
sa.Column('postal_code', sa.String(20), nullable=True),
sa.Column('country', sa.String(100), nullable=True),
sa.Column('payment_terms', sa.Enum('CASH_ON_DELIVERY', 'NET_15', 'NET_30', 'NET_45', 'NET_60', 'PREPAID', 'CREDIT_TERMS', name='paymentterms'), nullable=False, default='NET_30'),
sa.Column('credit_limit', sa.Numeric(12, 2), nullable=True),
sa.Column('currency', sa.String(3), nullable=False, default='EUR'),
sa.Column('standard_lead_time', sa.Integer(), nullable=False, default=3),
sa.Column('minimum_order_amount', sa.Numeric(10, 2), nullable=True),
sa.Column('delivery_area', sa.String(255), nullable=True),
sa.Column('quality_rating', sa.Float(), nullable=True, default=0.0),
sa.Column('delivery_rating', sa.Float(), nullable=True, default=0.0),
sa.Column('total_orders', sa.Integer(), nullable=False, default=0),
sa.Column('total_amount', sa.Numeric(12, 2), nullable=False, default=0.0),
sa.Column('approved_by', UUID(as_uuid=True), nullable=True),
sa.Column('approved_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('rejection_reason', sa.Text(), nullable=True),
sa.Column('notes', sa.Text(), nullable=True),
sa.Column('certifications', JSONB, nullable=True),
sa.Column('business_hours', JSONB, nullable=True),
sa.Column('specializations', JSONB, nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('created_by', UUID(as_uuid=True), nullable=False),
sa.Column('updated_by', UUID(as_uuid=True), nullable=False)
)

# Create supplier_price_lists table
op.create_table('supplier_price_lists',
sa.Column('id', UUID(as_uuid=True), nullable=False, primary_key=True),
sa.Column('tenant_id', UUID(as_uuid=True), nullable=False),
sa.Column('supplier_id', UUID(as_uuid=True), nullable=False),
sa.Column('ingredient_id', UUID(as_uuid=True), nullable=False),
sa.Column('product_code', sa.String(100), nullable=True),
sa.Column('product_name', sa.String(255), nullable=False),
sa.Column('unit_price', sa.Numeric(10, 4), nullable=False),
sa.Column('unit_of_measure', sa.String(20), nullable=False),
sa.Column('minimum_order_quantity', sa.Integer(), nullable=True, default=1),
sa.Column('price_per_unit', sa.Numeric(10, 4), nullable=False),
sa.Column('tier_pricing', JSONB, nullable=True),
sa.Column('effective_date', sa.DateTime(timezone=True), nullable=False),
sa.Column('expiry_date', sa.DateTime(timezone=True), nullable=True),
sa.Column('is_active', sa.Boolean(), nullable=False, default=True),
sa.Column('brand', sa.String(100), nullable=True),
sa.Column('packaging_size', sa.String(50), nullable=True),
sa.Column('origin_country', sa.String(100), nullable=True),
sa.Column('shelf_life_days', sa.Integer(), nullable=True),
sa.Column('storage_requirements', sa.Text(), nullable=True),
sa.Column('quality_specs', JSONB, nullable=True),
sa.Column('allergens', JSONB, nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('created_by', UUID(as_uuid=True), nullable=False),
sa.Column('updated_by', UUID(as_uuid=True), nullable=False),
sa.ForeignKeyConstraint(['supplier_id'], ['suppliers.id'])
)

# Create purchase_orders table
op.create_table('purchase_orders',
sa.Column('id', UUID(as_uuid=True), nullable=False, primary_key=True),
sa.Column('tenant_id', UUID(as_uuid=True), nullable=False),
sa.Column('supplier_id', UUID(as_uuid=True), nullable=False),
sa.Column('po_number', sa.String(50), nullable=False),
sa.Column('reference_number', sa.String(100), nullable=True),
sa.Column('status', sa.Enum('DRAFT', 'PENDING_APPROVAL', 'APPROVED', 'SENT_TO_SUPPLIER', 'CONFIRMED', 'PARTIALLY_RECEIVED', 'COMPLETED', 'CANCELLED', 'DISPUTED', name='purchaseorderstatus'), nullable=False, default='DRAFT'),
sa.Column('priority', sa.String(20), nullable=False, default='normal'),
sa.Column('order_date', sa.DateTime(timezone=True), nullable=False),
sa.Column('required_delivery_date', sa.DateTime(timezone=True), nullable=True),
sa.Column('estimated_delivery_date', sa.DateTime(timezone=True), nullable=True),
sa.Column('subtotal', sa.Numeric(12, 2), nullable=False, default=0.0),
sa.Column('tax_amount', sa.Numeric(12, 2), nullable=False, default=0.0),
sa.Column('shipping_cost', sa.Numeric(10, 2), nullable=False, default=0.0),
sa.Column('discount_amount', sa.Numeric(10, 2), nullable=False, default=0.0),
sa.Column('total_amount', sa.Numeric(12, 2), nullable=False, default=0.0),
sa.Column('currency', sa.String(3), nullable=False, default='EUR'),
sa.Column('delivery_address', sa.Text(), nullable=True),
sa.Column('delivery_instructions', sa.Text(), nullable=True),
sa.Column('delivery_contact', sa.String(200), nullable=True),
sa.Column('delivery_phone', sa.String(30), nullable=True),
sa.Column('requires_approval', sa.Boolean(), nullable=False, default=False),
sa.Column('approved_by', UUID(as_uuid=True), nullable=True),
sa.Column('approved_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('rejection_reason', sa.Text(), nullable=True),
sa.Column('sent_to_supplier_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('supplier_confirmation_date', sa.DateTime(timezone=True), nullable=True),
sa.Column('supplier_reference', sa.String(100), nullable=True),
sa.Column('notes', sa.Text(), nullable=True),
sa.Column('internal_notes', sa.Text(), nullable=True),
sa.Column('terms_and_conditions', sa.Text(), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('created_by', UUID(as_uuid=True), nullable=False),
sa.Column('updated_by', UUID(as_uuid=True), nullable=False),
sa.ForeignKeyConstraint(['supplier_id'], ['suppliers.id'])
)

# Create purchase_order_items table
op.create_table('purchase_order_items',
sa.Column('id', UUID(as_uuid=True), nullable=False, primary_key=True),
sa.Column('tenant_id', UUID(as_uuid=True), nullable=False),
sa.Column('purchase_order_id', UUID(as_uuid=True), nullable=False),
sa.Column('price_list_item_id', UUID(as_uuid=True), nullable=True),
sa.Column('ingredient_id', UUID(as_uuid=True), nullable=False),
sa.Column('product_code', sa.String(100), nullable=True),
sa.Column('product_name', sa.String(255), nullable=False),
sa.Column('ordered_quantity', sa.Integer(), nullable=False),
sa.Column('unit_of_measure', sa.String(20), nullable=False),
sa.Column('unit_price', sa.Numeric(10, 4), nullable=False),
sa.Column('line_total', sa.Numeric(12, 2), nullable=False),
sa.Column('received_quantity', sa.Integer(), nullable=False, default=0),
sa.Column('remaining_quantity', sa.Integer(), nullable=False, default=0),
sa.Column('quality_requirements', sa.Text(), nullable=True),
sa.Column('item_notes', sa.Text(), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
sa.ForeignKeyConstraint(['purchase_order_id'], ['purchase_orders.id']),
sa.ForeignKeyConstraint(['price_list_item_id'], ['supplier_price_lists.id'])
)

# Create deliveries table
op.create_table('deliveries',
sa.Column('id', UUID(as_uuid=True), nullable=False, primary_key=True),
sa.Column('tenant_id', UUID(as_uuid=True), nullable=False),
sa.Column('purchase_order_id', UUID(as_uuid=True), nullable=False),
sa.Column('supplier_id', UUID(as_uuid=True), nullable=False),
sa.Column('delivery_number', sa.String(50), nullable=False),
sa.Column('supplier_delivery_note', sa.String(100), nullable=True),
sa.Column('status', sa.Enum('SCHEDULED', 'IN_TRANSIT', 'OUT_FOR_DELIVERY', 'DELIVERED', 'PARTIALLY_DELIVERED', 'FAILED_DELIVERY', 'RETURNED', name='deliverystatus'), nullable=False, default='SCHEDULED'),
sa.Column('scheduled_date', sa.DateTime(timezone=True), nullable=True),
sa.Column('estimated_arrival', sa.DateTime(timezone=True), nullable=True),
sa.Column('actual_arrival', sa.DateTime(timezone=True), nullable=True),
sa.Column('completed_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('delivery_address', sa.Text(), nullable=True),
sa.Column('delivery_contact', sa.String(200), nullable=True),
sa.Column('delivery_phone', sa.String(30), nullable=True),
sa.Column('carrier_name', sa.String(200), nullable=True),
sa.Column('tracking_number', sa.String(100), nullable=True),
sa.Column('inspection_passed', sa.Boolean(), nullable=True),
sa.Column('inspection_notes', sa.Text(), nullable=True),
sa.Column('quality_issues', JSONB, nullable=True),
sa.Column('received_by', UUID(as_uuid=True), nullable=True),
sa.Column('received_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('notes', sa.Text(), nullable=True),
sa.Column('photos', JSONB, nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('created_by', UUID(as_uuid=True), nullable=False),
sa.ForeignKeyConstraint(['purchase_order_id'], ['purchase_orders.id']),
sa.ForeignKeyConstraint(['supplier_id'], ['suppliers.id'])
)

# Create delivery_items table
op.create_table('delivery_items',
sa.Column('id', UUID(as_uuid=True), nullable=False, primary_key=True),
sa.Column('tenant_id', UUID(as_uuid=True), nullable=False),
sa.Column('delivery_id', UUID(as_uuid=True), nullable=False),
sa.Column('purchase_order_item_id', UUID(as_uuid=True), nullable=False),
sa.Column('ingredient_id', UUID(as_uuid=True), nullable=False),
sa.Column('product_name', sa.String(255), nullable=False),
sa.Column('ordered_quantity', sa.Integer(), nullable=False),
sa.Column('delivered_quantity', sa.Integer(), nullable=False),
sa.Column('accepted_quantity', sa.Integer(), nullable=False),
sa.Column('rejected_quantity', sa.Integer(), nullable=False, default=0),
sa.Column('batch_lot_number', sa.String(100), nullable=True),
sa.Column('expiry_date', sa.DateTime(timezone=True), nullable=True),
sa.Column('quality_grade', sa.String(20), nullable=True),
sa.Column('quality_issues', sa.Text(), nullable=True),
sa.Column('rejection_reason', sa.Text(), nullable=True),
sa.Column('item_notes', sa.Text(), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
sa.ForeignKeyConstraint(['delivery_id'], ['deliveries.id']),
sa.ForeignKeyConstraint(['purchase_order_item_id'], ['purchase_order_items.id'])
)

# Create supplier_quality_reviews table
op.create_table('supplier_quality_reviews',
sa.Column('id', UUID(as_uuid=True), nullable=False, primary_key=True),
sa.Column('tenant_id', UUID(as_uuid=True), nullable=False),
sa.Column('supplier_id', UUID(as_uuid=True), nullable=False),
sa.Column('purchase_order_id', UUID(as_uuid=True), nullable=True),
sa.Column('delivery_id', UUID(as_uuid=True), nullable=True),
sa.Column('review_date', sa.DateTime(timezone=True), nullable=False),
sa.Column('review_type', sa.String(50), nullable=False),
sa.Column('quality_rating', sa.Enum('EXCELLENT', 'GOOD', 'AVERAGE', 'POOR', 'VERY_POOR', name='qualityrating'), nullable=False),
sa.Column('delivery_rating', sa.Enum('EXCELLENT', 'GOOD', 'AVERAGE', 'POOR', 'VERY_POOR', name='deliveryrating'), nullable=False),
sa.Column('communication_rating', sa.Integer(), nullable=False),
sa.Column('overall_rating', sa.Float(), nullable=False),
sa.Column('quality_comments', sa.Text(), nullable=True),
sa.Column('delivery_comments', sa.Text(), nullable=True),
sa.Column('communication_comments', sa.Text(), nullable=True),
sa.Column('improvement_suggestions', sa.Text(), nullable=True),
sa.Column('quality_issues', JSONB, nullable=True),
sa.Column('corrective_actions', sa.Text(), nullable=True),
sa.Column('follow_up_required', sa.Boolean(), nullable=False, default=False),
sa.Column('follow_up_date', sa.DateTime(timezone=True), nullable=True),
sa.Column('is_final', sa.Boolean(), nullable=False, default=True),
sa.Column('approved_by', UUID(as_uuid=True), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('reviewed_by', UUID(as_uuid=True), nullable=False),
sa.ForeignKeyConstraint(['supplier_id'], ['suppliers.id']),
sa.ForeignKeyConstraint(['purchase_order_id'], ['purchase_orders.id']),
sa.ForeignKeyConstraint(['delivery_id'], ['deliveries.id'])
)

# Create supplier_invoices table
op.create_table('supplier_invoices',
sa.Column('id', UUID(as_uuid=True), nullable=False, primary_key=True),
sa.Column('tenant_id', UUID(as_uuid=True), nullable=False),
sa.Column('supplier_id', UUID(as_uuid=True), nullable=False),
sa.Column('purchase_order_id', UUID(as_uuid=True), nullable=True),
sa.Column('invoice_number', sa.String(50), nullable=False),
sa.Column('supplier_invoice_number', sa.String(100), nullable=False),
sa.Column('status', sa.Enum('PENDING', 'APPROVED', 'PAID', 'OVERDUE', 'DISPUTED', 'CANCELLED', name='invoicestatus'), nullable=False, default='PENDING'),
sa.Column('invoice_date', sa.DateTime(timezone=True), nullable=False),
sa.Column('due_date', sa.DateTime(timezone=True), nullable=False),
sa.Column('received_date', sa.DateTime(timezone=True), nullable=False),
sa.Column('subtotal', sa.Numeric(12, 2), nullable=False),
sa.Column('tax_amount', sa.Numeric(12, 2), nullable=False, default=0.0),
sa.Column('shipping_cost', sa.Numeric(10, 2), nullable=False, default=0.0),
sa.Column('discount_amount', sa.Numeric(10, 2), nullable=False, default=0.0),
sa.Column('total_amount', sa.Numeric(12, 2), nullable=False),
sa.Column('currency', sa.String(3), nullable=False, default='EUR'),
sa.Column('paid_amount', sa.Numeric(12, 2), nullable=False, default=0.0),
sa.Column('payment_date', sa.DateTime(timezone=True), nullable=True),
sa.Column('payment_reference', sa.String(100), nullable=True),
sa.Column('approved_by', UUID(as_uuid=True), nullable=True),
sa.Column('approved_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('rejection_reason', sa.Text(), nullable=True),
sa.Column('notes', sa.Text(), nullable=True),
sa.Column('invoice_document_url', sa.String(500), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('created_by', UUID(as_uuid=True), nullable=False),
sa.ForeignKeyConstraint(['supplier_id'], ['suppliers.id']),
sa.ForeignKeyConstraint(['purchase_order_id'], ['purchase_orders.id'])
)

# Create indexes
op.create_index('ix_suppliers_tenant_id', 'suppliers', ['tenant_id'])
op.create_index('ix_suppliers_name', 'suppliers', ['name'])
op.create_index('ix_suppliers_tenant_name', 'suppliers', ['tenant_id', 'name'])
op.create_index('ix_suppliers_tenant_status', 'suppliers', ['tenant_id', 'status'])
op.create_index('ix_suppliers_tenant_type', 'suppliers', ['tenant_id', 'supplier_type'])
op.create_index('ix_suppliers_quality_rating', 'suppliers', ['quality_rating'])
op.create_index('ix_suppliers_status', 'suppliers', ['status'])
op.create_index('ix_suppliers_supplier_type', 'suppliers', ['supplier_type'])

op.create_index('ix_price_lists_tenant_id', 'supplier_price_lists', ['tenant_id'])
op.create_index('ix_price_lists_supplier_id', 'supplier_price_lists', ['supplier_id'])
op.create_index('ix_price_lists_tenant_supplier', 'supplier_price_lists', ['tenant_id', 'supplier_id'])
op.create_index('ix_price_lists_ingredient', 'supplier_price_lists', ['ingredient_id'])
op.create_index('ix_price_lists_active', 'supplier_price_lists', ['is_active'])
op.create_index('ix_price_lists_effective_date', 'supplier_price_lists', ['effective_date'])

op.create_index('ix_purchase_orders_tenant_id', 'purchase_orders', ['tenant_id'])
op.create_index('ix_purchase_orders_supplier_id', 'purchase_orders', ['supplier_id'])
op.create_index('ix_purchase_orders_tenant_supplier', 'purchase_orders', ['tenant_id', 'supplier_id'])
op.create_index('ix_purchase_orders_tenant_status', 'purchase_orders', ['tenant_id', 'status'])
op.create_index('ix_purchase_orders_po_number', 'purchase_orders', ['po_number'])
op.create_index('ix_purchase_orders_order_date', 'purchase_orders', ['order_date'])
op.create_index('ix_purchase_orders_delivery_date', 'purchase_orders', ['required_delivery_date'])
op.create_index('ix_purchase_orders_status', 'purchase_orders', ['status'])

op.create_index('ix_po_items_tenant_id', 'purchase_order_items', ['tenant_id'])
op.create_index('ix_po_items_purchase_order_id', 'purchase_order_items', ['purchase_order_id'])
op.create_index('ix_po_items_tenant_po', 'purchase_order_items', ['tenant_id', 'purchase_order_id'])
op.create_index('ix_po_items_ingredient', 'purchase_order_items', ['ingredient_id'])

op.create_index('ix_deliveries_tenant_id', 'deliveries', ['tenant_id'])
op.create_index('ix_deliveries_tenant_status', 'deliveries', ['tenant_id', 'status'])
op.create_index('ix_deliveries_scheduled_date', 'deliveries', ['scheduled_date'])
op.create_index('ix_deliveries_delivery_number', 'deliveries', ['delivery_number'])

op.create_index('ix_delivery_items_tenant_id', 'delivery_items', ['tenant_id'])
op.create_index('ix_delivery_items_delivery_id', 'delivery_items', ['delivery_id'])
op.create_index('ix_delivery_items_tenant_delivery', 'delivery_items', ['tenant_id', 'delivery_id'])
op.create_index('ix_delivery_items_ingredient', 'delivery_items', ['ingredient_id'])

op.create_index('ix_quality_reviews_tenant_id', 'supplier_quality_reviews', ['tenant_id'])
op.create_index('ix_quality_reviews_supplier_id', 'supplier_quality_reviews', ['supplier_id'])
op.create_index('ix_quality_reviews_tenant_supplier', 'supplier_quality_reviews', ['tenant_id', 'supplier_id'])
op.create_index('ix_quality_reviews_date', 'supplier_quality_reviews', ['review_date'])
op.create_index('ix_quality_reviews_overall_rating', 'supplier_quality_reviews', ['overall_rating'])

op.create_index('ix_invoices_tenant_id', 'supplier_invoices', ['tenant_id'])
op.create_index('ix_invoices_supplier_id', 'supplier_invoices', ['supplier_id'])
op.create_index('ix_invoices_tenant_supplier', 'supplier_invoices', ['tenant_id', 'supplier_id'])
op.create_index('ix_invoices_tenant_status', 'supplier_invoices', ['tenant_id', 'status'])
op.create_index('ix_invoices_due_date', 'supplier_invoices', ['due_date'])
op.create_index('ix_invoices_invoice_number', 'supplier_invoices', ['invoice_number'])


def downgrade() -> None:
# Drop indexes
op.drop_index('ix_invoices_invoice_number', 'supplier_invoices')
op.drop_index('ix_invoices_due_date', 'supplier_invoices')
op.drop_index('ix_invoices_tenant_status', 'supplier_invoices')
op.drop_index('ix_invoices_tenant_supplier', 'supplier_invoices')
op.drop_index('ix_invoices_supplier_id', 'supplier_invoices')
op.drop_index('ix_invoices_tenant_id', 'supplier_invoices')

op.drop_index('ix_quality_reviews_overall_rating', 'supplier_quality_reviews')
op.drop_index('ix_quality_reviews_date', 'supplier_quality_reviews')
op.drop_index('ix_quality_reviews_tenant_supplier', 'supplier_quality_reviews')
op.drop_index('ix_quality_reviews_supplier_id', 'supplier_quality_reviews')
op.drop_index('ix_quality_reviews_tenant_id', 'supplier_quality_reviews')

op.drop_index('ix_delivery_items_ingredient', 'delivery_items')
op.drop_index('ix_delivery_items_tenant_delivery', 'delivery_items')
op.drop_index('ix_delivery_items_delivery_id', 'delivery_items')
op.drop_index('ix_delivery_items_tenant_id', 'delivery_items')

op.drop_index('ix_deliveries_delivery_number', 'deliveries')
op.drop_index('ix_deliveries_scheduled_date', 'deliveries')
op.drop_index('ix_deliveries_tenant_status', 'deliveries')
op.drop_index('ix_deliveries_tenant_id', 'deliveries')

op.drop_index('ix_po_items_ingredient', 'purchase_order_items')
op.drop_index('ix_po_items_tenant_po', 'purchase_order_items')
op.drop_index('ix_po_items_purchase_order_id', 'purchase_order_items')
op.drop_index('ix_po_items_tenant_id', 'purchase_order_items')
|
||||
|
||||
op.drop_index('ix_purchase_orders_status', 'purchase_orders')
|
||||
op.drop_index('ix_purchase_orders_delivery_date', 'purchase_orders')
|
||||
op.drop_index('ix_purchase_orders_order_date', 'purchase_orders')
|
||||
op.drop_index('ix_purchase_orders_po_number', 'purchase_orders')
|
||||
op.drop_index('ix_purchase_orders_tenant_status', 'purchase_orders')
|
||||
op.drop_index('ix_purchase_orders_tenant_supplier', 'purchase_orders')
|
||||
op.drop_index('ix_purchase_orders_supplier_id', 'purchase_orders')
|
||||
op.drop_index('ix_purchase_orders_tenant_id', 'purchase_orders')
|
||||
|
||||
op.drop_index('ix_price_lists_effective_date', 'supplier_price_lists')
|
||||
op.drop_index('ix_price_lists_active', 'supplier_price_lists')
|
||||
op.drop_index('ix_price_lists_ingredient', 'supplier_price_lists')
|
||||
op.drop_index('ix_price_lists_tenant_supplier', 'supplier_price_lists')
|
||||
op.drop_index('ix_price_lists_supplier_id', 'supplier_price_lists')
|
||||
op.drop_index('ix_price_lists_tenant_id', 'supplier_price_lists')
|
||||
|
||||
op.drop_index('ix_suppliers_supplier_type', 'suppliers')
|
||||
op.drop_index('ix_suppliers_status', 'suppliers')
|
||||
op.drop_index('ix_suppliers_quality_rating', 'suppliers')
|
||||
op.drop_index('ix_suppliers_tenant_type', 'suppliers')
|
||||
op.drop_index('ix_suppliers_tenant_status', 'suppliers')
|
||||
op.drop_index('ix_suppliers_tenant_name', 'suppliers')
|
||||
op.drop_index('ix_suppliers_name', 'suppliers')
|
||||
op.drop_index('ix_suppliers_tenant_id', 'suppliers')
|
||||
|
||||
# Drop tables
|
||||
op.drop_table('supplier_invoices')
|
||||
op.drop_table('supplier_quality_reviews')
|
||||
op.drop_table('delivery_items')
|
||||
op.drop_table('deliveries')
|
||||
op.drop_table('purchase_order_items')
|
||||
op.drop_table('purchase_orders')
|
||||
op.drop_table('supplier_price_lists')
|
||||
op.drop_table('suppliers')
|
||||
|
||||
# Drop enums
|
||||
op.execute('DROP TYPE IF EXISTS invoicestatus')
|
||||
op.execute('DROP TYPE IF EXISTS deliveryrating')
|
||||
op.execute('DROP TYPE IF EXISTS qualityrating')
|
||||
op.execute('DROP TYPE IF EXISTS deliverystatus')
|
||||
op.execute('DROP TYPE IF EXISTS purchaseorderstatus')
|
||||
op.execute('DROP TYPE IF EXISTS paymentterms')
|
||||
op.execute('DROP TYPE IF EXISTS supplierstatus')
|
||||
op.execute('DROP TYPE IF EXISTS suppliertype')
|
||||
@@ -0,0 +1,151 @@
|
||||
"""Standardize product references to inventory_product_id
|
||||
|
||||
Revision ID: 001_standardize_product_references
|
||||
Revises:
|
||||
Create Date: 2025-01-15 12:00:00.000000
|
||||
|
||||
This migration standardizes product references across the suppliers service by:
|
||||
1. Renaming ingredient_id columns to inventory_product_id
|
||||
2. Removing redundant product_name columns where UUID references exist
|
||||
3. Updating indexes to match new column names
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects.postgresql import UUID
|
||||
|
||||
|
||||
# revision identifiers
|
||||
revision = '001_standardize_product_references'
|
||||
down_revision = None
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
"""Apply the changes to standardize product references"""
|
||||
|
||||
# 1. Update supplier_price_lists table
|
||||
print("Updating supplier_price_lists table...")
|
||||
|
||||
# Rename ingredient_id to inventory_product_id
|
||||
op.alter_column('supplier_price_lists', 'ingredient_id',
|
||||
new_column_name='inventory_product_id')
|
||||
|
||||
# Drop the product_name column (redundant with UUID reference)
|
||||
op.drop_column('supplier_price_lists', 'product_name')
|
||||
|
||||
# Update index name
|
||||
op.drop_index('ix_price_lists_ingredient')
|
||||
op.create_index('ix_price_lists_inventory_product', 'supplier_price_lists',
|
||||
['inventory_product_id'])
|
||||
|
||||
|
||||
# 2. Update purchase_order_items table
|
||||
print("Updating purchase_order_items table...")
|
||||
|
||||
# Rename ingredient_id to inventory_product_id
|
||||
op.alter_column('purchase_order_items', 'ingredient_id',
|
||||
new_column_name='inventory_product_id')
|
||||
|
||||
# Drop the product_name column (redundant with UUID reference)
|
||||
op.drop_column('purchase_order_items', 'product_name')
|
||||
|
||||
# Update index name
|
||||
op.drop_index('ix_po_items_ingredient')
|
||||
op.create_index('ix_po_items_inventory_product', 'purchase_order_items',
|
||||
['inventory_product_id'])
|
||||
|
||||
|
||||
# 3. Update delivery_items table
|
||||
print("Updating delivery_items table...")
|
||||
|
||||
# Rename ingredient_id to inventory_product_id
|
||||
op.alter_column('delivery_items', 'ingredient_id',
|
||||
new_column_name='inventory_product_id')
|
||||
|
||||
# Drop the product_name column (redundant with UUID reference)
|
||||
op.drop_column('delivery_items', 'product_name')
|
||||
|
||||
# Update index name
|
||||
op.drop_index('ix_delivery_items_ingredient')
|
||||
op.create_index('ix_delivery_items_inventory_product', 'delivery_items',
|
||||
['inventory_product_id'])
|
||||
|
||||
print("Migration completed successfully!")


def downgrade():
"""Revert the changes (for rollback purposes)"""

print("Rolling back product reference standardization...")

# 1. Revert delivery_items table
print("Reverting delivery_items table...")

# Revert index name
op.drop_index('ix_delivery_items_inventory_product')
op.create_index('ix_delivery_items_ingredient', 'delivery_items',
['inventory_product_id']) # Will rename back to ingredient_id below

# Add back product_name column (will be empty initially)
op.add_column('delivery_items',
sa.Column('product_name', sa.String(255), nullable=False,
server_default='Unknown Product'))

# Rename inventory_product_id back to ingredient_id
op.alter_column('delivery_items', 'inventory_product_id',
new_column_name='ingredient_id')

# Update index to use ingredient_id
op.drop_index('ix_delivery_items_ingredient')
op.create_index('ix_delivery_items_ingredient', 'delivery_items',
['ingredient_id'])


# 2. Revert purchase_order_items table
print("Reverting purchase_order_items table...")

# Revert index name
op.drop_index('ix_po_items_inventory_product')
op.create_index('ix_po_items_ingredient', 'purchase_order_items',
['inventory_product_id']) # Will rename back to ingredient_id below

# Add back product_name column (will be empty initially)
op.add_column('purchase_order_items',
sa.Column('product_name', sa.String(255), nullable=False,
server_default='Unknown Product'))

# Rename inventory_product_id back to ingredient_id
op.alter_column('purchase_order_items', 'inventory_product_id',
new_column_name='ingredient_id')

# Update index to use ingredient_id
op.drop_index('ix_po_items_ingredient')
op.create_index('ix_po_items_ingredient', 'purchase_order_items',
['ingredient_id'])


# 3. Revert supplier_price_lists table
print("Reverting supplier_price_lists table...")

# Revert index name
op.drop_index('ix_price_lists_inventory_product')
op.create_index('ix_price_lists_ingredient', 'supplier_price_lists',
['inventory_product_id']) # Will rename back to ingredient_id below

# Add back product_name column (will be empty initially)
op.add_column('supplier_price_lists',
sa.Column('product_name', sa.String(255), nullable=False,
server_default='Unknown Product'))

# Rename inventory_product_id back to ingredient_id
op.alter_column('supplier_price_lists', 'inventory_product_id',
new_column_name='ingredient_id')

# Update index to use ingredient_id
op.drop_index('ix_price_lists_ingredient')
op.create_index('ix_price_lists_ingredient', 'supplier_price_lists',
['ingredient_id'])

print("Rollback completed successfully!")

@@ -28,22 +28,22 @@ router = APIRouter()

training_service = TrainingService()

@router.get("/tenants/{tenant_id}/models/{product_name}/active")
@router.get("/tenants/{tenant_id}/models/{inventory_product_id}/active")
async def get_active_model(
tenant_id: str = Path(..., description="Tenant ID"),
product_name: str = Path(..., description="Product name"),
inventory_product_id: str = Path(..., description="Inventory product UUID"),
db: AsyncSession = Depends(get_db)
):
"""
Get the active model for a product - used by forecasting service
"""
try:
logger.debug("Getting active model", tenant_id=tenant_id, product_name=product_name)
logger.debug("Getting active model", tenant_id=tenant_id, inventory_product_id=inventory_product_id)
# ✅ FIX: Wrap SQL with text() for SQLAlchemy 2.0; the active-model lookup now filters on inventory_product_id
query = text("""
SELECT * FROM trained_models
WHERE tenant_id = :tenant_id
AND LOWER(product_name) = LOWER(:product_name)
AND inventory_product_id = :inventory_product_id
AND is_active = true
AND is_production = true
ORDER BY created_at DESC
@@ -52,16 +52,16 @@ async def get_active_model(

result = await db.execute(query, {
"tenant_id": tenant_id,
"product_name": product_name
"inventory_product_id": inventory_product_id
})

model_record = result.fetchone()

if not model_record:
logger.info("No active model found", tenant_id=tenant_id, product_name=product_name)
logger.info("No active model found", tenant_id=tenant_id, inventory_product_id=inventory_product_id)
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail=f"No active model found for product {product_name}"
detail=f"No active model found for product {inventory_product_id}"
)

# ✅ FIX: Wrap update query with text() too
@@ -99,11 +99,11 @@ async def get_active_model(
raise
except Exception as e:
error_msg = str(e) if str(e) else f"{type(e).__name__}: {repr(e)}"
logger.error(f"Failed to get active model: {error_msg}", tenant_id=tenant_id, product_name=product_name)
logger.error(f"Failed to get active model: {error_msg}", tenant_id=tenant_id, inventory_product_id=inventory_product_id)

# Handle client disconnection gracefully
if "EndOfStream" in str(type(e)) or "WouldBlock" in str(type(e)):
logger.info("Client disconnected during model retrieval", tenant_id=tenant_id, product_name=product_name)
logger.info("Client disconnected during model retrieval", tenant_id=tenant_id, inventory_product_id=inventory_product_id)
raise HTTPException(
status_code=status.HTTP_408_REQUEST_TIMEOUT,
detail="Request connection closed"

@@ -205,7 +205,7 @@ async def list_models(
models.append({
"model_id": str(record.id),
"tenant_id": str(record.tenant_id),
"product_name": record.product_name,
"inventory_product_id": str(record.inventory_product_id),
"model_type": record.model_type,
"model_path": record.model_path,
"version": 1, # Default version

@@ -291,12 +291,12 @@ async def execute_enhanced_training_job_background(
job_id=job_id)


@router.post("/tenants/{tenant_id}/training/products/{product_name}", response_model=TrainingJobResponse)
@router.post("/tenants/{tenant_id}/training/products/{inventory_product_id}", response_model=TrainingJobResponse)
@track_execution_time("enhanced_single_product_training_duration_seconds", "training-service")
async def start_enhanced_single_product_training(
request: SingleProductTrainingRequest,
tenant_id: str = Path(..., description="Tenant ID"),
product_name: str = Path(..., description="Product name"),
inventory_product_id: str = Path(..., description="Inventory product UUID"),
request_obj: Request = None,
current_tenant: str = Depends(get_current_tenant_id_dep),
enhanced_training_service: EnhancedTrainingService = Depends(get_enhanced_training_service)
@@ -323,7 +323,7 @@ async def start_enhanced_single_product_training(
)

logger.info("Starting enhanced single product training",
product_name=product_name,
inventory_product_id=inventory_product_id,
tenant_id=tenant_id)

# Record metrics
@@ -331,12 +331,12 @@ async def start_enhanced_single_product_training(
metrics.increment_counter("enhanced_single_product_training_total")

# Generate enhanced job ID
job_id = f"enhanced_single_{tenant_id}_{product_name}_{uuid.uuid4().hex[:8]}"
job_id = f"enhanced_single_{tenant_id}_{inventory_product_id}_{uuid.uuid4().hex[:8]}"

# Delegate to enhanced training service (single product method to be implemented)
result = await enhanced_training_service.start_single_product_training(
tenant_id=tenant_id,
product_name=product_name,
inventory_product_id=inventory_product_id,
job_id=job_id,
bakery_location=request.bakery_location or (40.4168, -3.7038)
)
@@ -345,7 +345,7 @@ async def start_enhanced_single_product_training(
metrics.increment_counter("enhanced_single_product_training_success_total")

logger.info("Enhanced single product training completed",
product_name=product_name,
inventory_product_id=inventory_product_id,
job_id=job_id)

return TrainingJobResponse(**result)
@@ -355,7 +355,7 @@ async def start_enhanced_single_product_training(
metrics.increment_counter("enhanced_single_product_validation_errors_total")
logger.error("Enhanced single product training validation error",
error=str(e),
product_name=product_name)
inventory_product_id=inventory_product_id)
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=str(e)
@@ -365,7 +365,7 @@ async def start_enhanced_single_product_training(
metrics.increment_counter("enhanced_single_product_training_errors_total")
logger.error("Enhanced single product training failed",
error=str(e),
product_name=product_name)
inventory_product_id=inventory_product_id)
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Enhanced single product training failed"

@@ -62,7 +62,7 @@ class EnhancedBakeryDataProcessor:
sales_data: pd.DataFrame,
weather_data: pd.DataFrame,
traffic_data: pd.DataFrame,
product_name: str,
inventory_product_id: str,
tenant_id: str = None,
job_id: str = None,
session=None) -> pd.DataFrame:
@@ -73,7 +73,7 @@ class EnhancedBakeryDataProcessor:
sales_data: Historical sales data for the product
weather_data: Weather data
traffic_data: Traffic data
product_name: Product name for logging
inventory_product_id: Inventory product UUID for logging
tenant_id: Optional tenant ID for tracking
job_id: Optional job ID for tracking

@@ -82,7 +82,7 @@ class EnhancedBakeryDataProcessor:
"""
try:
logger.info("Preparing enhanced training data using repository pattern",
product_name=product_name,
inventory_product_id=inventory_product_id,
tenant_id=tenant_id,
job_id=job_id)

@@ -93,11 +93,11 @@ class EnhancedBakeryDataProcessor:
# Log data preparation start if we have tracking info
if job_id and tenant_id:
await repos['training_log'].update_log_progress(
job_id, 15, f"preparing_data_{product_name}", "running"
job_id, 15, f"preparing_data_{inventory_product_id}", "running"
)

# Step 1: Convert and validate sales data
sales_clean = await self._process_sales_data(sales_data, product_name)
sales_clean = await self._process_sales_data(sales_data, inventory_product_id)

# FIX: Ensure timezone awareness before any operations
sales_clean = self._ensure_timezone_aware(sales_clean)
@@ -129,32 +129,32 @@ class EnhancedBakeryDataProcessor:
# Step 9: Store processing metadata if we have a tenant
if tenant_id:
await self._store_processing_metadata(
repos, tenant_id, product_name, prophet_data, job_id
repos, tenant_id, inventory_product_id, prophet_data, job_id
)

logger.info("Enhanced training data prepared successfully",
product_name=product_name,
inventory_product_id=inventory_product_id,
data_points=len(prophet_data))

return prophet_data

except Exception as e:
logger.error("Error preparing enhanced training data",
product_name=product_name,
inventory_product_id=inventory_product_id,
error=str(e))
raise

async def _store_processing_metadata(self,
repos: Dict,
tenant_id: str,
product_name: str,
inventory_product_id: str,
processed_data: pd.DataFrame,
job_id: str = None):
"""Store data processing metadata using repository"""
try:
# Create processing metadata
metadata = {
"product_name": product_name,
"inventory_product_id": inventory_product_id,
"data_points": len(processed_data),
"date_range": {
"start": processed_data['ds'].min().isoformat(),
@@ -167,7 +167,7 @@ class EnhancedBakeryDataProcessor:
# Log processing completion
if job_id:
await repos['training_log'].update_log_progress(
job_id, 25, f"data_prepared_{product_name}", "running"
job_id, 25, f"data_prepared_{inventory_product_id}", "running"
)

except Exception as e:
@@ -270,7 +270,7 @@ class EnhancedBakeryDataProcessor:
logger.warning("Date alignment failed, using original data", error=str(e))
return sales_data

async def _process_sales_data(self, sales_data: pd.DataFrame, product_name: str) -> pd.DataFrame:
async def _process_sales_data(self, sales_data: pd.DataFrame, inventory_product_id: str) -> pd.DataFrame:
"""Process and clean sales data with enhanced validation"""
sales_clean = sales_data.copy()

@@ -305,9 +305,9 @@ class EnhancedBakeryDataProcessor:
sales_clean = sales_clean.dropna(subset=['quantity'])
sales_clean = sales_clean[sales_clean['quantity'] >= 0] # No negative sales

# Filter for the specific product if product_name column exists
if 'product_name' in sales_clean.columns:
sales_clean = sales_clean[sales_clean['product_name'] == product_name]
# Filter for the specific product if inventory_product_id column exists
if 'inventory_product_id' in sales_clean.columns:
sales_clean = sales_clean[sales_clean['inventory_product_id'] == inventory_product_id]

# Remove duplicate dates (keep the one with highest quantity)
sales_clean = sales_clean.sort_values(['date', 'quantity'], ascending=[True, False])

@@ -52,7 +52,7 @@ class BakeryProphetManager:

async def train_bakery_model(self,
tenant_id: str,
product_name: str,
inventory_product_id: str,
df: pd.DataFrame,
job_id: str) -> Dict[str, Any]:
"""
@@ -60,10 +60,10 @@ class BakeryProphetManager:
Same interface as before - optimization happens automatically.
"""
try:
logger.info(f"Training optimized bakery model for {product_name}")
logger.info(f"Training optimized bakery model for {inventory_product_id}")

# Validate input data
await self._validate_training_data(df, product_name)
await self._validate_training_data(df, inventory_product_id)

# Prepare data for Prophet
prophet_data = await self._prepare_prophet_data(df)
@@ -72,8 +72,8 @@ class BakeryProphetManager:
regressor_columns = self._extract_regressor_columns(prophet_data)

# Automatically optimize hyperparameters (this is the new part)
logger.info(f"Optimizing hyperparameters for {product_name}...")
best_params = await self._optimize_hyperparameters(prophet_data, product_name, regressor_columns)
logger.info(f"Optimizing hyperparameters for {inventory_product_id}...")
best_params = await self._optimize_hyperparameters(prophet_data, inventory_product_id, regressor_columns)

# Create optimized Prophet model
model = self._create_optimized_prophet_model(best_params, regressor_columns)
@@ -92,7 +92,7 @@ class BakeryProphetManager:
# Store model and metrics - Generate proper UUID for model_id
model_id = str(uuid.uuid4())
model_path = await self._store_model(
tenant_id, product_name, model, model_id, prophet_data, regressor_columns, best_params, training_metrics
tenant_id, inventory_product_id, model, model_id, prophet_data, regressor_columns, best_params, training_metrics
)

# Return same format as before, but with optimization info
@@ -112,17 +112,17 @@ class BakeryProphetManager:
}
}

logger.info(f"Optimized model trained successfully for {product_name}. "
logger.info(f"Optimized model trained successfully for {inventory_product_id}. "
f"MAPE: {training_metrics.get('optimized_mape', 'N/A')}%")
return model_info

except Exception as e:
logger.error(f"Failed to train optimized bakery model for {product_name}: {str(e)}")
logger.error(f"Failed to train optimized bakery model for {inventory_product_id}: {str(e)}")
raise

async def _optimize_hyperparameters(self,
df: pd.DataFrame,
product_name: str,
inventory_product_id: str,
regressor_columns: List[str]) -> Dict[str, Any]:
"""
Automatically optimize Prophet hyperparameters using Bayesian optimization.
@@ -130,7 +130,7 @@ class BakeryProphetManager:
"""

# Determine product category automatically
product_category = self._classify_product(product_name, df)
product_category = self._classify_product(inventory_product_id, df)

# Set optimization parameters based on category
n_trials = {
@@ -140,7 +140,7 @@ class BakeryProphetManager:
'intermittent': 15 # Reduced from 25
}.get(product_category, 25)

logger.info(f"Product {product_name} classified as {product_category}, using {n_trials} trials")
logger.info(f"Product {inventory_product_id} classified as {product_category}, using {n_trials} trials")

# Check data quality and adjust strategy
total_sales = df['y'].sum()
@@ -148,12 +148,12 @@ class BakeryProphetManager:
mean_sales = df['y'].mean()
non_zero_days = len(df[df['y'] > 0])

logger.info(f"Data analysis for {product_name}: total_sales={total_sales:.1f}, "
logger.info(f"Data analysis for {inventory_product_id}: total_sales={total_sales:.1f}, "
f"zero_ratio={zero_ratio:.2f}, mean_sales={mean_sales:.2f}, non_zero_days={non_zero_days}")

# Adjust strategy based on data characteristics
if zero_ratio > 0.8 or non_zero_days < 30:
logger.warning(f"Very sparse data for {product_name}, using minimal optimization")
logger.warning(f"Very sparse data for {inventory_product_id}, using minimal optimization")
return {
'changepoint_prior_scale': 0.001,
'seasonality_prior_scale': 0.01,
@@ -166,7 +166,7 @@ class BakeryProphetManager:
'uncertainty_samples': 100 # ✅ FIX: Minimal uncertainty sampling for very sparse data
}
elif zero_ratio > 0.6:
logger.info(f"Moderate sparsity for {product_name}, using conservative optimization")
logger.info(f"Moderate sparsity for {inventory_product_id}, using conservative optimization")
return {
'changepoint_prior_scale': 0.01,
'seasonality_prior_scale': 0.1,
@@ -180,7 +180,7 @@ class BakeryProphetManager:
}

# Use unique seed for each product to avoid identical results
product_seed = hash(product_name) % 10000
product_seed = hash(str(inventory_product_id)) % 10000

def objective(trial):
try:
@@ -284,13 +284,13 @@ class BakeryProphetManager:
cv_scores.append(mape_like)

except Exception as fold_error:
logger.debug(f"Fold failed for {product_name} trial {trial.number}: {str(fold_error)}")
logger.debug(f"Fold failed for {inventory_product_id} trial {trial.number}: {str(fold_error)}")
continue

return np.mean(cv_scores) if len(cv_scores) > 0 else 100.0

except Exception as trial_error:
logger.debug(f"Trial {trial.number} failed for {product_name}: {str(trial_error)}")
logger.debug(f"Trial {trial.number} failed for {inventory_product_id}: {str(trial_error)}")
return 100.0

# Run optimization with product-specific seed
@@ -304,19 +304,19 @@ class BakeryProphetManager:
best_params = study.best_params
best_score = study.best_value

logger.info(f"Optimization completed for {product_name}. Best score: {best_score:.2f}%. "
logger.info(f"Optimization completed for {inventory_product_id}. Best score: {best_score:.2f}%. "
f"Parameters: {best_params}")

# ✅ FIX: Log uncertainty sampling configuration for debugging confidence intervals
uncertainty_samples = best_params.get('uncertainty_samples', 500)
logger.info(f"Prophet model will use {uncertainty_samples} uncertainty samples for {product_name} "
logger.info(f"Prophet model will use {uncertainty_samples} uncertainty samples for {inventory_product_id} "
f"(category: {product_category}, zero_ratio: {zero_ratio:.2f})")

return best_params
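One caveat with the per-product seed above: Python's built-in hash() is salted per process for strings, so hash(str(inventory_product_id)) % 10000 changes between runs unless PYTHONHASHSEED is pinned. A minimal alternative if reproducible seeding is wanted (an assumption, not part of this commit):

import hashlib

def stable_product_seed(inventory_product_id: str) -> int:
    # sha256 is deterministic across processes, unlike the salted built-in hash()
    digest = hashlib.sha256(str(inventory_product_id).encode("utf-8")).hexdigest()
    return int(digest, 16) % 10000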

def _classify_product(self, product_name: str, sales_data: pd.DataFrame) -> str:
def _classify_product(self, inventory_product_id: str, sales_data: pd.DataFrame) -> str:
"""Automatically classify product for optimization strategy - improved for bakery data"""
product_lower = product_name.lower()
product_lower = str(inventory_product_id).lower()

# Calculate sales statistics
total_sales = sales_data['y'].sum()
@@ -324,7 +324,7 @@ class BakeryProphetManager:
zero_ratio = (sales_data['y'] == 0).sum() / len(sales_data)
non_zero_days = len(sales_data[sales_data['y'] > 0])

logger.info(f"Product classification for {product_name}: total_sales={total_sales:.1f}, "
logger.info(f"Product classification for {inventory_product_id}: total_sales={total_sales:.1f}, "
f"mean_sales={mean_sales:.2f}, zero_ratio={zero_ratio:.2f}, non_zero_days={non_zero_days}")

# Improved classification logic for bakery products
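Worth noting: product_lower is now the lowercased UUID, so keyword matching on it no longer carries signal, and classification presumably has to lean on the sales statistics alone. An illustrative sketch under that assumption (thresholds and every category name other than 'intermittent' are hypothetical):

def classify_by_stats(mean_sales: float, zero_ratio: float, non_zero_days: int) -> str:
    # Sparse or rarely sold products get the lightweight optimization path
    if zero_ratio > 0.6 or non_zero_days < 60:
        return 'intermittent'
    if mean_sales >= 20:
        return 'high_volume'  # hypothetical category name
    return 'standard'  # hypothetical category name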
|
||||
@@ -499,7 +499,7 @@ class BakeryProphetManager:
|
||||
|
||||
async def _store_model(self,
|
||||
tenant_id: str,
|
||||
product_name: str,
|
||||
inventory_product_id: str,
|
||||
model: Prophet,
|
||||
model_id: str,
|
||||
training_data: pd.DataFrame,
|
||||
@@ -520,7 +520,7 @@ class BakeryProphetManager:
|
||||
metadata = {
|
||||
"model_id": model_id,
|
||||
"tenant_id": tenant_id,
|
||||
"product_name": product_name,
|
||||
"inventory_product_id": inventory_product_id,
|
||||
"regressor_columns": regressor_columns,
|
||||
"training_samples": len(training_data),
|
||||
"data_period": {
|
||||
@@ -539,7 +539,7 @@ class BakeryProphetManager:
|
||||
json.dump(metadata, f, indent=2, default=str)
|
||||
|
||||
# Store in memory
|
||||
model_key = f"{tenant_id}:{product_name}"
|
||||
model_key = f"{tenant_id}:{inventory_product_id}"
|
||||
self.models[model_key] = model
|
||||
self.model_metadata[model_key] = metadata
|
||||
|
||||
@@ -547,13 +547,13 @@ class BakeryProphetManager:
|
||||
try:
|
||||
async with self.database_manager.get_session() as db_session:
|
||||
# Deactivate previous models for this product
|
||||
await self._deactivate_previous_models_with_session(db_session, tenant_id, product_name)
|
||||
await self._deactivate_previous_models_with_session(db_session, tenant_id, inventory_product_id)
|
||||
|
||||
# Create new database record
|
||||
db_model = TrainedModel(
|
||||
id=model_id,
|
||||
tenant_id=tenant_id,
|
||||
product_name=product_name,
|
||||
inventory_product_id=inventory_product_id,
|
||||
model_type="prophet_optimized",
|
||||
job_id=model_id.split('_')[0], # Extract job_id from model_id
|
||||
model_path=str(model_path),
|
||||
@@ -587,23 +587,23 @@ class BakeryProphetManager:
|
||||
logger.info(f"Optimized model stored at: {model_path}")
|
||||
return str(model_path)
|
||||
|
||||
async def _deactivate_previous_models_with_session(self, db_session, tenant_id: str, product_name: str):
|
||||
async def _deactivate_previous_models_with_session(self, db_session, tenant_id: str, inventory_product_id: str):
|
||||
"""Deactivate previous models for the same product using provided session"""
|
||||
try:
|
||||
# ✅ FIX: Wrap SQL string with text() for SQLAlchemy 2.0
|
||||
query = text("""
|
||||
UPDATE trained_models
|
||||
SET is_active = false, is_production = false
|
||||
WHERE tenant_id = :tenant_id AND product_name = :product_name
|
||||
WHERE tenant_id = :tenant_id AND inventory_product_id = :inventory_product_id
|
||||
""")
|
||||
|
||||
await db_session.execute(query, {
|
||||
"tenant_id": tenant_id,
|
||||
"product_name": product_name
|
||||
"inventory_product_id": inventory_product_id
|
||||
})
|
||||
|
||||
# Note: Don't commit here, let the calling method handle the transaction
|
||||
logger.info(f"Successfully deactivated previous models for {product_name}")
|
||||
logger.info(f"Successfully deactivated previous models for {inventory_product_id}")
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to deactivate previous models: {str(e)}")
|
||||
@@ -630,14 +630,14 @@ class BakeryProphetManager:
|
||||
logger.error(f"Failed to generate forecast: {str(e)}")
|
||||
raise
|
||||
|
||||
async def _validate_training_data(self, df: pd.DataFrame, product_name: str):
|
||||
async def _validate_training_data(self, df: pd.DataFrame, inventory_product_id: str):
|
||||
"""Validate training data quality (unchanged)"""
|
||||
if df.empty:
|
||||
raise ValueError(f"No training data available for {product_name}")
|
||||
raise ValueError(f"No training data available for {inventory_product_id}")
|
||||
|
||||
if len(df) < settings.MIN_TRAINING_DATA_DAYS:
|
||||
raise ValueError(
|
||||
f"Insufficient training data for {product_name}: "
|
||||
f"Insufficient training data for {inventory_product_id}: "
|
||||
f"{len(df)} days, minimum required: {settings.MIN_TRAINING_DATA_DAYS}"
|
||||
)
|
||||
|
||||
|
||||
@@ -91,7 +91,7 @@ class EnhancedBakeryMLTrainer:
|
||||
await self._validate_input_data(sales_df, tenant_id)
|
||||
|
||||
# Get unique products from the sales data
|
||||
products = sales_df['product_name'].unique().tolist()
|
||||
products = sales_df['inventory_product_id'].unique().tolist()
|
||||
logger.info("Training enhanced models",
|
||||
products_count=len(products),
|
||||
products=products)
|
||||
@@ -183,17 +183,17 @@ class EnhancedBakeryMLTrainer:
|
||||
"""Process data for all products using enhanced processor with repository tracking"""
|
||||
processed_data = {}
|
||||
|
||||
for product_name in products:
|
||||
for inventory_product_id in products:
|
||||
try:
|
||||
logger.info("Processing data for product using enhanced processor",
|
||||
product_name=product_name)
|
||||
inventory_product_id=inventory_product_id)
|
||||
|
||||
# Filter sales data for this product
|
||||
product_sales = sales_df[sales_df['product_name'] == product_name].copy()
|
||||
product_sales = sales_df[sales_df['inventory_product_id'] == inventory_product_id].copy()
|
||||
|
||||
if product_sales.empty:
|
||||
logger.warning("No sales data found for product",
|
||||
product_name=product_name)
|
||||
inventory_product_id=inventory_product_id)
|
||||
continue
|
||||
|
||||
# Use enhanced data processor with repository tracking
|
||||
@@ -201,19 +201,19 @@ class EnhancedBakeryMLTrainer:
|
||||
sales_data=product_sales,
|
||||
weather_data=weather_df,
|
||||
traffic_data=traffic_df,
|
||||
product_name=product_name,
|
||||
inventory_product_id=inventory_product_id,
|
||||
tenant_id=tenant_id,
|
||||
job_id=job_id
|
||||
)
|
||||
|
||||
processed_data[product_name] = processed_product_data
|
||||
processed_data[inventory_product_id] = processed_product_data
|
||||
logger.info("Enhanced processing completed",
|
||||
product_name=product_name,
|
||||
inventory_product_id=inventory_product_id,
|
||||
data_points=len(processed_product_data))
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to process data using enhanced processor",
|
||||
product_name=product_name,
|
||||
inventory_product_id=inventory_product_id,
|
||||
error=str(e))
|
||||
continue
|
||||
|
||||
@@ -231,15 +231,15 @@ class EnhancedBakeryMLTrainer:
|
||||
base_progress = 45
|
||||
max_progress = 85
|
||||
|
||||
for product_name, product_data in processed_data.items():
|
||||
for inventory_product_id, product_data in processed_data.items():
|
||||
product_start_time = time.time()
|
||||
try:
|
||||
logger.info("Training enhanced model",
|
||||
product_name=product_name)
|
||||
inventory_product_id=inventory_product_id)
|
||||
|
||||
# Check if we have enough data
|
||||
if len(product_data) < settings.MIN_TRAINING_DATA_DAYS:
|
||||
training_results[product_name] = {
|
||||
training_results[inventory_product_id] = {
|
||||
'status': 'skipped',
|
||||
'reason': 'insufficient_data',
|
||||
'data_points': len(product_data),
|
||||
@@ -247,7 +247,7 @@ class EnhancedBakeryMLTrainer:
|
||||
'message': f'Need at least {settings.MIN_TRAINING_DATA_DAYS} data points, got {len(product_data)}'
|
||||
}
|
||||
logger.warning("Skipping product due to insufficient data",
|
||||
product_name=product_name,
|
||||
inventory_product_id=inventory_product_id,
|
||||
data_points=len(product_data),
|
||||
min_required=settings.MIN_TRAINING_DATA_DAYS)
|
||||
continue
|
||||
@@ -255,24 +255,24 @@ class EnhancedBakeryMLTrainer:
|
||||
# Train the model using Prophet manager
|
||||
model_info = await self.prophet_manager.train_bakery_model(
|
||||
tenant_id=tenant_id,
|
||||
product_name=product_name,
|
||||
inventory_product_id=inventory_product_id,
|
||||
df=product_data,
|
||||
job_id=job_id
|
||||
)
|
||||
|
||||
# Store model record using repository
|
||||
model_record = await self._create_model_record(
|
||||
repos, tenant_id, product_name, model_info, job_id, product_data
|
||||
repos, tenant_id, inventory_product_id, model_info, job_id, product_data
|
||||
)
|
||||
|
||||
# Create performance metrics record
|
||||
if model_info.get('training_metrics'):
|
||||
await self._create_performance_metrics(
|
||||
repos, model_record.id if model_record else None,
|
||||
tenant_id, product_name, model_info['training_metrics']
|
||||
tenant_id, inventory_product_id, model_info['training_metrics']
|
||||
)
|
||||
|
||||
training_results[product_name] = {
|
||||
training_results[inventory_product_id] = {
|
||||
'status': 'success',
|
||||
'model_info': model_info,
|
||||
'model_record_id': model_record.id if model_record else None,
|
||||
@@ -282,7 +282,7 @@ class EnhancedBakeryMLTrainer:
|
||||
}
|
||||
|
||||
logger.info("Successfully trained enhanced model",
|
||||
product_name=product_name,
|
||||
inventory_product_id=inventory_product_id,
|
||||
model_record_id=model_record.id if model_record else None)
|
||||
|
||||
completed_products = i + 1
|
||||
@@ -295,15 +295,15 @@ class EnhancedBakeryMLTrainer:
|
||||
await self.status_publisher.progress_update(
|
||||
progress=progress,
|
||||
step="model_training",
|
||||
current_product=product_name,
|
||||
step_details=f"Enhanced training completed for {product_name}"
|
||||
current_product=inventory_product_id,
|
||||
step_details=f"Enhanced training completed for {inventory_product_id}"
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to train enhanced model",
|
||||
product_name=product_name,
|
||||
inventory_product_id=inventory_product_id,
|
||||
error=str(e))
|
||||
training_results[product_name] = {
|
||||
training_results[inventory_product_id] = {
|
||||
'status': 'error',
|
||||
'error_message': str(e),
|
||||
'data_points': len(product_data) if product_data is not None else 0,
|
||||
@@ -320,8 +320,8 @@ class EnhancedBakeryMLTrainer:
|
||||
await self.status_publisher.progress_update(
|
||||
progress=progress,
|
||||
step="model_training",
|
||||
current_product=product_name,
|
||||
step_details=f"Enhanced training failed for {product_name}: {str(e)}"
|
||||
current_product=inventory_product_id,
|
||||
step_details=f"Enhanced training failed for {inventory_product_id}: {str(e)}"
|
||||
)
|
||||
|
||||
return training_results
|
||||
@@ -329,7 +329,7 @@ class EnhancedBakeryMLTrainer:
|
||||
async def _create_model_record(self,
|
||||
repos: Dict,
|
||||
tenant_id: str,
|
||||
product_name: str,
|
||||
inventory_product_id: str,
|
||||
model_info: Dict,
|
||||
job_id: str,
|
||||
processed_data: pd.DataFrame):
|
||||
@@ -337,7 +337,7 @@ class EnhancedBakeryMLTrainer:
|
||||
try:
|
||||
model_data = {
|
||||
"tenant_id": tenant_id,
|
||||
"product_name": product_name,
|
||||
"inventory_product_id": inventory_product_id,
|
||||
"job_id": job_id,
|
||||
"model_type": "enhanced_prophet",
|
||||
"model_path": model_info.get("model_path"),
|
||||
@@ -357,7 +357,7 @@ class EnhancedBakeryMLTrainer:
|
||||
|
||||
model_record = await repos['model'].create_model(model_data)
|
||||
logger.info("Created enhanced model record",
|
||||
product_name=product_name,
|
||||
inventory_product_id=inventory_product_id,
|
||||
model_id=model_record.id)
|
||||
|
||||
# Create artifacts for model files
|
||||
@@ -374,7 +374,7 @@ class EnhancedBakeryMLTrainer:
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to create enhanced model record",
|
||||
product_name=product_name,
|
||||
inventory_product_id=inventory_product_id,
|
||||
error=str(e))
|
||||
return None
|
||||
|
||||
@@ -382,14 +382,14 @@ class EnhancedBakeryMLTrainer:
|
||||
repos: Dict,
|
||||
model_id: str,
|
||||
tenant_id: str,
|
||||
product_name: str,
|
||||
inventory_product_id: str,
|
||||
metrics: Dict):
|
||||
"""Create performance metrics record using repository"""
|
||||
try:
|
||||
metric_data = {
|
||||
"model_id": str(model_id),
|
||||
"tenant_id": tenant_id,
|
||||
"product_name": product_name,
|
||||
"inventory_product_id": inventory_product_id,
|
||||
"mae": metrics.get("mae"),
|
||||
"mse": metrics.get("mse"),
|
||||
"rmse": metrics.get("rmse"),
|
||||
@@ -401,12 +401,12 @@ class EnhancedBakeryMLTrainer:
|
||||
|
||||
await repos['performance'].create_performance_metric(metric_data)
|
||||
logger.info("Created enhanced performance metrics",
|
||||
product_name=product_name,
|
||||
inventory_product_id=inventory_product_id,
|
||||
model_id=model_id)
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to create enhanced performance metrics",
|
||||
product_name=product_name,
|
||||
inventory_product_id=inventory_product_id,
|
||||
error=str(e))
|
||||
|
||||
async def _calculate_enhanced_training_summary(self,
|
||||
@@ -532,7 +532,7 @@ class EnhancedBakeryMLTrainer:
|
||||
|
||||
async def evaluate_model_performance_enhanced(self,
|
||||
tenant_id: str,
|
||||
product_name: str,
|
||||
inventory_product_id: str,
|
||||
model_path: str,
|
||||
test_dataset: TrainingDataSet) -> Dict[str, Any]:
|
||||
"""
|
||||
@@ -553,17 +553,17 @@ class EnhancedBakeryMLTrainer:
|
||||
test_traffic_df = pd.DataFrame(test_dataset.traffic_data)
|
||||
|
||||
# Filter for specific product
|
||||
product_test_sales = test_sales_df[test_sales_df['product_name'] == product_name].copy()
|
||||
product_test_sales = test_sales_df[test_sales_df['inventory_product_id'] == inventory_product_id].copy()
|
||||
|
||||
if product_test_sales.empty:
|
||||
raise ValueError(f"No test data found for product: {product_name}")
|
||||
raise ValueError(f"No test data found for product: {inventory_product_id}")
|
||||
|
||||
# Process test data using enhanced processor
|
||||
processed_test_data = await self.enhanced_data_processor.prepare_training_data(
|
||||
sales_data=product_test_sales,
|
||||
weather_data=test_weather_df,
|
||||
traffic_data=test_traffic_df,
|
||||
product_name=product_name,
|
||||
inventory_product_id=inventory_product_id,
|
||||
tenant_id=tenant_id
|
||||
)
|
||||
|
||||
@@ -608,16 +608,16 @@ class EnhancedBakeryMLTrainer:
|
||||
metrics["mape"] = 100.0
|
||||
|
||||
# Store evaluation metrics in repository
|
||||
model_records = await repos['model'].get_models_by_product(tenant_id, product_name)
|
||||
model_records = await repos['model'].get_models_by_product(tenant_id, inventory_product_id)
|
||||
if model_records:
|
||||
latest_model = max(model_records, key=lambda x: x.created_at)
|
||||
await self._create_performance_metrics(
|
||||
repos, latest_model.id, tenant_id, product_name, metrics
|
||||
repos, latest_model.id, tenant_id, inventory_product_id, metrics
|
||||
)
|
||||
|
||||
result = {
|
||||
"tenant_id": tenant_id,
|
||||
"product_name": product_name,
|
||||
"inventory_product_id": inventory_product_id,
|
||||
"enhanced_evaluation_metrics": metrics,
|
||||
"test_samples": len(processed_test_data),
|
||||
"prediction_samples": len(forecast),
|
||||
|
||||
@@ -46,7 +46,7 @@ class ModelPerformanceMetric(Base):
|
||||
id = Column(Integer, primary_key=True, index=True)
|
||||
model_id = Column(String(255), index=True, nullable=False)
|
||||
tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)
|
||||
product_name = Column(String(255), index=True, nullable=False)
|
||||
inventory_product_id = Column(UUID(as_uuid=True), index=True, nullable=False)
|
||||
|
||||
# Performance metrics
|
||||
mae = Column(Float, nullable=True) # Mean Absolute Error
|
||||
@@ -128,7 +128,7 @@ class TrainedModel(Base):
|
||||
# Primary identification - Updated to use UUID properly
|
||||
id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
|
||||
tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)
|
||||
product_name = Column(String, nullable=False, index=True)
|
||||
inventory_product_id = Column(UUID(as_uuid=True), nullable=False, index=True)
|
||||
|
||||
# Model information
|
||||
model_type = Column(String, default="prophet_optimized")
|
||||
@@ -174,7 +174,7 @@ class TrainedModel(Base):
|
||||
"id": str(self.id),
|
||||
"model_id": str(self.id),
|
||||
"tenant_id": str(self.tenant_id),
|
||||
"product_name": self.product_name,
|
||||
"inventory_product_id": str(self.inventory_product_id),
|
||||
"model_type": self.model_type,
|
||||
"model_version": self.model_version,
|
||||
"model_path": self.model_path,
|
||||
|
||||
@@ -29,7 +29,7 @@ class ModelRepository(TrainingBaseRepository):
|
||||
# Validate model data
|
||||
validation_result = self._validate_training_data(
|
||||
model_data,
|
||||
["tenant_id", "product_name", "model_path", "job_id"]
|
||||
["tenant_id", "inventory_product_id", "model_path", "job_id"]
|
||||
)
|
||||
|
||||
if not validation_result["is_valid"]:
|
||||
@@ -38,7 +38,7 @@ class ModelRepository(TrainingBaseRepository):
|
||||
# Check for duplicate active models for same tenant+product
|
||||
existing_model = await self.get_active_model_for_product(
|
||||
model_data["tenant_id"],
|
||||
model_data["product_name"]
|
||||
model_data["inventory_product_id"]
|
||||
)
|
||||
|
||||
# If there's an existing active model, we may want to deactivate it
|
||||
@@ -46,7 +46,7 @@ class ModelRepository(TrainingBaseRepository):
|
||||
logger.info("Deactivating previous production model",
|
||||
previous_model_id=existing_model.id,
|
||||
tenant_id=model_data["tenant_id"],
|
||||
product_name=model_data["product_name"])
|
||||
inventory_product_id=model_data["inventory_product_id"])
|
||||
await self.update(existing_model.id, {"is_production": False})
|
||||
|
||||
# Create new model
|
||||
@@ -55,7 +55,7 @@ class ModelRepository(TrainingBaseRepository):
|
||||
logger.info("Trained model created successfully",
|
||||
model_id=model.id,
|
||||
tenant_id=model.tenant_id,
|
||||
product_name=model.product_name,
|
||||
inventory_product_id=str(model.inventory_product_id),
|
||||
model_type=model.model_type)
|
||||
|
||||
return model
|
||||
@@ -65,21 +65,21 @@ class ModelRepository(TrainingBaseRepository):
|
||||
except Exception as e:
|
||||
logger.error("Failed to create trained model",
|
||||
tenant_id=model_data.get("tenant_id"),
|
||||
product_name=model_data.get("product_name"),
|
||||
inventory_product_id=model_data.get("inventory_product_id"),
|
||||
error=str(e))
|
||||
raise DatabaseError(f"Failed to create model: {str(e)}")
|
||||
|
||||
async def get_model_by_tenant_and_product(
|
||||
self,
|
||||
tenant_id: str,
|
||||
product_name: str
|
||||
inventory_product_id: str
|
||||
) -> List[TrainedModel]:
|
||||
"""Get all models for a tenant and product"""
|
||||
try:
|
||||
return await self.get_multi(
|
||||
filters={
|
||||
"tenant_id": tenant_id,
|
||||
"product_name": product_name
|
||||
"inventory_product_id": inventory_product_id
|
||||
},
|
||||
order_by="created_at",
|
||||
order_desc=True
|
||||
@@ -87,21 +87,21 @@ class ModelRepository(TrainingBaseRepository):
|
||||
except Exception as e:
|
||||
logger.error("Failed to get models by tenant and product",
|
||||
tenant_id=tenant_id,
|
||||
product_name=product_name,
|
||||
inventory_product_id=inventory_product_id,
|
||||
error=str(e))
|
||||
raise DatabaseError(f"Failed to get models: {str(e)}")
|
||||
|
||||
async def get_active_model_for_product(
|
||||
self,
|
||||
tenant_id: str,
|
||||
product_name: str
|
||||
inventory_product_id: str
|
||||
) -> Optional[TrainedModel]:
|
||||
"""Get the active production model for a product"""
|
||||
try:
|
||||
models = await self.get_multi(
|
||||
filters={
|
||||
"tenant_id": tenant_id,
|
||||
"product_name": product_name,
|
||||
"inventory_product_id": inventory_product_id,
|
||||
"is_active": True,
|
||||
"is_production": True
|
||||
},
|
||||
@@ -113,7 +113,7 @@ class ModelRepository(TrainingBaseRepository):
|
||||
except Exception as e:
|
||||
logger.error("Failed to get active model for product",
|
||||
tenant_id=tenant_id,
|
||||
product_name=product_name,
|
||||
inventory_product_id=inventory_product_id,
|
||||
error=str(e))
|
||||
raise DatabaseError(f"Failed to get active model: {str(e)}")
|
||||
|
||||
@@ -137,7 +137,7 @@ class ModelRepository(TrainingBaseRepository):
|
||||
# Deactivate other production models for the same tenant+product
|
||||
await self._deactivate_other_production_models(
|
||||
model.tenant_id,
|
||||
model.product_name,
|
||||
str(model.inventory_product_id),
|
||||
model_id
|
||||
)
|
||||
|
||||
@@ -150,7 +150,7 @@ class ModelRepository(TrainingBaseRepository):
|
||||
logger.info("Model promoted to production",
|
||||
model_id=model_id,
|
||||
tenant_id=model.tenant_id,
|
||||
product_name=model.product_name)
|
||||
inventory_product_id=str(model.inventory_product_id))
|
||||
|
||||
return updated_model
|
||||
|
||||
@@ -223,16 +223,16 @@ class ModelRepository(TrainingBaseRepository):
|
||||
|
||||
# Get models by product using raw query
|
||||
product_query = text("""
|
||||
SELECT product_name, COUNT(*) as count
|
||||
SELECT inventory_product_id, COUNT(*) as count
|
||||
FROM trained_models
|
||||
WHERE tenant_id = :tenant_id
|
||||
AND is_active = true
|
||||
GROUP BY product_name
|
||||
GROUP BY inventory_product_id
|
||||
ORDER BY count DESC
|
||||
""")
|
||||
|
||||
result = await self.session.execute(product_query, {"tenant_id": tenant_id})
|
||||
product_stats = {row.product_name: row.count for row in result.fetchall()}
|
||||
product_stats = {row.inventory_product_id: row.count for row in result.fetchall()}
|
||||
|
||||
# Recent activity (models created in last 30 days)
|
||||
thirty_days_ago = datetime.utcnow() - timedelta(days=30)
|
||||
@@ -274,7 +274,7 @@ class ModelRepository(TrainingBaseRepository):
|
||||
async def _deactivate_other_production_models(
|
||||
self,
|
||||
tenant_id: str,
|
||||
product_name: str,
|
||||
inventory_product_id: str,
|
||||
exclude_model_id: str
|
||||
) -> int:
|
||||
"""Deactivate other production models for the same tenant+product"""
|
||||
@@ -283,14 +283,14 @@ class ModelRepository(TrainingBaseRepository):
|
||||
UPDATE trained_models
|
||||
SET is_production = false
|
||||
WHERE tenant_id = :tenant_id
|
||||
AND product_name = :product_name
|
||||
AND inventory_product_id = :inventory_product_id
|
||||
AND id != :exclude_model_id
|
||||
AND is_production = true
|
||||
""")
|
||||
|
||||
result = await self.session.execute(query, {
|
||||
"tenant_id": tenant_id,
|
||||
"product_name": product_name,
|
||||
"inventory_product_id": inventory_product_id,
|
||||
"exclude_model_id": exclude_model_id
|
||||
})
|
||||
|
||||
@@ -299,7 +299,7 @@ class ModelRepository(TrainingBaseRepository):
|
||||
except Exception as e:
|
||||
logger.error("Failed to deactivate other production models",
|
||||
tenant_id=tenant_id,
|
||||
product_name=product_name,
|
||||
inventory_product_id=inventory_product_id,
|
||||
error=str(e))
|
||||
raise DatabaseError(f"Failed to deactivate models: {str(e)}")
|
||||
|
||||
@@ -313,7 +313,7 @@ class ModelRepository(TrainingBaseRepository):
|
||||
return {
|
||||
"model_id": model.id,
|
||||
"tenant_id": model.tenant_id,
|
||||
"product_name": model.product_name,
|
||||
"inventory_product_id": str(model.inventory_product_id),
|
||||
"model_type": model.model_type,
|
||||
"metrics": {
|
||||
"mape": model.mape,
|
||||
|
||||
@@ -29,7 +29,7 @@ class PerformanceRepository(TrainingBaseRepository):
|
||||
# Validate metric data
|
||||
validation_result = self._validate_training_data(
|
||||
metric_data,
|
||||
["model_id", "tenant_id", "product_name"]
|
||||
["model_id", "tenant_id", "inventory_product_id"]
|
||||
)
|
||||
|
||||
if not validation_result["is_valid"]:
|
||||
@@ -45,7 +45,7 @@ class PerformanceRepository(TrainingBaseRepository):
|
||||
logger.info("Performance metric created",
|
||||
model_id=metric.model_id,
|
||||
tenant_id=metric.tenant_id,
|
||||
product_name=metric.product_name)
|
||||
inventory_product_id=str(metric.inventory_product_id))
|
||||
|
||||
return metric
|
||||
|
||||
@@ -97,7 +97,7 @@ class PerformanceRepository(TrainingBaseRepository):
|
||||
async def get_metrics_by_tenant_and_product(
|
||||
self,
|
||||
tenant_id: str,
|
||||
product_name: str,
|
||||
inventory_product_id: str,
|
||||
skip: int = 0,
|
||||
limit: int = 100
|
||||
) -> List[ModelPerformanceMetric]:
|
||||
@@ -106,7 +106,7 @@ class PerformanceRepository(TrainingBaseRepository):
|
||||
return await self.get_multi(
|
||||
filters={
|
||||
"tenant_id": tenant_id,
|
||||
"product_name": product_name
|
||||
"inventory_product_id": inventory_product_id
|
||||
},
|
||||
skip=skip,
|
||||
limit=limit,
|
||||
@@ -116,7 +116,7 @@ class PerformanceRepository(TrainingBaseRepository):
|
||||
except Exception as e:
|
||||
logger.error("Failed to get metrics by tenant and product",
|
||||
tenant_id=tenant_id,
|
||||
product_name=product_name,
|
||||
inventory_product_id=inventory_product_id,
|
||||
error=str(e))
|
||||
raise DatabaseError(f"Failed to get metrics: {str(e)}")
|
||||
|
||||
@@ -172,7 +172,7 @@ class PerformanceRepository(TrainingBaseRepository):
    async def get_performance_trends(
        self,
        tenant_id: str,
-       product_name: str = None,
+       inventory_product_id: str = None,
        days: int = 30
    ) -> Dict[str, Any]:
        """Get performance trends for analysis"""
@@ -184,13 +184,13 @@ class PerformanceRepository(TrainingBaseRepository):
        conditions = ["tenant_id = :tenant_id", "measured_at >= :start_date"]
        params = {"tenant_id": tenant_id, "start_date": start_date}

-       if product_name:
-           conditions.append("product_name = :product_name")
-           params["product_name"] = product_name
+       if inventory_product_id:
+           conditions.append("inventory_product_id = :inventory_product_id")
+           params["inventory_product_id"] = inventory_product_id

        query_text = f"""
            SELECT
-               product_name,
+               inventory_product_id,
                AVG(mae) as avg_mae,
                AVG(mse) as avg_mse,
                AVG(rmse) as avg_rmse,
@@ -202,7 +202,7 @@ class PerformanceRepository(TrainingBaseRepository):
                MAX(measured_at) as last_measurement
            FROM model_performance_metrics
            WHERE {' AND '.join(conditions)}
-           GROUP BY product_name
+           GROUP BY inventory_product_id
            ORDER BY avg_accuracy DESC
        """

@@ -211,7 +211,7 @@ class PerformanceRepository(TrainingBaseRepository):
        trends = []
        for row in result.fetchall():
            trends.append({
-               "product_name": row.product_name,
+               "inventory_product_id": row.inventory_product_id,
                "metrics": {
                    "avg_mae": float(row.avg_mae) if row.avg_mae else None,
                    "avg_mse": float(row.avg_mse) if row.avg_mse else None,
@@ -230,7 +230,7 @@ class PerformanceRepository(TrainingBaseRepository):

        return {
            "tenant_id": tenant_id,
-           "product_name": product_name,
+           "inventory_product_id": inventory_product_id,
            "trends": trends,
            "period_days": days,
            "total_products": len(trends)
@@ -239,11 +239,11 @@ class PerformanceRepository(TrainingBaseRepository):
        except Exception as e:
            logger.error("Failed to get performance trends",
                         tenant_id=tenant_id,
-                        product_name=product_name,
+                        inventory_product_id=inventory_product_id,
                         error=str(e))
            return {
                "tenant_id": tenant_id,
-               "product_name": product_name,
+               "inventory_product_id": inventory_product_id,
                "trends": [],
                "period_days": days,
                "total_products": 0
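A hedged sketch of the migrated trends call and the shape it returns per the hunks above (IDs are placeholders):

    # Sketch only: trend lookup scoped to a single inventory product;
    # omitting inventory_product_id aggregates across all products.
    report = await perf_repo.get_performance_trends(
        tenant_id="tenant-001",
        inventory_product_id="0f8e2c1a-7d4b-4f6e-9c2d-3a5b6c7d8e9f",
        days=30,
    )
    # report["trends"] is a list of {"inventory_product_id": ..., "metrics": {...}}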
@@ -268,16 +268,16 @@ class PerformanceRepository(TrainingBaseRepository):
        order_direction = "DESC" if order_desc else "ASC"

        query_text = f"""
-           SELECT DISTINCT ON (product_name, model_id)
+           SELECT DISTINCT ON (inventory_product_id, model_id)
                model_id,
-               product_name,
+               inventory_product_id,
                {metric_type},
                measured_at,
                evaluation_samples
            FROM model_performance_metrics
            WHERE tenant_id = :tenant_id
              AND {metric_type} IS NOT NULL
-           ORDER BY product_name, model_id, measured_at DESC, {metric_type} {order_direction}
+           ORDER BY inventory_product_id, model_id, measured_at DESC, {metric_type} {order_direction}
            LIMIT :limit
        """

@@ -290,7 +290,7 @@ class PerformanceRepository(TrainingBaseRepository):
        for row in result.fetchall():
            best_models.append({
                "model_id": row.model_id,
-               "product_name": row.product_name,
+               "inventory_product_id": row.inventory_product_id,
                "metric_value": float(getattr(row, metric_type)),
                "metric_type": metric_type,
                "measured_at": row.measured_at.isoformat() if row.measured_at else None,
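Note for reviewers: PostgreSQL requires the DISTINCT ON expressions to match the leading ORDER BY expressions, which is why both clauses had to be migrated in the same hunk. A standalone illustration using the table and columns from this diff:

    # If the two lists diverge, PostgreSQL raises "SELECT DISTINCT ON
    # expressions must match initial ORDER BY expressions".
    latest_per_product_sql = """
        SELECT DISTINCT ON (inventory_product_id, model_id)
               model_id, inventory_product_id, mape, measured_at
        FROM model_performance_metrics
        WHERE tenant_id = :tenant_id
        ORDER BY inventory_product_id, model_id, measured_at DESC
    """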
@@ -319,12 +319,12 @@ class PerformanceRepository(TrainingBaseRepository):
        # Get metrics by product using raw query
        product_query = text("""
            SELECT
-               product_name,
+               inventory_product_id,
                COUNT(*) as metric_count,
                AVG(accuracy_percentage) as avg_accuracy
            FROM model_performance_metrics
            WHERE tenant_id = :tenant_id
-           GROUP BY product_name
+           GROUP BY inventory_product_id
            ORDER BY avg_accuracy DESC
        """)

@@ -332,7 +332,7 @@ class PerformanceRepository(TrainingBaseRepository):
        product_stats = {}

        for row in result.fetchall():
-           product_stats[row.product_name] = {
+           product_stats[row.inventory_product_id] = {
                "metric_count": row.metric_count,
                "avg_accuracy": float(row.avg_accuracy) if row.avg_accuracy else None
            }
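After this change the stats map is keyed by product UUID rather than by display name, so downstream consumers that expect name keys must also migrate. A sketch of the resulting shape (values illustrative):

    # Keys are inventory_product_id values, not human-readable names.
    product_stats = {
        "0f8e2c1a-7d4b-4f6e-9c2d-3a5b6c7d8e9f": {
            "metric_count": 12,
            "avg_accuracy": 84.5,
        },
    }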
@@ -383,7 +383,7 @@ class PerformanceRepository(TrainingBaseRepository):
        query_text = f"""
            SELECT
                model_id,
-               product_name,
+               inventory_product_id,
                AVG({metric_type}) as avg_metric,
                MIN({metric_type}) as min_metric,
                MAX({metric_type}) as max_metric,
@@ -392,7 +392,7 @@ class PerformanceRepository(TrainingBaseRepository):
            FROM model_performance_metrics
            WHERE model_id IN ('{model_ids_str}')
              AND {metric_type} IS NOT NULL
-           GROUP BY model_id, product_name
+           GROUP BY model_id, inventory_product_id
            ORDER BY avg_metric DESC
        """

@@ -402,7 +402,7 @@ class PerformanceRepository(TrainingBaseRepository):
        for row in result.fetchall():
            comparisons.append({
                "model_id": row.model_id,
-               "product_name": row.product_name,
+               "inventory_product_id": row.inventory_product_id,
                "avg_metric": float(row.avg_metric),
                "min_metric": float(row.min_metric),
                "max_metric": float(row.max_metric),
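A hedged sketch of consuming the comparison rows above; the enclosing method's name is not shown in this hunk, and the ranking assumes a higher-is-better metric such as accuracy:

    # Sketch only: keep the best model per inventory product.
    best_by_product = {}
    for comp in comparisons:
        pid = comp["inventory_product_id"]
        current = best_by_product.get(pid)
        if current is None or comp["avg_metric"] > current["avg_metric"]:
            best_by_product[pid] = comp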
@@ -54,7 +54,7 @@ class DataSummary(BaseModel):

class ProductTrainingResult(BaseModel):
    """Schema for individual product training results"""
-   product_name: str = Field(..., description="Product name")
+   inventory_product_id: UUID = Field(..., description="Inventory product UUID")
    status: str = Field(..., description="Training status for this product")
    model_id: Optional[str] = Field(None, description="Trained model identifier")
    data_points: int = Field(..., description="Number of data points used for training")
@@ -188,7 +188,7 @@ class ModelInfo(BaseModel):

class ProductTrainingResult(BaseModel):
    """Schema for individual product training result"""
-   product_name: str = Field(..., description="Product name")
+   inventory_product_id: UUID = Field(..., description="Inventory product UUID")
    status: str = Field(..., description="Training status for this product")
    model_info: Optional[ModelInfo] = Field(None, description="Model information if successful")
    data_points: int = Field(..., description="Number of data points used")
@@ -281,7 +281,7 @@ class TrainedModelResponse(BaseModel):
    """Response schema for trained model information"""
    model_id: str = Field(..., description="Unique model identifier")
    tenant_id: str = Field(..., description="Tenant identifier")
-   product_name: str = Field(..., description="Product name")
+   inventory_product_id: UUID = Field(..., description="Inventory product UUID")
    model_type: str = Field(..., description="Type of ML model")
    model_path: str = Field(..., description="Path to stored model")
    version: int = Field(..., description="Model version")
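Because the field is now typed as UUID, Pydantic validates and coerces incoming strings at the schema boundary instead of passing arbitrary names through. A minimal self-contained sketch (the Demo class stands in for ProductTrainingResult):

    # A well-formed UUID string is coerced; a malformed one raises
    # ValidationError rather than silently flowing downstream.
    from uuid import UUID
    from pydantic import BaseModel, Field

    class Demo(BaseModel):  # stand-in for ProductTrainingResult
        inventory_product_id: UUID = Field(..., description="Inventory product UUID")

    ok = Demo(inventory_product_id="0f8e2c1a-7d4b-4f6e-9c2d-3a5b6c7d8e9f")
    assert isinstance(ok.inventory_product_id, UUID)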
@@ -262,7 +262,7 @@ async def publish_job_cancelled(job_id: str, tenant_id: str, reason: str = "User
# PRODUCT-LEVEL TRAINING EVENTS
# =========================================

-async def publish_product_training_started(job_id: str, tenant_id: str, product_name: str) -> bool:
+async def publish_product_training_started(job_id: str, tenant_id: str, inventory_product_id: str) -> bool:
    """Publish single product training started event"""
    return await training_publisher.publish_event(
        exchange_name="training.events",
@@ -274,7 +274,7 @@ async def publish_product_training_started(job_id: str, tenant_id: str, product_
        "data": {
            "job_id": job_id,
            "tenant_id": tenant_id,
-           "product_name": product_name,
+           "inventory_product_id": inventory_product_id,
            "started_at": datetime.now().isoformat()
        }
    }
@@ -283,7 +283,7 @@ async def publish_product_training_started(job_id: str, tenant_id: str, product_
async def publish_product_training_completed(
    job_id: str,
    tenant_id: str,
-   product_name: str,
+   inventory_product_id: str,
    model_id: str,
    metrics: Optional[Dict[str, float]] = None
) -> bool:
@@ -298,7 +298,7 @@ async def publish_product_training_completed(
        "data": {
            "job_id": job_id,
            "tenant_id": tenant_id,
-           "product_name": product_name,
+           "inventory_product_id": inventory_product_id,
            "model_id": model_id,
            "metrics": metrics or {},
            "completed_at": datetime.now().isoformat()
@@ -309,7 +309,7 @@ async def publish_product_training_failed(
async def publish_product_training_failed(
    job_id: str,
    tenant_id: str,
-   product_name: str,
+   inventory_product_id: str,
    error: str
) -> bool:
    """Publish single product training failed event"""
@@ -323,7 +323,7 @@ async def publish_product_training_failed(
        "data": {
            "job_id": job_id,
            "tenant_id": tenant_id,
-           "product_name": product_name,
+           "inventory_product_id": inventory_product_id,
            "error": error,
            "failed_at": datetime.now().isoformat()
        }
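Any consumer bound to training.events that reads data.product_name will need the same migration, since these payloads now carry the UUID instead. A producer-side sketch (IDs are placeholders):

    # Sketch only: product-level lifecycle events now travel by inventory UUID.
    await publish_product_training_started(
        job_id="job-123",
        tenant_id="tenant-001",
        inventory_product_id="0f8e2c1a-7d4b-4f6e-9c2d-3a5b6c7d8e9f",
    )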
@@ -334,7 +334,7 @@ async def publish_product_training_failed(
# MODEL LIFECYCLE EVENTS
# =========================================

-async def publish_model_trained(model_id: str, tenant_id: str, product_name: str, metrics: Dict[str, float]) -> bool:
+async def publish_model_trained(model_id: str, tenant_id: str, inventory_product_id: str, metrics: Dict[str, float]) -> bool:
    """Publish model trained event with safe metric serialization"""

    # Clean metrics to ensure JSON serialization
@@ -347,7 +347,7 @@ async def publish_model_trained(model_id: str, tenant_id: str, product_name: str
        "data": {
            "model_id": model_id,
            "tenant_id": tenant_id,
-           "product_name": product_name,
+           "inventory_product_id": inventory_product_id,
            "training_metrics": clean_metrics,  # Now safe for JSON
            "trained_at": datetime.now().isoformat()
        }
@@ -360,7 +360,7 @@ async def publish_model_trained(model_id: str, tenant_id: str, product_name: str
    )


-async def publish_model_validated(model_id: str, tenant_id: str, product_name: str, validation_results: Dict[str, Any]) -> bool:
+async def publish_model_validated(model_id: str, tenant_id: str, inventory_product_id: str, validation_results: Dict[str, Any]) -> bool:
    """Publish model validation event"""
    return await training_publisher.publish_event(
        exchange_name="training.events",
@@ -372,14 +372,14 @@ async def publish_model_validated(model_id: str, tenant_id: str, product_name: s
        "data": {
            "model_id": model_id,
            "tenant_id": tenant_id,
-           "product_name": product_name,
+           "inventory_product_id": inventory_product_id,
            "validation_results": validation_results,
            "validated_at": datetime.now().isoformat()
        }
    }
    )

-async def publish_model_saved(model_id: str, tenant_id: str, product_name: str, model_path: str) -> bool:
+async def publish_model_saved(model_id: str, tenant_id: str, inventory_product_id: str, model_path: str) -> bool:
    """Publish model saved event"""
    return await training_publisher.publish_event(
        exchange_name="training.events",
@@ -391,7 +391,7 @@ async def publish_model_saved(model_id: str, tenant_id: str, product_name: str,
        "data": {
            "model_id": model_id,
            "tenant_id": tenant_id,
-           "product_name": product_name,
+           "inventory_product_id": inventory_product_id,
            "model_path": model_path,
            "saved_at": datetime.now().isoformat()
        }
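A hedged sketch of the trained-event call; per the docstring above, the helper is expected to clean the metrics for JSON before publishing, so passing trainer output (e.g. numpy scalars) should be tolerated:

    # Sketch only: values here are illustrative, not real training output.
    await publish_model_trained(
        model_id="a1b2c3",
        tenant_id="tenant-001",
        inventory_product_id="0f8e2c1a-7d4b-4f6e-9c2d-3a5b6c7d8e9f",
        metrics={"mape": 15.5, "mae": 2.3, "rmse": 3.1},
    )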
@@ -571,7 +571,7 @@ class TrainingStatusPublisher:

        return 0

-   async def product_completed(self, product_name: str, model_id: str, metrics: Optional[Dict] = None):
+   async def product_completed(self, inventory_product_id: str, model_id: str, metrics: Optional[Dict] = None):
        """Mark a product as completed and update progress"""
        self.products_completed += 1

@@ -579,7 +579,7 @@ class TrainingStatusPublisher:
        clean_metrics = safe_json_serialize(metrics) if metrics else None

        await publish_product_training_completed(
-           self.job_id, self.tenant_id, product_name, model_id, clean_metrics
+           self.job_id, self.tenant_id, inventory_product_id, model_id, clean_metrics
        )

        # Update overall progress
@@ -587,7 +587,7 @@ class TrainingStatusPublisher:
        progress = int((self.products_completed / self.products_total) * 90)  # Save 10% for final steps
        await self.progress_update(
            progress=progress,
-           step=f"Completed training for {product_name}",
+           step=f"Completed training for {inventory_product_id}",
            current_product=None
        )
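Per-product progress is driven by this publisher; a minimal usage sketch. The constructor arguments and the products_total assignment below are assumptions, since neither appears in this hunk (note that the human-facing step string now shows a UUID rather than a name):

    # Sketch only: one completion tick per trained product.
    status = TrainingStatusPublisher(job_id="job-123", tenant_id="tenant-001")  # assumed ctor
    status.products_total = 4  # assumed attribute initialization
    await status.product_completed(
        inventory_product_id="0f8e2c1a-7d4b-4f6e-9c2d-3a5b6c7d8e9f",
        model_id="a1b2c3",
        metrics={"mape": 15.5},
    )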
@@ -234,7 +234,7 @@ class TrainingDataOrchestrator:

    def _validate_sales_record(self, record: Dict[str, Any]) -> bool:
        """Validate individual sales record"""
-       required_fields = ['date', 'product_name']
+       required_fields = ['date', 'inventory_product_id']
        quantity_fields = ['quantity', 'quantity_sold', 'sales', 'units_sold']

        # Check required fields
@@ -755,8 +755,8 @@ class TrainingDataOrchestrator:
        # Check data consistency
        unique_products = set()
        for record in dataset.sales_data:
-           if 'product_name' in record:
-               unique_products.add(record['product_name'])
+           if 'inventory_product_id' in record:
+               unique_products.add(record['inventory_product_id'])

        if len(unique_products) == 0:
            validation_results["errors"].append("No product names found in sales data")
@@ -822,7 +822,7 @@ class TrainingDataOrchestrator:
            "required": True,
            "priority": "high",
            "expected_records": "variable",
-           "data_points": ["date", "product_name", "quantity"],
+           "data_points": ["date", "inventory_product_id", "quantity"],
            "validation": "required_fields_check"
        }
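A sketch of the minimal sales record that passes the migrated required-fields check; presumably one of the quantity aliases listed above must also be present:

    # Passes: both required fields plus a recognized quantity field.
    valid_record = {
        "date": "2024-05-01",
        "inventory_product_id": "0f8e2c1a-7d4b-4f6e-9c2d-3a5b6c7d8e9f",
        "quantity_sold": 18,
    }
    # Fails after this change: identifies the product by name only.
    legacy_record = {"date": "2024-05-01", "product_name": "Baguettes", "quantity": 18}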
@@ -223,7 +223,7 @@ class EnhancedTrainingService:
            "training_results": training_results,
            "stored_models": [{
                "id": str(model.id),
-               "product_name": model.product_name,
+               "inventory_product_id": str(model.inventory_product_id),
                "model_type": model.model_type,
                "model_path": model.model_path,
                "is_active": model.is_active,
@@ -292,11 +292,11 @@ class EnhancedTrainingService:
                    models_trained_type=type(models_trained).__name__,
                    models_trained_keys=list(models_trained.keys()) if isinstance(models_trained, dict) else "not_dict")

-       for product_name, model_result in models_trained.items():
+       for inventory_product_id, model_result in models_trained.items():
            # Defensive check: ensure model_result is a dictionary
            if not isinstance(model_result, dict):
                logger.warning("Skipping invalid model_result for product",
-                              product_name=product_name,
+                              inventory_product_id=inventory_product_id,
                               model_result_type=type(model_result).__name__,
                               model_result_value=str(model_result)[:100])
                continue
@@ -306,12 +306,12 @@ class EnhancedTrainingService:
            metrics = model_result.get("metrics", {})
            if not isinstance(metrics, dict):
                logger.warning("Invalid metrics object, using empty dict",
-                              product_name=product_name,
+                              inventory_product_id=inventory_product_id,
                               metrics_type=type(metrics).__name__)
                metrics = {}
            model_data = {
                "tenant_id": tenant_id,
-               "product_name": product_name,
+               "inventory_product_id": inventory_product_id,
                "job_id": job_id,
                "model_type": "prophet_optimized",
                "model_path": model_result.get("model_path"),
@@ -371,14 +371,14 @@ class EnhancedTrainingService:
        """Create performance metrics for stored models"""
        try:
            for model in stored_models:
-               model_result = training_results.get("models_trained", {}).get(model.product_name)
+               model_result = training_results.get("models_trained", {}).get(str(model.inventory_product_id))
                if model_result and model_result.get("metrics"):
                    metrics = model_result["metrics"]

                    metric_data = {
                        "model_id": str(model.id),
                        "tenant_id": tenant_id,
-                       "product_name": model.product_name,
+                       "inventory_product_id": str(model.inventory_product_id),
                        "mae": metrics.get("mae"),
                        "mse": metrics.get("mse"),
                        "rmse": metrics.get("rmse"),
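One subtlety worth a sketch: models_trained is keyed by the ID as a plain string, while the ORM column holds a UUID object, hence the str() conversion in the lookup above. A self-contained illustration (names and values are placeholders):

    # String keys on one side, UUID objects on the other: a raw UUID
    # lookup misses, the str() form matches.
    from uuid import UUID

    pid = UUID("0f8e2c1a-7d4b-4f6e-9c2d-3a5b6c7d8e9f")  # e.g. model.inventory_product_id
    models_trained = {"0f8e2c1a-7d4b-4f6e-9c2d-3a5b6c7d8e9f": {"metrics": {"mae": 2.3}}}

    assert models_trained.get(pid) is None
    assert models_trained.get(str(pid)) is not None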
@@ -556,14 +556,14 @@ class EnhancedTrainingService:

    async def start_single_product_training(self,
                                            tenant_id: str,
-                                           product_name: str,
+                                           inventory_product_id: str,
                                            job_id: str,
                                            bakery_location: tuple = (40.4168, -3.7038)) -> Dict[str, Any]:
        """Start enhanced single product training using repository pattern"""
        try:
            logger.info("Starting enhanced single product training",
                        tenant_id=tenant_id,
-                       product_name=product_name,
+                       inventory_product_id=inventory_product_id,
                        job_id=job_id)

            # This would use the data client to fetch data for the specific product
@@ -573,7 +573,7 @@ class EnhancedTrainingService:
            return {
                "job_id": job_id,
                "tenant_id": tenant_id,
-               "product_name": product_name,
+               "inventory_product_id": inventory_product_id,
                "status": "completed",
                "message": "Enhanced single product training completed successfully",
                "created_at": datetime.now(),
@@ -582,9 +582,9 @@ class EnhancedTrainingService:
                "successful_trainings": 1,
                "failed_trainings": 0,
                "products": [{
-                   "product_name": product_name,
+                   "inventory_product_id": inventory_product_id,
                    "status": "completed",
-                   "model_id": f"model_{product_name}_{job_id[:8]}",
+                   "model_id": f"model_{inventory_product_id}_{job_id[:8]}",
                    "data_points": 100,
                    "metrics": {"mape": 15.5, "mae": 2.3, "rmse": 3.1, "r2_score": 0.85}
                }],
@@ -597,7 +597,7 @@ class EnhancedTrainingService:

        except Exception as e:
            logger.error("Enhanced single product training failed",
-                        product_name=product_name,
+                        inventory_product_id=inventory_product_id,
                        error=str(e))
            raise
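A caller-side sketch of the migrated entry point; service construction is omitted and the IDs are placeholders. Per the stub response shown above, the products entry echoes the UUID back:

    # Sketch only: single-product training is now requested by inventory UUID.
    result = await training_service.start_single_product_training(
        tenant_id="tenant-001",
        inventory_product_id="0f8e2c1a-7d4b-4f6e-9c2d-3a5b6c7d8e9f",
        job_id="job-123",
    )
    assert result["products"][0]["inventory_product_id"] == \
        "0f8e2c1a-7d4b-4f6e-9c2d-3a5b6c7d8e9f"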
@@ -611,7 +611,7 @@ class EnhancedTrainingService:
        products = []
        for model in stored_models:
            products.append({
-               "product_name": model.get("product_name"),
+               "inventory_product_id": model.get("inventory_product_id"),
                "status": "completed",
                "model_id": model.get("id"),
                "data_points": model.get("training_samples", 0),