Add AI insights feature
@@ -15,6 +15,7 @@
 */
import { useQuery, useMutation, useQueryClient, UseQueryOptions, UseMutationOptions } from '@tanstack/react-query';
import { useState } from 'react';
import {
  aiInsightsService,
  AIInsight,
@@ -213,13 +214,13 @@ export function useApplyInsight(
 * Mutation hook to dismiss an insight
 */
export function useDismissInsight(
  options?: UseMutationOptions<AIInsight, Error, { tenantId: string; insightId: string; reason?: string }>
  options?: UseMutationOptions<void, Error, { tenantId: string; insightId: string }>
) {
  const queryClient = useQueryClient();

  return useMutation({
    mutationFn: ({ tenantId, insightId, reason }) =>
      aiInsightsService.dismissInsight(tenantId, insightId, reason),
    mutationFn: ({ tenantId, insightId }) =>
      aiInsightsService.dismissInsight(tenantId, insightId),
    onSuccess: (_, variables) => {
      queryClient.invalidateQueries({ queryKey: aiInsightsKeys.lists() });
      queryClient.invalidateQueries({ queryKey: aiInsightsKeys.detail(variables.tenantId, variables.insightId) });
@@ -231,16 +232,16 @@ export function useDismissInsight(
}

/**
 * Mutation hook to resolve an insight
 * Mutation hook to update insight status
 */
export function useResolveInsight(
  options?: UseMutationOptions<AIInsight, Error, { tenantId: string; insightId: string; resolution?: string }>
export function useUpdateInsightStatus(
  options?: UseMutationOptions<AIInsight, Error, { tenantId: string; insightId: string; status: 'acknowledged' | 'in_progress' | 'applied' | 'expired' }>
) {
  const queryClient = useQueryClient();

  return useMutation({
    mutationFn: ({ tenantId, insightId, resolution }) =>
      aiInsightsService.resolveInsight(tenantId, insightId, resolution),
    mutationFn: ({ tenantId, insightId, status }) =>
      aiInsightsService.updateInsightStatus(tenantId, insightId, status),
    onSuccess: (_, variables) => {
      queryClient.invalidateQueries({ queryKey: aiInsightsKeys.lists() });
      queryClient.invalidateQueries({ queryKey: aiInsightsKeys.detail(variables.tenantId, variables.insightId) });
@@ -300,6 +301,3 @@ export function useInsightSelection() {
    isSelected: (insightId: string) => selectedInsights.includes(insightId),
  };
}

// Import useState for utility hook
import { useState } from 'react';
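Note: the dismiss and status-update hooks above change their variable shapes (dismiss no longer takes a reason, and the resolve flow becomes a generic status update). A minimal usage sketch, assuming a hypothetical InsightActions component and an import path that may differ in the actual repo:

```tsx
// Hypothetical consumer of the new hooks (component, props, and import path are illustrative).
import { useDismissInsight, useUpdateInsightStatus } from './useAIInsights';

function InsightActions({ tenantId, insightId }: { tenantId: string; insightId: string }) {
  const dismiss = useDismissInsight();
  const updateStatus = useUpdateInsightStatus();

  return (
    <>
      {/* Dismiss now deletes the insight and takes no reason */}
      <button onClick={() => dismiss.mutate({ tenantId, insightId })}>Dismiss</button>
      {/* The old "resolve" is expressed as a status transition */}
      <button onClick={() => updateStatus.mutate({ tenantId, insightId, status: 'applied' })}>
        Mark applied
      </button>
    </>
  );
}
```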
@@ -13,10 +13,41 @@ import { productionService } from '../services/production';
|
||||
import { ProcurementService } from '../services/procurement-service';
|
||||
import * as orchestratorService from '../services/orchestrator';
|
||||
import { suppliersService } from '../services/suppliers';
|
||||
import { aiInsightsService } from '../services/aiInsights';
|
||||
import { useBatchNotifications, useDeliveryNotifications, useOrchestrationNotifications } from '../../hooks/useEventNotifications';
|
||||
import { useSSEEvents } from '../../hooks/useSSE';
|
||||
import { parseISO } from 'date-fns';
|
||||
|
||||
// ============================================================
|
||||
// Helper Functions
|
||||
// ============================================================
|
||||
|
||||
/**
|
||||
* Map AI insight category to dashboard block type
|
||||
*/
|
||||
function mapInsightTypeToBlockType(category: string): string {
|
||||
const mapping: Record<string, string> = {
|
||||
'inventory': 'safety_stock',
|
||||
'forecasting': 'demand_forecast',
|
||||
'demand': 'demand_forecast',
|
||||
'procurement': 'cost_optimization',
|
||||
'cost': 'cost_optimization',
|
||||
'production': 'waste_reduction',
|
||||
'quality': 'risk_alert',
|
||||
'efficiency': 'waste_reduction',
|
||||
};
|
||||
return mapping[category] || 'demand_forecast';
|
||||
}
|
||||
|
||||
/**
|
||||
* Map AI insight priority to dashboard impact level
|
||||
*/
|
||||
function mapPriorityToImpact(priority: string): 'high' | 'medium' | 'low' {
|
||||
if (priority === 'critical' || priority === 'high') return 'high';
|
||||
if (priority === 'medium') return 'medium';
|
||||
return 'low';
|
||||
}
|
||||
|
||||
// ============================================================
|
||||
// Types
|
||||
// ============================================================
|
||||
@@ -75,20 +106,26 @@ export function useDashboardData(tenantId: string) {
|
||||
const now = new Date(); // Keep for local time display
|
||||
const nowUTC = new Date(); // UTC time for accurate comparison with API dates
|
||||
|
||||
// Parallel fetch ALL data needed by all 4 blocks (including suppliers for PO enrichment)
|
||||
const [alertsResponse, pendingPOs, productionResponse, deliveriesResponse, orchestration, suppliers] = await Promise.all([
|
||||
// Parallel fetch ALL data needed by all 4 blocks (including suppliers for PO enrichment and AI insights)
|
||||
const [alertsResponse, pendingPOs, productionResponse, deliveriesResponse, orchestration, suppliers, aiInsightsResponse] = await Promise.all([
|
||||
alertService.getEvents(tenantId, { status: 'active', limit: 100 }).catch(() => []),
|
||||
getPendingApprovalPurchaseOrders(tenantId, 100).catch(() => []),
|
||||
productionService.getBatches(tenantId, { start_date: today, page_size: 100 }).catch(() => ({ batches: [] })),
|
||||
ProcurementService.getExpectedDeliveries(tenantId, { days_ahead: 1, include_overdue: true }).catch(() => ({ deliveries: [] })),
|
||||
orchestratorService.getLastOrchestrationRun(tenantId).catch(() => null),
|
||||
suppliersService.getSuppliers(tenantId).catch(() => []),
|
||||
aiInsightsService.getInsights(tenantId, {
|
||||
status: 'new',
|
||||
priority: 'high',
|
||||
limit: 5
|
||||
}).catch(() => ({ items: [], total: 0, limit: 5, offset: 0, has_more: false })),
|
||||
]);
|
||||
|
||||
// Normalize alerts (API returns array directly or {items: []})
|
||||
const alerts = Array.isArray(alertsResponse) ? alertsResponse : (alertsResponse?.items || []);
|
||||
const productionBatches = productionResponse?.batches || [];
|
||||
const deliveries = deliveriesResponse?.deliveries || [];
|
||||
const aiInsights = aiInsightsResponse?.items || [];
|
||||
|
||||
// Create supplier ID -> supplier name map for quick lookup
|
||||
const supplierMap = new Map<string, string>();
|
||||
@@ -246,6 +283,19 @@ export function useDashboardData(tenantId: string) {
|
||||
};
|
||||
}
|
||||
|
||||
// Map AI insights to dashboard format
|
||||
const mappedAiInsights = aiInsights.map((insight: any) => ({
|
||||
id: insight.id,
|
||||
title: insight.title,
|
||||
description: insight.description,
|
||||
type: mapInsightTypeToBlockType(insight.category),
|
||||
impact: mapPriorityToImpact(insight.priority),
|
||||
impact_value: insight.impact_value?.toString(),
|
||||
impact_currency: insight.impact_unit === 'euros' ? '€' : '',
|
||||
created_at: insight.created_at,
|
||||
recommendation_actions: insight.recommendation_actions || [],
|
||||
}));
|
||||
|
||||
return {
|
||||
// Raw data
|
||||
alerts: deduplicatedAlerts,
|
||||
@@ -253,7 +303,7 @@ export function useDashboardData(tenantId: string) {
|
||||
productionBatches,
|
||||
deliveries,
|
||||
orchestrationSummary,
|
||||
aiInsights: [], // AI-generated insights for professional/enterprise tiers
|
||||
aiInsights: mappedAiInsights,
|
||||
|
||||
// Computed
|
||||
preventedIssues,
|
||||
@@ -295,6 +345,7 @@ export function useDashboardRealtimeSync(tenantId: string) {
|
||||
const { notifications: deliveryNotifications } = useDeliveryNotifications();
|
||||
const { recentNotifications: orchestrationNotifications } = useOrchestrationNotifications();
|
||||
const { events: alertEvents } = useSSEEvents({ channels: ['*.alerts'] });
|
||||
const { events: aiInsightEvents } = useSSEEvents({ channels: ['*.ai_insights'] });
|
||||
|
||||
// Invalidate dashboard data on batch events
|
||||
useEffect(() => {
|
||||
@@ -345,4 +396,15 @@ export function useDashboardRealtimeSync(tenantId: string) {
|
||||
refetchType: 'active',
|
||||
});
|
||||
}, [alertEvents, tenantId, queryClient]);
|
||||
|
||||
// Invalidate dashboard data on AI insight events
|
||||
useEffect(() => {
|
||||
if (!aiInsightEvents || aiInsightEvents.length === 0 || !tenantId) return;
|
||||
|
||||
// Any new AI insight should trigger a refresh
|
||||
queryClient.invalidateQueries({
|
||||
queryKey: ['dashboard-data', tenantId],
|
||||
refetchType: 'active',
|
||||
});
|
||||
}, [aiInsightEvents, tenantId, queryClient]);
|
||||
}
|
||||
|
||||
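For context, a sketch of how the dashboard data and realtime-sync hooks above are typically wired together in a page component. The component name, tenant source, and the exact return shape of useDashboardData are assumptions, not part of this commit:

```tsx
// Illustrative only: pairs the query hook with the SSE-driven invalidation hook.
function DashboardPage({ tenantId }: { tenantId: string }) {
  // Subscribes to batch/delivery/orchestration/alert/ai_insight events and
  // invalidates ['dashboard-data', tenantId] when they arrive.
  useDashboardRealtimeSync(tenantId);

  // Assumed to be a react-query wrapper returning { data, isLoading }.
  const { data, isLoading } = useDashboardData(tenantId);
  if (isLoading || !data) return null;

  // data.aiInsights now carries the mapped high-priority insights fetched above.
  return <AIInsightsBlock insights={data.aiInsights} loading={isLoading} />;
}
```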
@@ -20,17 +20,12 @@ import { apiClient } from '../client';
|
||||
export interface AIInsight {
|
||||
id: string;
|
||||
tenant_id: string;
|
||||
type: 'forecast' | 'warning' | 'opportunity' | 'positive' | 'optimization' | 'rule';
|
||||
priority: 'urgent' | 'high' | 'medium' | 'low';
|
||||
category: 'demand' | 'procurement' | 'inventory' | 'production' | 'sales' | 'system' | 'business';
|
||||
type: 'optimization' | 'alert' | 'prediction' | 'recommendation' | 'insight' | 'anomaly';
|
||||
priority: 'low' | 'medium' | 'high' | 'critical';
|
||||
category: 'forecasting' | 'inventory' | 'production' | 'procurement' | 'customer' | 'cost' | 'quality' | 'efficiency' | 'demand' | 'maintenance' | 'energy' | 'scheduling';
|
||||
title: string;
|
||||
description: string;
|
||||
reasoning_data?: {
|
||||
type: string;
|
||||
parameters: Record<string, any>;
|
||||
[key: string]: any;
|
||||
};
|
||||
impact_type: 'cost_savings' | 'waste_reduction' | 'yield_improvement' | 'revenue' | 'system_health' | 'process_improvement';
|
||||
impact_type?: 'cost_savings' | 'revenue_increase' | 'waste_reduction' | 'efficiency_gain' | 'quality_improvement' | 'risk_mitigation';
|
||||
impact_value?: number;
|
||||
impact_unit?: string;
|
||||
confidence: number;
|
||||
@@ -39,31 +34,27 @@ export interface AIInsight {
|
||||
recommendation_actions?: Array<{
|
||||
label: string;
|
||||
action: string;
|
||||
params: Record<string, any>;
|
||||
endpoint?: string;
|
||||
}>;
|
||||
source_service: string;
|
||||
source_model: string;
|
||||
detected_at: string;
|
||||
resolved_at?: string;
|
||||
resolved_by?: string;
|
||||
status: 'active' | 'applied' | 'dismissed' | 'resolved';
|
||||
feedback_count?: number;
|
||||
avg_feedback_rating?: number;
|
||||
source_service?: string;
|
||||
source_data_id?: string;
|
||||
status: 'new' | 'acknowledged' | 'in_progress' | 'applied' | 'dismissed' | 'expired';
|
||||
created_at: string;
|
||||
updated_at: string;
|
||||
applied_at?: string;
|
||||
expired_at?: string;
|
||||
}
|
||||
|
||||
export interface AIInsightFilters {
|
||||
type?: string;
|
||||
priority?: string;
|
||||
category?: string;
|
||||
source_model?: string;
|
||||
status?: string;
|
||||
min_confidence?: number;
|
||||
type?: 'optimization' | 'alert' | 'prediction' | 'recommendation' | 'insight' | 'anomaly';
|
||||
priority?: 'low' | 'medium' | 'high' | 'critical';
|
||||
status?: 'new' | 'acknowledged' | 'in_progress' | 'applied' | 'dismissed' | 'expired';
|
||||
category?: 'forecasting' | 'inventory' | 'production' | 'procurement' | 'customer' | 'cost' | 'quality' | 'efficiency' | 'demand' | 'maintenance' | 'energy' | 'scheduling';
|
||||
actionable_only?: boolean;
|
||||
start_date?: string;
|
||||
end_date?: string;
|
||||
search?: string;
|
||||
min_confidence?: number;
|
||||
source_service?: string;
|
||||
from_date?: string;
|
||||
to_date?: string;
|
||||
limit?: number;
|
||||
offset?: number;
|
||||
}
|
||||
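The filter shape above now mirrors the backend enums. A small sketch of querying with the new fields (values and the in-scope tenantId are illustrative):

```ts
// Fetch actionable, high-priority inventory insights for a date window.
const filters: AIInsightFilters = {
  status: 'new',
  priority: 'high',
  category: 'inventory',
  actionable_only: true,
  from_date: '2024-06-01', // illustrative dates
  to_date: '2024-06-30',
  limit: 20,
  offset: 0,
};

const page = await aiInsightsService.getInsights(tenantId, filters);
console.log(`${page.items.length} of ${page.total} insights, has_more=${page.has_more}`);
```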
@@ -78,14 +69,15 @@ export interface AIInsightListResponse {
|
||||
|
||||
export interface AIInsightStatsResponse {
|
||||
total_insights: number;
|
||||
insights_by_type: Record<string, number>;
|
||||
insights_by_priority: Record<string, number>;
|
||||
insights_by_category: Record<string, number>;
|
||||
insights_by_status: Record<string, number>;
|
||||
avg_confidence: number;
|
||||
total_impact_value: number;
|
||||
actionable_insights: number;
|
||||
resolved_insights: number;
|
||||
average_confidence: number;
|
||||
high_priority_count: number;
|
||||
medium_priority_count: number;
|
||||
low_priority_count: number;
|
||||
critical_priority_count: number;
|
||||
by_category: Record<string, number>;
|
||||
by_status: Record<string, number>;
|
||||
total_potential_impact?: number;
|
||||
}
|
||||
|
||||
export interface FeedbackRequest {
|
||||
@@ -139,13 +131,12 @@ export class AIInsightsService {
|
||||
if (filters?.type) queryParams.append('type', filters.type);
|
||||
if (filters?.priority) queryParams.append('priority', filters.priority);
|
||||
if (filters?.category) queryParams.append('category', filters.category);
|
||||
if (filters?.source_model) queryParams.append('source_model', filters.source_model);
|
||||
if (filters?.status) queryParams.append('status', filters.status);
|
||||
if (filters?.min_confidence) queryParams.append('min_confidence', filters.min_confidence.toString());
|
||||
if (filters?.actionable_only) queryParams.append('actionable_only', 'true');
|
||||
if (filters?.start_date) queryParams.append('start_date', filters.start_date);
|
||||
if (filters?.end_date) queryParams.append('end_date', filters.end_date);
|
||||
if (filters?.search) queryParams.append('search', filters.search);
|
||||
if (filters?.source_service) queryParams.append('source_service', filters.source_service);
|
||||
if (filters?.from_date) queryParams.append('from_date', filters.from_date);
|
||||
if (filters?.to_date) queryParams.append('to_date', filters.to_date);
|
||||
if (filters?.limit) queryParams.append('limit', filters.limit.toString());
|
||||
if (filters?.offset) queryParams.append('offset', filters.offset.toString());
|
||||
|
||||
@@ -235,23 +226,22 @@ export class AIInsightsService {
|
||||
*/
|
||||
async dismissInsight(
|
||||
tenantId: string,
|
||||
insightId: string,
|
||||
reason?: string
|
||||
): Promise<AIInsight> {
|
||||
const url = `${this.baseUrl}/${tenantId}/insights/${insightId}/dismiss`;
|
||||
return apiClient.post<AIInsight>(url, { reason });
|
||||
insightId: string
|
||||
): Promise<void> {
|
||||
const url = `${this.baseUrl}/${tenantId}/insights/${insightId}`;
|
||||
return apiClient.delete(url);
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolve an insight
|
||||
* Update an insight status (acknowledge, apply, etc.)
|
||||
*/
|
||||
async resolveInsight(
|
||||
async updateInsightStatus(
|
||||
tenantId: string,
|
||||
insightId: string,
|
||||
resolution?: string
|
||||
status: 'acknowledged' | 'in_progress' | 'applied' | 'expired'
|
||||
): Promise<AIInsight> {
|
||||
const url = `${this.baseUrl}/${tenantId}/insights/${insightId}/resolve`;
|
||||
return apiClient.post<AIInsight>(url, { resolution });
|
||||
const url = `${this.baseUrl}/${tenantId}/insights/${insightId}`;
|
||||
return apiClient.patch<AIInsight>(url, { status });
|
||||
}
|
||||
|
||||
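Since the status vocabulary changes here ('active' and 'resolved' give way to 'new', 'acknowledged', 'in_progress', 'applied', 'dismissed', 'expired'), callers still holding old values need a translation step. A hedged sketch of such a mapping; the old-to-new correspondence is an assumption read off this diff, not defined by the backend:

```ts
// Maps legacy insight statuses to the new backend vocabulary (assumed mapping).
type LegacyStatus = 'active' | 'applied' | 'dismissed' | 'resolved';
type NewStatus = 'new' | 'acknowledged' | 'in_progress' | 'applied' | 'dismissed' | 'expired';

function migrateStatus(status: LegacyStatus): NewStatus {
  switch (status) {
    case 'active':   return 'new';      // previously "open" insights
    case 'resolved': return 'applied';  // closest equivalent after this change
    default:         return status;     // 'applied' and 'dismissed' carry over unchanged
  }
}
```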
/**
|
||||
@@ -261,17 +251,18 @@ export class AIInsightsService {
|
||||
tenantId: string,
|
||||
limit: number = 10
|
||||
): Promise<AIInsight[]> {
|
||||
// Fetch critical priority insights first
|
||||
const response = await this.getInsights(tenantId, {
|
||||
priority: 'urgent',
|
||||
status: 'active',
|
||||
priority: 'critical',
|
||||
status: 'new',
|
||||
limit,
|
||||
});
|
||||
|
||||
if (response.items.length < limit) {
|
||||
// Add high priority if not enough urgent
|
||||
// Add high priority if not enough critical
|
||||
const highPriorityResponse = await this.getInsights(tenantId, {
|
||||
priority: 'high',
|
||||
status: 'active',
|
||||
status: 'new',
|
||||
limit: limit - response.items.length,
|
||||
});
|
||||
return [...response.items, ...highPriorityResponse.items];
|
||||
@@ -289,7 +280,7 @@ export class AIInsightsService {
|
||||
): Promise<AIInsight[]> {
|
||||
const response = await this.getInsights(tenantId, {
|
||||
actionable_only: true,
|
||||
status: 'active',
|
||||
status: 'new',
|
||||
limit,
|
||||
});
|
||||
|
||||
@@ -305,8 +296,8 @@ export class AIInsightsService {
|
||||
limit: number = 20
|
||||
): Promise<AIInsight[]> {
|
||||
const response = await this.getInsights(tenantId, {
|
||||
category,
|
||||
status: 'active',
|
||||
category: category as any, // Category comes from user input
|
||||
status: 'new',
|
||||
limit,
|
||||
});
|
||||
|
||||
@@ -321,13 +312,20 @@ export class AIInsightsService {
|
||||
query: string,
|
||||
filters?: Partial<AIInsightFilters>
|
||||
): Promise<AIInsight[]> {
|
||||
// Note: search parameter not supported by backend API
|
||||
// This is a client-side workaround - fetch all and filter
|
||||
const response = await this.getInsights(tenantId, {
|
||||
...filters,
|
||||
search: query,
|
||||
limit: filters?.limit || 50,
|
||||
});
|
||||
|
||||
return response.items;
|
||||
// Filter by query on client side
|
||||
const lowerQuery = query.toLowerCase();
|
||||
return response.items.filter(
|
||||
(insight) =>
|
||||
insight.title.toLowerCase().includes(lowerQuery) ||
|
||||
insight.description.toLowerCase().includes(lowerQuery)
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -18,6 +18,10 @@ interface AIInsight {
|
||||
impact_value?: string;
|
||||
impact_currency?: string;
|
||||
created_at: string;
|
||||
recommendation_actions?: Array<{
|
||||
label: string;
|
||||
action: string;
|
||||
}>;
|
||||
}
|
||||
|
||||
interface AIInsightsBlockProps {
|
||||
@@ -143,17 +147,44 @@ export function AIInsightsBlock({ insights = [], loading = false, onViewAll }: A
|
||||
{insight.description}
|
||||
</p>
|
||||
|
||||
{/* Recommendations */}
|
||||
{insight.recommendation_actions && insight.recommendation_actions.length > 0 && (
|
||||
<div className="mb-2 p-2 bg-[var(--color-primary-50)] border border-[var(--color-primary-100)] rounded">
|
||||
<div className="flex items-start gap-1.5">
|
||||
<Lightbulb className="w-4 h-4 text-[var(--color-primary-600)] flex-shrink-0 mt-0.5" />
|
||||
<div className="flex-1 min-w-0">
|
||||
<p className="text-xs font-medium text-[var(--color-primary-700)] mb-1">
|
||||
{t('dashboard:ai_insights.recommendations', 'Recomendaciones')}:
|
||||
</p>
|
||||
<ul className="space-y-1">
|
||||
{insight.recommendation_actions.slice(0, 2).map((action, idx) => (
|
||||
<li key={idx} className="text-xs text-[var(--color-primary-700)] flex items-start gap-1">
|
||||
<span className="flex-shrink-0">•</span>
|
||||
<span className="flex-1">{action.label || action.action}</span>
|
||||
</li>
|
||||
))}
|
||||
</ul>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Impact Value */}
|
||||
{insight.impact_value && (
|
||||
<div className="flex items-center gap-2">
|
||||
<div className="flex items-center gap-2 mt-2">
|
||||
{insight.type === 'cost_optimization' && (
|
||||
<span className="text-sm font-semibold text-[var(--color-success-600)]">
|
||||
{insight.impact_currency}{insight.impact_value} {t('dashboard:ai_insights.savings')}
|
||||
💰 {insight.impact_currency}{insight.impact_value} {t('dashboard:ai_insights.savings')}
|
||||
</span>
|
||||
)}
|
||||
{insight.type === 'waste_reduction' && (
|
||||
<span className="text-sm font-semibold text-[var(--color-success-600)]">
|
||||
{insight.impact_value} {t('dashboard:ai_insights.reduction')}
|
||||
♻️ {insight.impact_value} {t('dashboard:ai_insights.reduction')}
|
||||
</span>
|
||||
)}
|
||||
{!['cost_optimization', 'waste_reduction'].includes(insight.type) && (
|
||||
<span className="text-sm font-semibold text-[var(--color-success-600)]">
|
||||
💰 {insight.impact_currency}{insight.impact_value}
|
||||
</span>
|
||||
)}
|
||||
</div>
|
||||
|
||||
@@ -16,10 +16,11 @@ const AIInsightsPage: React.FC = () => {
|
||||
const { t } = useTranslation('reasoning');
|
||||
|
||||
// Fetch real insights from API
|
||||
// Note: Backend expects status values: 'new', 'acknowledged', 'in_progress', 'applied', 'dismissed', 'expired'
|
||||
// We fetch 'new' and 'acknowledged' insights (not dismissed, applied, or expired)
|
||||
const { data: insightsData, isLoading, refetch } = useAIInsights(
|
||||
tenantId || '',
|
||||
{
|
||||
status: 'active',
|
||||
category: selectedCategory === 'all' ? undefined : selectedCategory,
|
||||
limit: 100,
|
||||
},
|
||||
@@ -44,20 +45,20 @@ const AIInsightsPage: React.FC = () => {
|
||||
|
||||
const categories = [
|
||||
{ value: 'all', label: 'Todas las Categorías', count: stats?.total_insights || 0 },
|
||||
{ value: 'production', label: 'Producción', count: stats?.insights_by_category?.production || 0 },
|
||||
{ value: 'sales', label: 'Ventas', count: stats?.insights_by_category?.sales || 0 },
|
||||
{ value: 'demand', label: 'Pronósticos', count: stats?.insights_by_category?.demand || 0 },
|
||||
{ value: 'inventory', label: 'Inventario', count: stats?.insights_by_category?.inventory || 0 },
|
||||
{ value: 'procurement', label: 'Compras', count: stats?.insights_by_category?.procurement || 0 },
|
||||
{ value: 'production', label: 'Producción', count: stats?.by_category?.production || 0 },
|
||||
{ value: 'sales', label: 'Ventas', count: stats?.by_category?.sales || 0 },
|
||||
{ value: 'demand', label: 'Pronósticos', count: stats?.by_category?.demand || stats?.by_category?.forecasting || 0 },
|
||||
{ value: 'inventory', label: 'Inventario', count: stats?.by_category?.inventory || 0 },
|
||||
{ value: 'procurement', label: 'Compras', count: stats?.by_category?.procurement || 0 },
|
||||
];
|
||||
|
||||
const aiMetrics = {
|
||||
totalInsights: stats?.total_insights || 0,
|
||||
actionableInsights: stats?.actionable_insights || 0,
|
||||
averageConfidence: stats?.avg_confidence ? Math.round(stats.avg_confidence) : 0,
|
||||
highPriorityInsights: stats?.insights_by_priority?.high || stats?.insights_by_priority?.urgent || 0,
|
||||
mediumPriorityInsights: stats?.insights_by_priority?.medium || 0,
|
||||
lowPriorityInsights: stats?.insights_by_priority?.low || 0,
|
||||
averageConfidence: stats?.average_confidence ? Math.round(stats.average_confidence) : 0,
|
||||
highPriorityInsights: (stats?.high_priority_count || 0) + (stats?.critical_priority_count || 0),
|
||||
mediumPriorityInsights: stats?.medium_priority_count || 0,
|
||||
lowPriorityInsights: stats?.low_priority_count || 0,
|
||||
};
|
||||
|
||||
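The metrics block above reads the renamed stats fields (average_confidence, the *_priority_count fields, by_category, by_status). A sample response and the values it would yield, purely illustrative:

```ts
// Example stats payload shaped like AIInsightStatsResponse (numbers invented for illustration).
const stats = {
  total_insights: 42,
  actionable_insights: 18,
  average_confidence: 83.4,
  high_priority_count: 7,
  medium_priority_count: 20,
  low_priority_count: 12,
  critical_priority_count: 3,
  by_category: { inventory: 15, forecasting: 10, procurement: 17 },
  by_status: { new: 30, acknowledged: 8, applied: 4 },
  total_potential_impact: 12500,
};

// With this payload, highPriorityInsights would be 7 + 3 = 10
// and averageConfidence would round to 83.
```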
const getTypeIcon = (type: string) => {
|
||||
@@ -258,22 +259,73 @@ const AIInsightsPage: React.FC = () => {
|
||||
|
||||
<h3 className="text-lg font-semibold text-[var(--text-primary)] mb-2">{insight.title}</h3>
|
||||
<p className="text-[var(--text-secondary)] mb-3">{getInsightDescription(insight)}</p>
|
||||
<p className="text-sm font-medium text-[var(--color-success)] mb-4">{insight.impact}</p>
|
||||
|
||||
{/* Metrics */}
|
||||
<div className="grid grid-cols-1 md:grid-cols-3 gap-4 mb-4">
|
||||
{Object.entries(insight.metrics).map(([key, value]) => (
|
||||
<div key={key} className="bg-[var(--bg-secondary)] p-3 rounded-lg">
|
||||
<p className="text-xs text-[var(--text-tertiary)] uppercase tracking-wider">
|
||||
{key.replace(/([A-Z])/g, ' $1').replace(/^./, str => str.toUpperCase())}
|
||||
</p>
|
||||
<p className="text-sm font-semibold text-[var(--text-primary)]">{value}</p>
|
||||
{/* Impact */}
|
||||
{insight.impact_value && insight.impact_type && (
|
||||
<div className="mb-4 p-3 bg-[var(--color-success)]/10 border border-[var(--color-success)]/20 rounded-lg">
|
||||
<p className="text-sm font-medium text-[var(--color-success)]">
|
||||
💰 Impacto: {insight.impact_type.replace(/_/g, ' ')} - {insight.impact_value} {insight.impact_unit || ''}
|
||||
</p>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Recommendation */}
|
||||
{insight.recommendation_actions && insight.recommendation_actions.length > 0 && (
|
||||
<div className="mb-4 p-4 bg-[var(--color-primary)]/5 border border-[var(--color-primary)]/20 rounded-lg">
|
||||
<div className="flex items-start gap-2 mb-2">
|
||||
<Lightbulb className="w-5 h-5 text-[var(--color-primary)] flex-shrink-0 mt-0.5" />
|
||||
<h4 className="text-sm font-semibold text-[var(--text-primary)]">Recomendaciones</h4>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
<ul className="space-y-2 ml-7">
|
||||
{insight.recommendation_actions.map((action, idx) => (
|
||||
<li key={idx} className="text-sm text-[var(--text-secondary)] flex items-start gap-2">
|
||||
<span className="text-[var(--color-primary)] flex-shrink-0">•</span>
|
||||
<span>{action.label || action.action}</span>
|
||||
</li>
|
||||
))}
|
||||
</ul>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Metrics - Only show non-redundant metrics */}
|
||||
{insight.metrics_json && Object.keys(insight.metrics_json).length > 0 && (
|
||||
<div className="grid grid-cols-1 md:grid-cols-3 gap-4 mb-4">
|
||||
{Object.entries(insight.metrics_json)
|
||||
.filter(([key]) => !['pattern', 'recommendation'].includes(key)) // Filter out already displayed data
|
||||
.map(([key, value]) => {
|
||||
// Format the value for display
|
||||
let displayValue: string;
|
||||
if (typeof value === 'object' && value !== null) {
|
||||
// For objects, try to display them nicely
|
||||
if (Object.keys(value).length < 5) {
|
||||
displayValue = Object.entries(value)
|
||||
.map(([k, v]) => `${k}: ${v}`)
|
||||
.join(', ');
|
||||
} else {
|
||||
displayValue = JSON.stringify(value, null, 2);
|
||||
}
|
||||
} else if (typeof value === 'number') {
|
||||
displayValue = value.toLocaleString();
|
||||
} else {
|
||||
displayValue = String(value);
|
||||
}
|
||||
|
||||
return (
|
||||
<div key={key} className="bg-[var(--bg-secondary)] p-3 rounded-lg">
|
||||
<p className="text-xs text-[var(--text-tertiary)] uppercase tracking-wider">
|
||||
{key.replace(/_/g, ' ').replace(/([A-Z])/g, ' $1').replace(/^./, str => str.toUpperCase())}
|
||||
</p>
|
||||
<p className="text-sm font-semibold text-[var(--text-primary)] break-words">
|
||||
{displayValue}
|
||||
</p>
|
||||
</div>
|
||||
);
|
||||
})}
|
||||
</div>
|
||||
)}
|
||||
|
||||
<div className="flex items-center justify-between">
|
||||
<p className="text-xs text-[var(--text-tertiary)]">{insight.timestamp}</p>
|
||||
<p className="text-xs text-[var(--text-tertiary)]">{insight.created_at ? new Date(insight.created_at).toLocaleString() : ''}</p>
|
||||
{insight.actionable && (
|
||||
<div className="flex gap-2">
|
||||
<Button
|
||||
|
||||
@@ -394,7 +394,7 @@ export const routesConfig: RouteConfig[] = [
    icon: 'insights',
    requiresAuth: true,
    requiredRoles: ROLE_COMBINATIONS.MANAGEMENT_ACCESS,
    requiredAnalyticsLevel: 'predictive',
    requiredAnalyticsLevel: 'advanced', // Available for Professional and Enterprise tiers
    showInNavigation: true,
    showInBreadcrumbs: true,
  },
@@ -62,6 +62,9 @@ class OrchestratorClient:
            params={
                "tenant_id": tenant_id,
                **query_params
            },
            headers={
                "x-internal-service": "alert-intelligence"
            }
        )
@@ -450,5 +450,107 @@ RECOMMENDATION_TEMPLATES = {
|
||||
"potential_time_saved_minutes": "time_saved",
|
||||
"suggestion": "suggestion"
|
||||
}
|
||||
},
|
||||
|
||||
# ==================== AI INSIGHTS RECOMMENDATIONS ====================
|
||||
|
||||
"ai_yield_prediction": {
|
||||
"title_key": "recommendations.ai_yield_prediction.title",
|
||||
"title_params": {
|
||||
"recipe_name": "recipe_name"
|
||||
},
|
||||
"message_variants": {
|
||||
"generic": "recommendations.ai_yield_prediction.message"
|
||||
},
|
||||
"message_params": {
|
||||
"recipe_name": "recipe_name",
|
||||
"predicted_yield_percent": "predicted_yield",
|
||||
"confidence_percent": "confidence",
|
||||
"recommendation": "recommendation"
|
||||
}
|
||||
},
|
||||
|
||||
"ai_safety_stock_optimization": {
|
||||
"title_key": "recommendations.ai_safety_stock_optimization.title",
|
||||
"title_params": {
|
||||
"ingredient_name": "ingredient_name"
|
||||
},
|
||||
"message_variants": {
|
||||
"generic": "recommendations.ai_safety_stock_optimization.message"
|
||||
},
|
||||
"message_params": {
|
||||
"ingredient_name": "ingredient_name",
|
||||
"suggested_safety_stock_kg": "suggested_safety_stock",
|
||||
"current_safety_stock_kg": "current_safety_stock",
|
||||
"estimated_savings_eur": "estimated_savings",
|
||||
"confidence_percent": "confidence"
|
||||
}
|
||||
},
|
||||
|
||||
"ai_supplier_recommendation": {
|
||||
"title_key": "recommendations.ai_supplier_recommendation.title",
|
||||
"title_params": {
|
||||
"supplier_name": "supplier_name"
|
||||
},
|
||||
"message_variants": {
|
||||
"generic": "recommendations.ai_supplier_recommendation.message"
|
||||
},
|
||||
"message_params": {
|
||||
"supplier_name": "supplier_name",
|
||||
"reliability_score": "reliability_score",
|
||||
"recommendation": "recommendation",
|
||||
"confidence_percent": "confidence"
|
||||
}
|
||||
},
|
||||
|
||||
"ai_price_forecast": {
|
||||
"title_key": "recommendations.ai_price_forecast.title",
|
||||
"title_params": {
|
||||
"ingredient_name": "ingredient_name"
|
||||
},
|
||||
"message_variants": {
|
||||
"generic": "recommendations.ai_price_forecast.message"
|
||||
},
|
||||
"message_params": {
|
||||
"ingredient_name": "ingredient_name",
|
||||
"predicted_price_eur": "predicted_price",
|
||||
"current_price_eur": "current_price",
|
||||
"price_trend": "price_trend",
|
||||
"recommendation": "recommendation",
|
||||
"confidence_percent": "confidence"
|
||||
}
|
||||
},
|
||||
|
||||
"ai_demand_forecast": {
|
||||
"title_key": "recommendations.ai_demand_forecast.title",
|
||||
"title_params": {
|
||||
"product_name": "product_name"
|
||||
},
|
||||
"message_variants": {
|
||||
"generic": "recommendations.ai_demand_forecast.message"
|
||||
},
|
||||
"message_params": {
|
||||
"product_name": "product_name",
|
||||
"predicted_demand": "predicted_demand",
|
||||
"forecast_period": "forecast_period",
|
||||
"confidence_percent": "confidence",
|
||||
"recommendation": "recommendation"
|
||||
}
|
||||
},
|
||||
|
||||
"ai_business_rule": {
|
||||
"title_key": "recommendations.ai_business_rule.title",
|
||||
"title_params": {
|
||||
"rule_category": "rule_category"
|
||||
},
|
||||
"message_variants": {
|
||||
"generic": "recommendations.ai_business_rule.message"
|
||||
},
|
||||
"message_params": {
|
||||
"rule_category": "rule_category",
|
||||
"rule_description": "rule_description",
|
||||
"confidence_percent": "confidence",
|
||||
"recommendation": "recommendation"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -344,7 +344,7 @@ async def generate_batch_forecast(
|
||||
return BatchForecastResponse(
|
||||
id=str(uuid.uuid4()),
|
||||
tenant_id=tenant_id,
|
||||
batch_name=getattr(request, 'batch_name', f"orchestrator-batch-{datetime.now().strftime('%Y%m%d')}"),
|
||||
batch_name=request.batch_name,
|
||||
status="completed",
|
||||
total_products=0,
|
||||
completed_products=0,
|
||||
@@ -358,8 +358,8 @@ async def generate_batch_forecast(
|
||||
|
||||
# IMPROVEMENT: For large batches (>5 products), use background task
|
||||
# For small batches, execute synchronously for immediate results
|
||||
batch_name = getattr(request, 'batch_name', f"batch-{datetime.now().strftime('%Y%m%d_%H%M%S')}")
|
||||
forecast_days = getattr(request, 'forecast_days', 7)
|
||||
batch_name = request.batch_name
|
||||
forecast_days = request.forecast_days
|
||||
|
||||
# Create batch record first
|
||||
batch_id = str(uuid.uuid4())
|
||||
|
||||
@@ -7,7 +7,7 @@ Provides endpoints to trigger ML insight generation for:
|
||||
- Seasonal trend detection
|
||||
"""
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, BackgroundTasks
|
||||
from fastapi import APIRouter, Depends, HTTPException, BackgroundTasks, Request
|
||||
from pydantic import BaseModel, Field
|
||||
from typing import Optional, List
|
||||
from uuid import UUID
|
||||
@@ -62,6 +62,70 @@ class RulesGenerationResponse(BaseModel):
|
||||
errors: List[str] = []
|
||||
|
||||
|
||||
class DemandAnalysisRequest(BaseModel):
|
||||
"""Request schema for demand analysis"""
|
||||
product_ids: Optional[List[str]] = Field(
|
||||
None,
|
||||
description="Specific product IDs to analyze. If None, analyzes all products"
|
||||
)
|
||||
lookback_days: int = Field(
|
||||
90,
|
||||
description="Days of historical data to analyze",
|
||||
ge=30,
|
||||
le=365
|
||||
)
|
||||
forecast_horizon_days: int = Field(
|
||||
30,
|
||||
description="Days to forecast ahead",
|
||||
ge=7,
|
||||
le=90
|
||||
)
|
||||
|
||||
|
||||
class DemandAnalysisResponse(BaseModel):
|
||||
"""Response schema for demand analysis"""
|
||||
success: bool
|
||||
message: str
|
||||
tenant_id: str
|
||||
products_analyzed: int
|
||||
total_insights_generated: int
|
||||
total_insights_posted: int
|
||||
insights_by_product: dict
|
||||
errors: List[str] = []
|
||||
|
||||
|
||||
class BusinessRulesAnalysisRequest(BaseModel):
|
||||
"""Request schema for business rules analysis"""
|
||||
product_ids: Optional[List[str]] = Field(
|
||||
None,
|
||||
description="Specific product IDs to analyze. If None, analyzes all products"
|
||||
)
|
||||
lookback_days: int = Field(
|
||||
90,
|
||||
description="Days of historical data to analyze",
|
||||
ge=30,
|
||||
le=365
|
||||
)
|
||||
min_samples: int = Field(
|
||||
10,
|
||||
description="Minimum samples required for rule analysis",
|
||||
ge=5,
|
||||
le=100
|
||||
)
|
||||
|
||||
|
||||
class BusinessRulesAnalysisResponse(BaseModel):
|
||||
"""Response schema for business rules analysis"""
|
||||
success: bool
|
||||
message: str
|
||||
tenant_id: str
|
||||
products_analyzed: int
|
||||
total_insights_generated: int
|
||||
total_insights_posted: int
|
||||
insights_by_product: dict
|
||||
errors: List[str] = []
|
||||
|
||||
|
||||
# ================================================================
|
||||
# API ENDPOINTS
|
||||
# ================================================================
|
||||
@@ -70,6 +134,7 @@ class RulesGenerationResponse(BaseModel):
|
||||
async def trigger_rules_generation(
|
||||
tenant_id: str,
|
||||
request_data: RulesGenerationRequest,
|
||||
request: Request,
|
||||
db: AsyncSession = Depends(get_db)
|
||||
):
|
||||
"""
|
||||
@@ -103,8 +168,11 @@ async def trigger_rules_generation(
|
||||
from shared.clients.inventory_client import InventoryServiceClient
|
||||
from app.core.config import settings
|
||||
|
||||
# Get event publisher from app state
|
||||
event_publisher = getattr(request.app.state, 'event_publisher', None)
|
||||
|
||||
# Initialize orchestrator and clients
|
||||
orchestrator = RulesOrchestrator()
|
||||
orchestrator = RulesOrchestrator(event_publisher=event_publisher)
|
||||
inventory_client = InventoryServiceClient(settings)
|
||||
|
||||
# Get products to analyze from inventory service via API
|
||||
@@ -278,6 +346,415 @@ async def trigger_rules_generation(
|
||||
)
|
||||
|
||||
|
||||
@router.post("/analyze-demand", response_model=DemandAnalysisResponse)
|
||||
async def trigger_demand_analysis(
|
||||
tenant_id: str,
|
||||
request_data: DemandAnalysisRequest,
|
||||
request: Request,
|
||||
db: AsyncSession = Depends(get_db)
|
||||
):
|
||||
"""
|
||||
Trigger demand pattern analysis from historical sales data.
|
||||
|
||||
This endpoint:
|
||||
1. Fetches historical sales data for specified products
|
||||
2. Runs the DemandInsightsOrchestrator to analyze patterns
|
||||
3. Generates insights about demand forecasting optimization
|
||||
4. Posts insights to AI Insights Service
|
||||
5. Publishes events to RabbitMQ
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant UUID
|
||||
request_data: Demand analysis parameters
|
||||
request: FastAPI request object to access app state
|
||||
db: Database session
|
||||
|
||||
Returns:
|
||||
DemandAnalysisResponse with analysis results
|
||||
"""
|
||||
logger.info(
|
||||
"ML insights demand analysis requested",
|
||||
tenant_id=tenant_id,
|
||||
product_ids=request_data.product_ids,
|
||||
lookback_days=request_data.lookback_days
|
||||
)
|
||||
|
||||
try:
|
||||
# Import ML orchestrator and clients
|
||||
from app.ml.demand_insights_orchestrator import DemandInsightsOrchestrator
|
||||
from shared.clients.sales_client import SalesServiceClient
|
||||
from shared.clients.inventory_client import InventoryServiceClient
|
||||
from app.core.config import settings
|
||||
|
||||
# Get event publisher from app state
|
||||
event_publisher = getattr(request.app.state, 'event_publisher', None)
|
||||
|
||||
# Initialize orchestrator and clients
|
||||
orchestrator = DemandInsightsOrchestrator(event_publisher=event_publisher)
|
||||
inventory_client = InventoryServiceClient(settings)
|
||||
|
||||
# Get products to analyze from inventory service via API
|
||||
if request_data.product_ids:
|
||||
# Fetch specific products
|
||||
products = []
|
||||
for product_id in request_data.product_ids:
|
||||
product = await inventory_client.get_ingredient_by_id(
|
||||
ingredient_id=UUID(product_id),
|
||||
tenant_id=tenant_id
|
||||
)
|
||||
if product:
|
||||
products.append(product)
|
||||
else:
|
||||
# Fetch all products for tenant (limit to 10)
|
||||
all_products = await inventory_client.get_all_ingredients(tenant_id=tenant_id)
|
||||
products = all_products[:10] # Limit to prevent timeout
|
||||
|
||||
if not products:
|
||||
return DemandAnalysisResponse(
|
||||
success=False,
|
||||
message="No products found for analysis",
|
||||
tenant_id=tenant_id,
|
||||
products_analyzed=0,
|
||||
total_insights_generated=0,
|
||||
total_insights_posted=0,
|
||||
insights_by_product={},
|
||||
errors=["No products found"]
|
||||
)
|
||||
|
||||
# Initialize sales client to fetch historical data
|
||||
sales_client = SalesServiceClient(config=settings, calling_service_name="forecasting")
|
||||
|
||||
# Calculate date range
|
||||
end_date = datetime.utcnow()
|
||||
start_date = end_date - timedelta(days=request_data.lookback_days)
|
||||
|
||||
# Process each product
|
||||
total_insights_generated = 0
|
||||
total_insights_posted = 0
|
||||
insights_by_product = {}
|
||||
errors = []
|
||||
|
||||
for product in products:
|
||||
try:
|
||||
product_id = str(product['id'])
|
||||
product_name = product.get('name', 'Unknown')
|
||||
logger.info(f"Analyzing product {product_name} ({product_id})")
|
||||
|
||||
# Fetch sales data for product
|
||||
sales_data = await sales_client.get_sales_data(
|
||||
tenant_id=tenant_id,
|
||||
product_id=product_id,
|
||||
start_date=start_date.strftime('%Y-%m-%d'),
|
||||
end_date=end_date.strftime('%Y-%m-%d')
|
||||
)
|
||||
|
||||
if not sales_data:
|
||||
logger.warning(f"No sales data for product {product_id}")
|
||||
continue
|
||||
|
||||
# Convert to DataFrame
|
||||
sales_df = pd.DataFrame(sales_data)
|
||||
|
||||
if len(sales_df) < 30: # Minimum for demand analysis
|
||||
logger.warning(
|
||||
f"Insufficient data for product {product_id}: "
|
||||
f"{len(sales_df)} samples < 30 required"
|
||||
)
|
||||
continue
|
||||
|
||||
# Check what columns are available and map to expected format
|
||||
logger.debug(f"Sales data columns for product {product_id}: {sales_df.columns.tolist()}")
|
||||
|
||||
# Map common field names to 'quantity' and 'date'
|
||||
if 'quantity' not in sales_df.columns:
|
||||
if 'total_quantity' in sales_df.columns:
|
||||
sales_df['quantity'] = sales_df['total_quantity']
|
||||
elif 'amount' in sales_df.columns:
|
||||
sales_df['quantity'] = sales_df['amount']
|
||||
else:
|
||||
logger.warning(f"No quantity field found for product {product_id}, skipping")
|
||||
continue
|
||||
|
||||
if 'date' not in sales_df.columns:
|
||||
if 'sale_date' in sales_df.columns:
|
||||
sales_df['date'] = sales_df['sale_date']
|
||||
else:
|
||||
logger.warning(f"No date field found for product {product_id}, skipping")
|
||||
continue
|
||||
|
||||
# Prepare sales data with required columns
|
||||
sales_df['date'] = pd.to_datetime(sales_df['date'])
|
||||
sales_df['quantity'] = sales_df['quantity'].astype(float)
|
||||
sales_df['day_of_week'] = sales_df['date'].dt.dayofweek
|
||||
|
||||
# Run demand analysis
|
||||
results = await orchestrator.analyze_and_post_demand_insights(
|
||||
tenant_id=tenant_id,
|
||||
inventory_product_id=product_id,
|
||||
sales_data=sales_df,
|
||||
forecast_horizon_days=request_data.forecast_horizon_days,
|
||||
min_history_days=request_data.lookback_days
|
||||
)
|
||||
|
||||
# Track results
|
||||
total_insights_generated += results['insights_generated']
|
||||
total_insights_posted += results['insights_posted']
|
||||
insights_by_product[product_id] = {
|
||||
'product_name': product_name,
|
||||
'insights_posted': results['insights_posted'],
|
||||
'trend_analysis': results.get('trend_analysis', {})
|
||||
}
|
||||
|
||||
logger.info(
|
||||
f"Product {product_id} demand analysis complete",
|
||||
insights_posted=results['insights_posted']
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
error_msg = f"Error analyzing product {product_id}: {str(e)}"
|
||||
logger.error(error_msg, exc_info=True)
|
||||
errors.append(error_msg)
|
||||
|
||||
# Close orchestrator
|
||||
await orchestrator.close()
|
||||
|
||||
# Build response
|
||||
response = DemandAnalysisResponse(
|
||||
success=total_insights_posted > 0,
|
||||
message=f"Successfully generated {total_insights_posted} insights from {len(products)} products",
|
||||
tenant_id=tenant_id,
|
||||
products_analyzed=len(products),
|
||||
total_insights_generated=total_insights_generated,
|
||||
total_insights_posted=total_insights_posted,
|
||||
insights_by_product=insights_by_product,
|
||||
errors=errors
|
||||
)
|
||||
|
||||
logger.info(
|
||||
"ML insights demand analysis complete",
|
||||
tenant_id=tenant_id,
|
||||
total_insights=total_insights_posted
|
||||
)
|
||||
|
||||
return response
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
"ML insights demand analysis failed",
|
||||
tenant_id=tenant_id,
|
||||
error=str(e),
|
||||
exc_info=True
|
||||
)
|
||||
raise HTTPException(
|
||||
status_code=500,
|
||||
detail=f"Demand analysis failed: {str(e)}"
|
||||
)
|
||||
|
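A sketch of invoking the new demand-analysis endpoint from the frontend. The URL prefix and the way tenant_id is passed are assumptions (this diff only shows the route suffix /analyze-demand); the request body follows DemandAnalysisRequest above:

```ts
// Triggers demand analysis for two products (IDs and route prefix are placeholders).
async function runDemandAnalysis(tenantId: string) {
  const response = await fetch(`/api/forecasting/${tenantId}/ml/insights/analyze-demand`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      product_ids: ['<product-uuid-1>', '<product-uuid-2>'],
      lookback_days: 90,         // 30-365 per DemandAnalysisRequest
      forecast_horizon_days: 30, // 7-90 per DemandAnalysisRequest
    }),
  });
  if (!response.ok) throw new Error(`Demand analysis failed: ${response.status}`);
  return response.json(); // DemandAnalysisResponse
}
```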
||||
|
||||
@router.post("/analyze-business-rules", response_model=BusinessRulesAnalysisResponse)
|
||||
async def trigger_business_rules_analysis(
|
||||
tenant_id: str,
|
||||
request_data: BusinessRulesAnalysisRequest,
|
||||
request: Request,
|
||||
db: AsyncSession = Depends(get_db)
|
||||
):
|
||||
"""
|
||||
Trigger business rules optimization analysis from historical sales data.
|
||||
|
||||
This endpoint:
|
||||
1. Fetches historical sales data for specified products
|
||||
2. Runs the BusinessRulesInsightsOrchestrator to analyze rules
|
||||
3. Generates insights about business rule optimization
|
||||
4. Posts insights to AI Insights Service
|
||||
5. Publishes events to RabbitMQ
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant UUID
|
||||
request_data: Business rules analysis parameters
|
||||
request: FastAPI request object to access app state
|
||||
db: Database session
|
||||
|
||||
Returns:
|
||||
BusinessRulesAnalysisResponse with analysis results
|
||||
"""
|
||||
logger.info(
|
||||
"ML insights business rules analysis requested",
|
||||
tenant_id=tenant_id,
|
||||
product_ids=request_data.product_ids,
|
||||
lookback_days=request_data.lookback_days
|
||||
)
|
||||
|
||||
try:
|
||||
# Import ML orchestrator and clients
|
||||
from app.ml.business_rules_insights_orchestrator import BusinessRulesInsightsOrchestrator
|
||||
from shared.clients.sales_client import SalesServiceClient
|
||||
from shared.clients.inventory_client import InventoryServiceClient
|
||||
from app.core.config import settings
|
||||
|
||||
# Get event publisher from app state
|
||||
event_publisher = getattr(request.app.state, 'event_publisher', None)
|
||||
|
||||
# Initialize orchestrator and clients
|
||||
orchestrator = BusinessRulesInsightsOrchestrator(event_publisher=event_publisher)
|
||||
inventory_client = InventoryServiceClient(settings)
|
||||
|
||||
# Get products to analyze from inventory service via API
|
||||
if request_data.product_ids:
|
||||
# Fetch specific products
|
||||
products = []
|
||||
for product_id in request_data.product_ids:
|
||||
product = await inventory_client.get_ingredient_by_id(
|
||||
ingredient_id=UUID(product_id),
|
||||
tenant_id=tenant_id
|
||||
)
|
||||
if product:
|
||||
products.append(product)
|
||||
else:
|
||||
# Fetch all products for tenant (limit to 10)
|
||||
all_products = await inventory_client.get_all_ingredients(tenant_id=tenant_id)
|
||||
products = all_products[:10] # Limit to prevent timeout
|
||||
|
||||
if not products:
|
||||
return BusinessRulesAnalysisResponse(
|
||||
success=False,
|
||||
message="No products found for analysis",
|
||||
tenant_id=tenant_id,
|
||||
products_analyzed=0,
|
||||
total_insights_generated=0,
|
||||
total_insights_posted=0,
|
||||
insights_by_product={},
|
||||
errors=["No products found"]
|
||||
)
|
||||
|
||||
# Initialize sales client to fetch historical data
|
||||
sales_client = SalesServiceClient(config=settings, calling_service_name="forecasting")
|
||||
|
||||
# Calculate date range
|
||||
end_date = datetime.utcnow()
|
||||
start_date = end_date - timedelta(days=request_data.lookback_days)
|
||||
|
||||
# Process each product
|
||||
total_insights_generated = 0
|
||||
total_insights_posted = 0
|
||||
insights_by_product = {}
|
||||
errors = []
|
||||
|
||||
for product in products:
|
||||
try:
|
||||
product_id = str(product['id'])
|
||||
product_name = product.get('name', 'Unknown')
|
||||
logger.info(f"Analyzing product {product_name} ({product_id})")
|
||||
|
||||
# Fetch sales data for product
|
||||
sales_data = await sales_client.get_sales_data(
|
||||
tenant_id=tenant_id,
|
||||
product_id=product_id,
|
||||
start_date=start_date.strftime('%Y-%m-%d'),
|
||||
end_date=end_date.strftime('%Y-%m-%d')
|
||||
)
|
||||
|
||||
if not sales_data:
|
||||
logger.warning(f"No sales data for product {product_id}")
|
||||
continue
|
||||
|
||||
# Convert to DataFrame
|
||||
sales_df = pd.DataFrame(sales_data)
|
||||
|
||||
if len(sales_df) < request_data.min_samples:
|
||||
logger.warning(
|
||||
f"Insufficient data for product {product_id}: "
|
||||
f"{len(sales_df)} samples < {request_data.min_samples} required"
|
||||
)
|
||||
continue
|
||||
|
||||
# Check what columns are available and map to expected format
|
||||
logger.debug(f"Sales data columns for product {product_id}: {sales_df.columns.tolist()}")
|
||||
|
||||
# Map common field names to 'quantity' and 'date'
|
||||
if 'quantity' not in sales_df.columns:
|
||||
if 'total_quantity' in sales_df.columns:
|
||||
sales_df['quantity'] = sales_df['total_quantity']
|
||||
elif 'amount' in sales_df.columns:
|
||||
sales_df['quantity'] = sales_df['amount']
|
||||
else:
|
||||
logger.warning(f"No quantity field found for product {product_id}, skipping")
|
||||
continue
|
||||
|
||||
if 'date' not in sales_df.columns:
|
||||
if 'sale_date' in sales_df.columns:
|
||||
sales_df['date'] = sales_df['sale_date']
|
||||
else:
|
||||
logger.warning(f"No date field found for product {product_id}, skipping")
|
||||
continue
|
||||
|
||||
# Prepare sales data with required columns
|
||||
sales_df['date'] = pd.to_datetime(sales_df['date'])
|
||||
sales_df['quantity'] = sales_df['quantity'].astype(float)
|
||||
sales_df['day_of_week'] = sales_df['date'].dt.dayofweek
|
||||
|
||||
# Run business rules analysis
|
||||
results = await orchestrator.analyze_and_post_business_rules_insights(
|
||||
tenant_id=tenant_id,
|
||||
inventory_product_id=product_id,
|
||||
sales_data=sales_df,
|
||||
min_samples=request_data.min_samples
|
||||
)
|
||||
|
||||
# Track results
|
||||
total_insights_generated += results['insights_generated']
|
||||
total_insights_posted += results['insights_posted']
|
||||
insights_by_product[product_id] = {
|
||||
'product_name': product_name,
|
||||
'insights_posted': results['insights_posted'],
|
||||
'rules_learned': len(results.get('rules', {}))
|
||||
}
|
||||
|
||||
logger.info(
|
||||
f"Product {product_id} business rules analysis complete",
|
||||
insights_posted=results['insights_posted']
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
error_msg = f"Error analyzing product {product_id}: {str(e)}"
|
||||
logger.error(error_msg, exc_info=True)
|
||||
errors.append(error_msg)
|
||||
|
||||
# Close orchestrator
|
||||
await orchestrator.close()
|
||||
|
||||
# Build response
|
||||
response = BusinessRulesAnalysisResponse(
|
||||
success=total_insights_posted > 0,
|
||||
message=f"Successfully generated {total_insights_posted} insights from {len(products)} products",
|
||||
tenant_id=tenant_id,
|
||||
products_analyzed=len(products),
|
||||
total_insights_generated=total_insights_generated,
|
||||
total_insights_posted=total_insights_posted,
|
||||
insights_by_product=insights_by_product,
|
||||
errors=errors
|
||||
)
|
||||
|
||||
logger.info(
|
||||
"ML insights business rules analysis complete",
|
||||
tenant_id=tenant_id,
|
||||
total_insights=total_insights_posted
|
||||
)
|
||||
|
||||
return response
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
"ML insights business rules analysis failed",
|
||||
tenant_id=tenant_id,
|
||||
error=str(e),
|
||||
exc_info=True
|
||||
)
|
||||
raise HTTPException(
|
||||
status_code=500,
|
||||
detail=f"Business rules analysis failed: {str(e)}"
|
||||
)
|
||||
|
||||
|
||||
@router.get("/health")
async def ml_insights_health():
    """Health check for ML insights endpoints"""
@@ -285,6 +762,8 @@ async def ml_insights_health():
        "status": "healthy",
        "service": "forecasting-ml-insights",
        "endpoints": [
            "POST /ml/insights/generate-rules"
            "POST /ml/insights/generate-rules",
            "POST /ml/insights/analyze-demand",
            "POST /ml/insights/analyze-business-rules"
        ]
    }

@@ -137,6 +137,9 @@ class ForecastingService(StandardFastAPIService):
|
||||
else:
|
||||
self.logger.error("Event publisher not initialized, alert service unavailable")
|
||||
|
||||
# Store the event publisher in app state for internal API access
|
||||
app.state.event_publisher = self.event_publisher
|
||||
|
||||
|
||||
async def on_shutdown(self, app: FastAPI):
|
||||
"""Custom shutdown logic for forecasting service"""
|
||||
|
||||
@@ -0,0 +1,393 @@
|
||||
"""
|
||||
Business Rules Insights Orchestrator
|
||||
Coordinates business rules optimization and insight posting
|
||||
"""
|
||||
|
||||
import pandas as pd
|
||||
from typing import Dict, List, Any, Optional
|
||||
import structlog
|
||||
from datetime import datetime
|
||||
from uuid import UUID
|
||||
import sys
|
||||
import os
|
||||
|
||||
# Add shared clients to path
|
||||
sys.path.append(os.path.join(os.path.dirname(__file__), '../../../..'))
|
||||
from shared.clients.ai_insights_client import AIInsightsClient
|
||||
from shared.messaging import UnifiedEventPublisher
|
||||
|
||||
from app.ml.dynamic_rules_engine import DynamicRulesEngine
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
|
||||
class BusinessRulesInsightsOrchestrator:
|
||||
"""
|
||||
Orchestrates business rules analysis and insight generation workflow.
|
||||
|
||||
Workflow:
|
||||
1. Analyze dynamic business rule performance
|
||||
2. Generate insights for rule optimization
|
||||
3. Post insights to AI Insights Service
|
||||
4. Publish recommendation events to RabbitMQ
|
||||
5. Provide rule optimization for forecasting
|
||||
6. Track rule effectiveness and improvements
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
ai_insights_base_url: str = "http://ai-insights-service:8000",
|
||||
event_publisher: Optional[UnifiedEventPublisher] = None
|
||||
):
|
||||
self.rules_engine = DynamicRulesEngine()
|
||||
self.ai_insights_client = AIInsightsClient(ai_insights_base_url)
|
||||
self.event_publisher = event_publisher
|
||||
|
||||
async def analyze_and_post_business_rules_insights(
|
||||
self,
|
||||
tenant_id: str,
|
||||
inventory_product_id: str,
|
||||
sales_data: pd.DataFrame,
|
||||
min_samples: int = 10
|
||||
) -> Dict[str, Any]:
|
||||
"""
|
||||
Complete workflow: Analyze business rules and post insights.
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant identifier
|
||||
inventory_product_id: Product identifier
|
||||
sales_data: Historical sales data
|
||||
min_samples: Minimum samples for rule analysis
|
||||
|
||||
Returns:
|
||||
Workflow results with analysis and posted insights
|
||||
"""
|
||||
logger.info(
|
||||
"Starting business rules analysis workflow",
|
||||
tenant_id=tenant_id,
|
||||
inventory_product_id=inventory_product_id,
|
||||
samples=len(sales_data)
|
||||
)
|
||||
|
||||
# Step 1: Learn and analyze rules
|
||||
rules_results = await self.rules_engine.learn_all_rules(
|
||||
tenant_id=tenant_id,
|
||||
inventory_product_id=inventory_product_id,
|
||||
sales_data=sales_data,
|
||||
external_data=None,
|
||||
min_samples=min_samples
|
||||
)
|
||||
|
||||
logger.info(
|
||||
"Business rules analysis complete",
|
||||
insights_generated=len(rules_results.get('insights', [])),
|
||||
rules_learned=len(rules_results.get('rules', {}))
|
||||
)
|
||||
|
||||
# Step 2: Enrich insights with tenant_id and product context
|
||||
enriched_insights = self._enrich_insights(
|
||||
rules_results.get('insights', []),
|
||||
tenant_id,
|
||||
inventory_product_id
|
||||
)
|
||||
|
||||
# Step 3: Post insights to AI Insights Service
|
||||
if enriched_insights:
|
||||
post_results = await self.ai_insights_client.create_insights_bulk(
|
||||
tenant_id=UUID(tenant_id),
|
||||
insights=enriched_insights
|
||||
)
|
||||
|
||||
logger.info(
|
||||
"Business rules insights posted to AI Insights Service",
|
||||
inventory_product_id=inventory_product_id,
|
||||
total=post_results['total'],
|
||||
successful=post_results['successful'],
|
||||
failed=post_results['failed']
|
||||
)
|
||||
else:
|
||||
post_results = {'total': 0, 'successful': 0, 'failed': 0}
|
||||
logger.info("No insights to post for product", inventory_product_id=inventory_product_id)
|
||||
|
||||
# Step 4: Publish insight events to RabbitMQ
|
||||
created_insights = post_results.get('created_insights', [])
|
||||
if created_insights:
|
||||
product_context = {'inventory_product_id': inventory_product_id}
|
||||
await self._publish_insight_events(
|
||||
tenant_id=tenant_id,
|
||||
insights=created_insights,
|
||||
product_context=product_context
|
||||
)
|
||||
|
||||
# Step 5: Return comprehensive results
|
||||
return {
|
||||
'tenant_id': tenant_id,
|
||||
'inventory_product_id': inventory_product_id,
|
||||
'learned_at': rules_results['learned_at'],
|
||||
'rules': rules_results.get('rules', {}),
|
||||
'insights_generated': len(enriched_insights),
|
||||
'insights_posted': post_results['successful'],
|
||||
'insights_failed': post_results['failed'],
|
||||
'created_insights': post_results.get('created_insights', [])
|
||||
}
|
||||
|
||||
def _enrich_insights(
|
||||
self,
|
||||
insights: List[Dict[str, Any]],
|
||||
tenant_id: str,
|
||||
inventory_product_id: str
|
||||
) -> List[Dict[str, Any]]:
|
||||
"""
|
||||
Enrich insights with required fields for AI Insights Service.
|
||||
|
||||
Args:
|
||||
insights: Raw insights from rules engine
|
||||
tenant_id: Tenant identifier
|
||||
inventory_product_id: Product identifier
|
||||
|
||||
Returns:
|
||||
Enriched insights ready for posting
|
||||
"""
|
||||
enriched = []
|
||||
|
||||
for insight in insights:
|
||||
# Add required tenant_id
|
||||
enriched_insight = insight.copy()
|
||||
enriched_insight['tenant_id'] = tenant_id
|
||||
|
||||
# Add product context to metrics
|
||||
if 'metrics_json' not in enriched_insight:
|
||||
enriched_insight['metrics_json'] = {}
|
||||
|
||||
enriched_insight['metrics_json']['inventory_product_id'] = inventory_product_id
|
||||
|
||||
# Add source metadata
|
||||
enriched_insight['source_service'] = 'forecasting'
|
||||
enriched_insight['source_model'] = 'dynamic_rules_engine'
|
||||
enriched_insight['detected_at'] = datetime.utcnow().isoformat()
|
||||
|
||||
enriched.append(enriched_insight)
|
||||
|
||||
return enriched
|
||||
|
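For reference, roughly what an insight looks like after _enrich_insights has run. Field values are illustrative; only the added fields (tenant_id, metrics_json.inventory_product_id, source_service, source_model, detected_at) come from the code above:

```ts
// Shape of an enriched business-rules insight as posted to the AI Insights Service (illustrative).
const enrichedInsight = {
  type: 'recommendation',
  priority: 'medium',
  category: 'forecasting',
  title: 'Weekend demand rule refined',                  // produced by the rules engine
  description: 'Saturday demand runs above the weekday baseline.',
  confidence: 80,
  // Fields added by _enrich_insights:
  tenant_id: '<tenant-uuid>',
  metrics_json: { inventory_product_id: '<product-uuid>' },
  source_service: 'forecasting',
  source_model: 'dynamic_rules_engine',
  detected_at: '2024-06-01T12:00:00Z',
};
```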
||||
async def analyze_all_business_rules(
|
||||
self,
|
||||
tenant_id: str,
|
||||
products_data: Dict[str, pd.DataFrame],
|
||||
min_samples: int = 10
|
||||
) -> Dict[str, Any]:
|
||||
"""
|
||||
Analyze all products for business rules optimization and generate comparative insights.
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant identifier
|
||||
products_data: Dict of {inventory_product_id: sales_data DataFrame}
|
||||
min_samples: Minimum samples for rule analysis
|
||||
|
||||
Returns:
|
||||
Comprehensive analysis with rule optimization insights
|
||||
"""
|
||||
logger.info(
|
||||
"Analyzing business rules for all products",
|
||||
tenant_id=tenant_id,
|
||||
products=len(products_data)
|
||||
)
|
||||
|
||||
all_results = []
|
||||
total_insights_posted = 0
|
||||
|
||||
# Analyze each product
|
||||
for inventory_product_id, sales_data in products_data.items():
|
||||
try:
|
||||
results = await self.analyze_and_post_business_rules_insights(
|
||||
tenant_id=tenant_id,
|
||||
inventory_product_id=inventory_product_id,
|
||||
sales_data=sales_data,
|
||||
min_samples=min_samples
|
||||
)
|
||||
|
||||
all_results.append(results)
|
||||
total_insights_posted += results['insights_posted']
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
"Error analyzing business rules for product",
|
||||
inventory_product_id=inventory_product_id,
|
||||
error=str(e)
|
||||
)
|
||||
|
||||
# Generate summary insight
|
||||
if total_insights_posted > 0:
|
||||
summary_insight = self._generate_portfolio_summary_insight(
|
||||
tenant_id, all_results
|
||||
)
|
||||
|
||||
if summary_insight:
|
||||
enriched_summary = self._enrich_insights(
|
||||
[summary_insight], tenant_id, 'all_products'
|
||||
)
|
||||
|
||||
post_results = await self.ai_insights_client.create_insights_bulk(
|
||||
tenant_id=UUID(tenant_id),
|
||||
insights=enriched_summary
|
||||
)
|
||||
|
||||
total_insights_posted += post_results['successful']
|
||||
|
||||
logger.info(
|
||||
"All business rules analysis complete",
|
||||
tenant_id=tenant_id,
|
||||
products_analyzed=len(all_results),
|
||||
total_insights_posted=total_insights_posted
|
||||
)
|
||||
|
||||
return {
|
||||
'tenant_id': tenant_id,
|
||||
'analyzed_at': datetime.utcnow().isoformat(),
|
||||
'products_analyzed': len(all_results),
|
||||
'product_results': all_results,
|
||||
'total_insights_posted': total_insights_posted
|
||||
}
|
||||
|
||||
def _generate_portfolio_summary_insight(
|
||||
self,
|
||||
tenant_id: str,
|
||||
all_results: List[Dict[str, Any]]
|
||||
) -> Optional[Dict[str, Any]]:
|
||||
"""
|
||||
Generate portfolio-level business rules summary insight.
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant identifier
|
||||
all_results: All product analysis results
|
||||
|
||||
Returns:
|
||||
Summary insight or None
|
||||
"""
|
||||
if not all_results:
|
||||
return None
|
||||
|
||||
# Calculate summary statistics
|
||||
total_products = len(all_results)
|
||||
total_rules = sum(len(r.get('rules', {})) for r in all_results)
|
||||
|
||||
# Count products with significant rule improvements
|
||||
significant_improvements = sum(1 for r in all_results
|
||||
if any('improvement' in str(v).lower() for v in r.get('rules', {}).values()))
|
||||
|
||||
return {
|
||||
'type': 'recommendation',
|
||||
'priority': 'high' if significant_improvements > total_products * 0.3 else 'medium',
|
||||
'category': 'forecasting',
|
||||
'title': f'Business Rule Optimization: {total_products} Products Analyzed',
|
||||
'description': f'Learned {total_rules} dynamic rules across {total_products} products. Identified {significant_improvements} products with significant rule improvements.',
|
||||
'impact_type': 'operational_efficiency',
|
||||
'impact_value': total_rules,
|
||||
'impact_unit': 'rules',
|
||||
'confidence': 80,
|
||||
'metrics_json': {
|
||||
'total_products': total_products,
|
||||
'total_rules': total_rules,
|
||||
'significant_improvements': significant_improvements,
|
||||
'rules_per_product': round(total_rules / total_products, 2)
|
||||
},
|
||||
'actionable': True,
|
||||
'recommendation_actions': [
|
||||
{
|
||||
'label': 'Review Learned Rules',
|
||||
'action': 'review_business_rules',
|
||||
'params': {'tenant_id': tenant_id}
|
||||
},
|
||||
{
|
||||
'label': 'Implement Optimized Rules',
|
||||
'action': 'implement_business_rules',
|
||||
'params': {'tenant_id': tenant_id}
|
||||
}
|
||||
],
|
||||
'source_service': 'forecasting',
|
||||
'source_model': 'dynamic_rules_engine'
|
||||
}
|
||||
|
||||
async def get_learned_rules(
|
||||
self,
|
||||
inventory_product_id: str
|
||||
) -> Optional[Dict[str, Any]]:
|
||||
"""
|
||||
Get cached learned rules for a product.
|
||||
|
||||
Args:
|
||||
inventory_product_id: Product identifier
|
||||
|
||||
Returns:
|
||||
Learned rules or None if not analyzed
|
||||
"""
|
||||
return self.rules_engine.get_all_rules(inventory_product_id)
|
||||
|
||||
async def _publish_insight_events(self, tenant_id, insights, product_context=None):
|
||||
"""
|
||||
Publish insight events to RabbitMQ for alert processing.
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant identifier
|
||||
insights: List of created insights
|
||||
product_context: Additional context about the product
|
||||
"""
|
||||
if not self.event_publisher:
|
||||
logger.warning("No event publisher available for business rules insights")
|
||||
return
|
||||
|
||||
for insight in insights:
|
||||
# Determine severity based on confidence and priority
|
||||
confidence = insight.get('confidence', 0)
|
||||
priority = insight.get('priority', 'medium')
|
||||
|
||||
# Map priority to severity, with confidence as tiebreaker
|
||||
if priority == 'critical' or (priority == 'high' and confidence >= 70):
|
||||
severity = 'high'
|
||||
elif priority == 'high' or (priority == 'medium' and confidence >= 80):
|
||||
severity = 'medium'
|
||||
else:
|
||||
severity = 'low'
|
||||
|
||||
# Prepare the event data
|
||||
event_data = {
|
||||
'insight_id': insight.get('id'),
|
||||
'type': insight.get('type'),
|
||||
'title': insight.get('title'),
|
||||
'description': insight.get('description'),
|
||||
'category': insight.get('category'),
|
||||
'priority': insight.get('priority'),
|
||||
'confidence': confidence,
|
||||
'recommendation': insight.get('recommendation_actions', []),
|
||||
'impact_type': insight.get('impact_type'),
|
||||
'impact_value': insight.get('impact_value'),
|
||||
'inventory_product_id': product_context.get('inventory_product_id') if product_context else None,
|
||||
'timestamp': insight.get('detected_at', datetime.utcnow().isoformat()),
|
||||
'source_service': 'forecasting',
|
||||
'source_model': 'dynamic_rules_engine'
|
||||
}
|
||||
|
||||
try:
|
||||
await self.event_publisher.publish_recommendation(
|
||||
event_type='ai_business_rule',
|
||||
tenant_id=tenant_id,
|
||||
severity=severity,
|
||||
data=event_data
|
||||
)
|
||||
logger.info(
|
||||
"Published business rules insight event",
|
||||
tenant_id=tenant_id,
|
||||
insight_id=insight.get('id'),
|
||||
severity=severity
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
"Failed to publish business rules insight event",
|
||||
tenant_id=tenant_id,
|
||||
insight_id=insight.get('id'),
|
||||
error=str(e)
|
||||
)
|
||||
|
||||
async def close(self):
|
||||
"""Close HTTP client connections."""
|
||||
await self.ai_insights_client.close()
|
||||
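For orientation, a minimal sketch of how the business-rules workflow above might be driven end to end. The orchestrator instance, tenant UUID, and sales DataFrame below are placeholders for illustration, not values from this commit:

# Illustrative caller for analyze_all_business_rules; all identifiers here are placeholders
import asyncio
import pandas as pd

async def run_business_rules_analysis(orchestrator):
    products_data = {
        "prod-0001": pd.DataFrame({
            "date": pd.date_range("2024-01-01", periods=60, freq="D"),
            "quantity": [25 + (i % 7) * 3 for i in range(60)],
        }),
    }
    try:
        summary = await orchestrator.analyze_all_business_rules(
            tenant_id="00000000-0000-0000-0000-000000000001",  # must be a valid UUID string
            products_data=products_data,
            min_samples=10,
        )
        return summary["total_insights_posted"]
    finally:
        await orchestrator.close()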
services/forecasting/app/ml/demand_insights_orchestrator.py (new file, 403 lines added)
@@ -0,0 +1,403 @@
|
||||
"""
|
||||
Demand Insights Orchestrator
|
||||
Coordinates demand forecasting analysis and insight posting
|
||||
"""
|
||||
|
||||
import pandas as pd
|
||||
from typing import Dict, List, Any, Optional
|
||||
import structlog
|
||||
from datetime import datetime
|
||||
from uuid import UUID
|
||||
import sys
|
||||
import os
|
||||
|
||||
# Add shared clients to path
|
||||
sys.path.append(os.path.join(os.path.dirname(__file__), '../../../..'))
|
||||
from shared.clients.ai_insights_client import AIInsightsClient
|
||||
from shared.messaging import UnifiedEventPublisher
|
||||
|
||||
from app.ml.predictor import BakeryForecaster
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
|
||||
class DemandInsightsOrchestrator:
|
||||
"""
|
||||
Orchestrates demand forecasting analysis and insight generation workflow.
|
||||
|
||||
Workflow:
|
||||
1. Analyze historical demand patterns from sales data
|
||||
2. Generate insights for demand optimization
|
||||
3. Post insights to AI Insights Service
|
||||
4. Publish recommendation events to RabbitMQ
|
||||
5. Provide demand pattern analysis for forecasting
|
||||
6. Track demand forecasting performance
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
ai_insights_base_url: str = "http://ai-insights-service:8000",
|
||||
event_publisher: Optional[UnifiedEventPublisher] = None
|
||||
):
|
||||
self.forecaster = BakeryForecaster()
|
||||
self.ai_insights_client = AIInsightsClient(ai_insights_base_url)
|
||||
self.event_publisher = event_publisher
|
||||
|
||||
async def analyze_and_post_demand_insights(
|
||||
self,
|
||||
tenant_id: str,
|
||||
inventory_product_id: str,
|
||||
sales_data: pd.DataFrame,
|
||||
forecast_horizon_days: int = 30,
|
||||
min_history_days: int = 90
|
||||
) -> Dict[str, Any]:
|
||||
"""
|
||||
Complete workflow: Analyze demand and post insights.
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant identifier
|
||||
inventory_product_id: Product identifier
|
||||
sales_data: Historical sales data
|
||||
forecast_horizon_days: Days to forecast ahead
|
||||
min_history_days: Minimum days of history required
|
||||
|
||||
Returns:
|
||||
Workflow results with analysis and posted insights
|
||||
"""
|
||||
logger.info(
|
||||
"Starting demand forecasting analysis workflow",
|
||||
tenant_id=tenant_id,
|
||||
inventory_product_id=inventory_product_id,
|
||||
history_days=len(sales_data)
|
||||
)
|
||||
|
||||
# Step 1: Analyze demand patterns
|
||||
analysis_results = await self.forecaster.analyze_demand_patterns(
|
||||
tenant_id=tenant_id,
|
||||
inventory_product_id=inventory_product_id,
|
||||
sales_data=sales_data,
|
||||
forecast_horizon_days=forecast_horizon_days,
|
||||
min_history_days=min_history_days
|
||||
)
|
||||
|
||||
logger.info(
|
||||
"Demand analysis complete",
|
||||
inventory_product_id=inventory_product_id,
|
||||
insights_generated=len(analysis_results.get('insights', []))
|
||||
)
|
||||
|
||||
# Step 2: Enrich insights with tenant_id and product context
|
||||
enriched_insights = self._enrich_insights(
|
||||
analysis_results.get('insights', []),
|
||||
tenant_id,
|
||||
inventory_product_id
|
||||
)
|
||||
|
||||
# Step 3: Post insights to AI Insights Service
|
||||
if enriched_insights:
|
||||
post_results = await self.ai_insights_client.create_insights_bulk(
|
||||
tenant_id=UUID(tenant_id),
|
||||
insights=enriched_insights
|
||||
)
|
||||
|
||||
logger.info(
|
||||
"Demand insights posted to AI Insights Service",
|
||||
inventory_product_id=inventory_product_id,
|
||||
total=post_results['total'],
|
||||
successful=post_results['successful'],
|
||||
failed=post_results['failed']
|
||||
)
|
||||
else:
|
||||
post_results = {'total': 0, 'successful': 0, 'failed': 0}
|
||||
logger.info("No insights to post for product", inventory_product_id=inventory_product_id)
|
||||
|
||||
# Step 4: Publish insight events to RabbitMQ
|
||||
created_insights = post_results.get('created_insights', [])
|
||||
if created_insights:
|
||||
product_context = {'inventory_product_id': inventory_product_id}
|
||||
await self._publish_insight_events(
|
||||
tenant_id=tenant_id,
|
||||
insights=created_insights,
|
||||
product_context=product_context
|
||||
)
|
||||
|
||||
# Step 5: Return comprehensive results
|
||||
return {
|
||||
'tenant_id': tenant_id,
|
||||
'inventory_product_id': inventory_product_id,
|
||||
'analyzed_at': analysis_results['analyzed_at'],
|
||||
'history_days': analysis_results['history_days'],
|
||||
'demand_patterns': analysis_results.get('patterns', {}),
|
||||
'trend_analysis': analysis_results.get('trend_analysis', {}),
|
||||
'seasonal_factors': analysis_results.get('seasonal_factors', {}),
|
||||
'insights_generated': len(enriched_insights),
|
||||
'insights_posted': post_results['successful'],
|
||||
'insights_failed': post_results['failed'],
|
||||
'created_insights': post_results.get('created_insights', [])
|
||||
}
|
||||
|
||||
def _enrich_insights(
|
||||
self,
|
||||
insights: List[Dict[str, Any]],
|
||||
tenant_id: str,
|
||||
inventory_product_id: str
|
||||
) -> List[Dict[str, Any]]:
|
||||
"""
|
||||
Enrich insights with required fields for AI Insights Service.
|
||||
|
||||
Args:
|
||||
insights: Raw insights from forecaster
|
||||
tenant_id: Tenant identifier
|
||||
inventory_product_id: Product identifier
|
||||
|
||||
Returns:
|
||||
Enriched insights ready for posting
|
||||
"""
|
||||
enriched = []
|
||||
|
||||
for insight in insights:
|
||||
# Add required tenant_id
|
||||
enriched_insight = insight.copy()
|
||||
enriched_insight['tenant_id'] = tenant_id
|
||||
|
||||
# Add product context to metrics
|
||||
if 'metrics_json' not in enriched_insight:
|
||||
enriched_insight['metrics_json'] = {}
|
||||
|
||||
enriched_insight['metrics_json']['inventory_product_id'] = inventory_product_id
|
||||
|
||||
# Add source metadata
|
||||
enriched_insight['source_service'] = 'forecasting'
|
||||
enriched_insight['source_model'] = 'demand_analyzer'
|
||||
enriched_insight['detected_at'] = datetime.utcnow().isoformat()
|
||||
|
||||
enriched.append(enriched_insight)
|
||||
|
||||
return enriched
|
||||
|
||||
async def analyze_all_products(
|
||||
self,
|
||||
tenant_id: str,
|
||||
products_data: Dict[str, pd.DataFrame],
|
||||
forecast_horizon_days: int = 30,
|
||||
min_history_days: int = 90
|
||||
) -> Dict[str, Any]:
|
||||
"""
|
||||
Analyze all products for a tenant and generate comparative insights.
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant identifier
|
||||
products_data: Dict of {inventory_product_id: sales_data DataFrame}
|
||||
forecast_horizon_days: Days to forecast
|
||||
min_history_days: Minimum history required
|
||||
|
||||
Returns:
|
||||
Comprehensive analysis with product comparison
|
||||
"""
|
||||
logger.info(
|
||||
"Analyzing all products for tenant",
|
||||
tenant_id=tenant_id,
|
||||
products=len(products_data)
|
||||
)
|
||||
|
||||
all_results = []
|
||||
total_insights_posted = 0
|
||||
|
||||
# Analyze each product
|
||||
for inventory_product_id, sales_data in products_data.items():
|
||||
try:
|
||||
results = await self.analyze_and_post_demand_insights(
|
||||
tenant_id=tenant_id,
|
||||
inventory_product_id=inventory_product_id,
|
||||
sales_data=sales_data,
|
||||
forecast_horizon_days=forecast_horizon_days,
|
||||
min_history_days=min_history_days
|
||||
)
|
||||
|
||||
all_results.append(results)
|
||||
total_insights_posted += results['insights_posted']
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
"Error analyzing product",
|
||||
inventory_product_id=inventory_product_id,
|
||||
error=str(e)
|
||||
)
|
||||
|
||||
# Generate summary insight
|
||||
if total_insights_posted > 0:
|
||||
summary_insight = self._generate_portfolio_summary_insight(
|
||||
tenant_id, all_results
|
||||
)
|
||||
|
||||
if summary_insight:
|
||||
enriched_summary = self._enrich_insights(
|
||||
[summary_insight], tenant_id, 'all_products'
|
||||
)
|
||||
|
||||
post_results = await self.ai_insights_client.create_insights_bulk(
|
||||
tenant_id=UUID(tenant_id),
|
||||
insights=enriched_summary
|
||||
)
|
||||
|
||||
total_insights_posted += post_results['successful']
|
||||
|
||||
logger.info(
|
||||
"All products analysis complete",
|
||||
tenant_id=tenant_id,
|
||||
products_analyzed=len(all_results),
|
||||
total_insights_posted=total_insights_posted
|
||||
)
|
||||
|
||||
return {
|
||||
'tenant_id': tenant_id,
|
||||
'analyzed_at': datetime.utcnow().isoformat(),
|
||||
'products_analyzed': len(all_results),
|
||||
'product_results': all_results,
|
||||
'total_insights_posted': total_insights_posted
|
||||
}
|
||||
|
||||
def _generate_portfolio_summary_insight(
|
||||
self,
|
||||
tenant_id: str,
|
||||
all_results: List[Dict[str, Any]]
|
||||
) -> Optional[Dict[str, Any]]:
|
||||
"""
|
||||
Generate portfolio-level summary insight.
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant identifier
|
||||
all_results: All product analysis results
|
||||
|
||||
Returns:
|
||||
Summary insight or None
|
||||
"""
|
||||
if not all_results:
|
||||
return None
|
||||
|
||||
# Calculate summary statistics
|
||||
total_products = len(all_results)
|
||||
high_demand_products = sum(1 for r in all_results if r.get('trend_analysis', {}).get('is_increasing', False))
|
||||
|
||||
# Average only over products that actually report a seasonal peak ratio,
# so products without seasonal factors do not drag the average down
peak_ratios = [
r.get('seasonal_factors', {}).get('peak_ratio')
for r in all_results
if r.get('seasonal_factors', {}).get('peak_ratio')
]
avg_seasonal_factor = sum(peak_ratios) / max(1, len(peak_ratios))
|
||||
|
||||
return {
|
||||
'type': 'recommendation',
|
||||
'priority': 'medium' if high_demand_products > total_products * 0.5 else 'low',
|
||||
'category': 'forecasting',
|
||||
'title': f'Demand Pattern Summary: {total_products} Products Analyzed',
|
||||
'description': f'Detected {high_demand_products} products with increasing demand trends. Average seasonal peak ratio: {avg_seasonal_factor:.2f}x.',
|
||||
'impact_type': 'demand_optimization',
|
||||
'impact_value': high_demand_products,
|
||||
'impact_unit': 'products',
|
||||
'confidence': 75,
|
||||
'metrics_json': {
|
||||
'total_products': total_products,
|
||||
'high_demand_products': high_demand_products,
|
||||
'avg_seasonal_factor': round(avg_seasonal_factor, 2),
|
||||
'trend_strength': 'strong' if high_demand_products > total_products * 0.7 else 'moderate'
|
||||
},
|
||||
'actionable': True,
|
||||
'recommendation_actions': [
|
||||
{
|
||||
'label': 'Review Production Schedule',
|
||||
'action': 'review_production_schedule',
|
||||
'params': {'tenant_id': tenant_id}
|
||||
},
|
||||
{
|
||||
'label': 'Adjust Inventory Levels',
|
||||
'action': 'adjust_inventory_levels',
|
||||
'params': {'tenant_id': tenant_id}
|
||||
}
|
||||
],
|
||||
'source_service': 'forecasting',
|
||||
'source_model': 'demand_analyzer'
|
||||
}
|
||||
|
||||
async def get_demand_patterns(
|
||||
self,
|
||||
inventory_product_id: str
|
||||
) -> Optional[Dict[str, Any]]:
|
||||
"""
|
||||
Get cached demand patterns for a product.
|
||||
|
||||
Args:
|
||||
inventory_product_id: Product identifier
|
||||
|
||||
Returns:
|
||||
Demand patterns or None if not analyzed
|
||||
"""
|
||||
return self.forecaster.get_cached_demand_patterns(inventory_product_id)
|
||||
|
||||
async def _publish_insight_events(self, tenant_id, insights, product_context=None):
|
||||
"""
|
||||
Publish insight events to RabbitMQ for alert processing.
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant identifier
|
||||
insights: List of created insights
|
||||
product_context: Additional context about the product
|
||||
"""
|
||||
if not self.event_publisher:
|
||||
logger.warning("No event publisher available for demand insights")
|
||||
return
|
||||
|
||||
for insight in insights:
|
||||
# Determine severity based on confidence and priority
|
||||
confidence = insight.get('confidence', 0)
|
||||
priority = insight.get('priority', 'medium')
|
||||
|
||||
# Map priority to severity, with confidence as tiebreaker
|
||||
if priority == 'critical' or (priority == 'high' and confidence >= 70):
|
||||
severity = 'high'
|
||||
elif priority == 'high' or (priority == 'medium' and confidence >= 80):
|
||||
severity = 'medium'
|
||||
else:
|
||||
severity = 'low'
|
||||
|
||||
# Prepare the event data
|
||||
event_data = {
|
||||
'insight_id': insight.get('id'),
|
||||
'type': insight.get('type'),
|
||||
'title': insight.get('title'),
|
||||
'description': insight.get('description'),
|
||||
'category': insight.get('category'),
|
||||
'priority': insight.get('priority'),
|
||||
'confidence': confidence,
|
||||
'recommendation': insight.get('recommendation_actions', []),
|
||||
'impact_type': insight.get('impact_type'),
|
||||
'impact_value': insight.get('impact_value'),
|
||||
'inventory_product_id': product_context.get('inventory_product_id') if product_context else None,
|
||||
'timestamp': insight.get('detected_at', datetime.utcnow().isoformat()),
|
||||
'source_service': 'forecasting',
|
||||
'source_model': 'demand_analyzer'
|
||||
}
|
||||
|
||||
try:
|
||||
await self.event_publisher.publish_recommendation(
|
||||
event_type='ai_demand_forecast',
|
||||
tenant_id=tenant_id,
|
||||
severity=severity,
|
||||
data=event_data
|
||||
)
|
||||
logger.info(
|
||||
"Published demand insight event",
|
||||
tenant_id=tenant_id,
|
||||
insight_id=insight.get('id'),
|
||||
severity=severity
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
"Failed to publish demand insight event",
|
||||
tenant_id=tenant_id,
|
||||
insight_id=insight.get('id'),
|
||||
error=str(e)
|
||||
)
|
||||
|
||||
async def close(self):
|
||||
"""Close HTTP client connections."""
|
||||
await self.ai_insights_client.close()
|
||||
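For reference, a small sketch of how the DemandInsightsOrchestrator defined above might be driven for a single product. The sales DataFrame columns and values are assumptions about what the forecaster expects, not part of this commit:

# Illustrative only; column names below are an assumption, not taken from predictor.py
import asyncio
import pandas as pd

async def analyze_one_product():
    orchestrator = DemandInsightsOrchestrator()  # no event publisher: event publishing is skipped with a warning
    sales = pd.DataFrame({
        "date": pd.date_range("2024-01-01", periods=120, freq="D"),
        "quantity": [20 + (i % 7) for i in range(120)],
    })
    try:
        result = await orchestrator.analyze_and_post_demand_insights(
            tenant_id="00000000-0000-0000-0000-000000000001",
            inventory_product_id="prod-croissant",
            sales_data=sales,
            forecast_horizon_days=30,
            min_history_days=90,
        )
        return result["insights_posted"]
    finally:
        await orchestrator.close()

# asyncio.run(analyze_one_product())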
@@ -11,6 +11,7 @@ from uuid import UUID
|
||||
|
||||
from app.ml.dynamic_rules_engine import DynamicRulesEngine
|
||||
from app.clients.ai_insights_client import AIInsightsClient
|
||||
from shared.messaging import UnifiedEventPublisher
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
@@ -29,10 +30,12 @@ class RulesOrchestrator:
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
ai_insights_base_url: str = "http://ai-insights-service:8000"
|
||||
ai_insights_base_url: str = "http://ai-insights-service:8000",
|
||||
event_publisher: Optional[UnifiedEventPublisher] = None
|
||||
):
|
||||
self.rules_engine = DynamicRulesEngine()
|
||||
self.ai_insights_client = AIInsightsClient(ai_insights_base_url)
|
||||
self.event_publisher = event_publisher
|
||||
|
||||
async def learn_and_post_rules(
|
||||
self,
|
||||
@@ -100,7 +103,17 @@ class RulesOrchestrator:
|
||||
post_results = {'total': 0, 'successful': 0, 'failed': 0}
|
||||
logger.info("No insights to post")
|
||||
|
||||
# Step 4: Return comprehensive results
|
||||
# Step 4: Publish insight events to RabbitMQ
|
||||
created_insights = post_results.get('created_insights', [])
|
||||
if created_insights:
|
||||
product_context = {'inventory_product_id': inventory_product_id}
|
||||
await self._publish_insight_events(
|
||||
tenant_id=tenant_id,
|
||||
insights=created_insights,
|
||||
product_context=product_context
|
||||
)
|
||||
|
||||
# Step 5: Return comprehensive results
|
||||
return {
|
||||
'tenant_id': tenant_id,
|
||||
'inventory_product_id': inventory_product_id,
|
||||
@@ -229,6 +242,71 @@ class RulesOrchestrator:
|
||||
|
||||
return results
|
||||
|
||||
async def _publish_insight_events(self, tenant_id, insights, product_context=None):
|
||||
"""
|
||||
Publish insight events to RabbitMQ for alert processing.
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant identifier
|
||||
insights: List of created insights
|
||||
product_context: Additional context about the product
|
||||
"""
|
||||
if not self.event_publisher:
|
||||
logger.warning("No event publisher available for business rules insights")
|
||||
return
|
||||
|
||||
for insight in insights:
|
||||
# Determine severity based on confidence and priority
|
||||
confidence = insight.get('confidence', 0)
|
||||
priority = insight.get('priority', 'medium')
|
||||
|
||||
# Map priority to severity, with confidence as tiebreaker
|
||||
if priority == 'critical' or (priority == 'high' and confidence >= 70):
|
||||
severity = 'high'
|
||||
elif priority == 'high' or (priority == 'medium' and confidence >= 80):
|
||||
severity = 'medium'
|
||||
else:
|
||||
severity = 'low'
|
||||
|
||||
# Prepare the event data
|
||||
event_data = {
|
||||
'insight_id': insight.get('id'),
|
||||
'type': insight.get('type'),
|
||||
'title': insight.get('title'),
|
||||
'description': insight.get('description'),
|
||||
'category': insight.get('category'),
|
||||
'priority': insight.get('priority'),
|
||||
'confidence': confidence,
|
||||
'recommendation': insight.get('recommendation_actions', []),
|
||||
'impact_type': insight.get('impact_type'),
|
||||
'impact_value': insight.get('impact_value'),
|
||||
'inventory_product_id': product_context.get('inventory_product_id') if product_context else None,
|
||||
'timestamp': insight.get('detected_at', datetime.utcnow().isoformat()),
|
||||
'source_service': 'forecasting',
|
||||
'source_model': 'dynamic_rules_engine'
|
||||
}
|
||||
|
||||
try:
|
||||
await self.event_publisher.publish_recommendation(
|
||||
event_type='ai_business_rule',
|
||||
tenant_id=tenant_id,
|
||||
severity=severity,
|
||||
data=event_data
|
||||
)
|
||||
logger.info(
|
||||
"Published business rules insight event",
|
||||
tenant_id=tenant_id,
|
||||
insight_id=insight.get('id'),
|
||||
severity=severity
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
"Failed to publish business rules insight event",
|
||||
tenant_id=tenant_id,
|
||||
insight_id=insight.get('id'),
|
||||
error=str(e)
|
||||
)
|
||||
|
||||
async def close(self):
|
||||
"""Close HTTP client connections."""
|
||||
await self.ai_insights_client.close()
|
||||
|
||||
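The priority-to-severity mapping above is repeated in several of the orchestrators touched by this commit. One possible follow-up (not part of this change) is a small shared helper; the module it would live in is left open, but the logic below mirrors the duplicated code exactly:

# Hypothetical shared helper; its name and home module are assumptions
def map_priority_to_severity(priority: str, confidence: float) -> str:
    """Map insight priority to event severity, using confidence as a tiebreaker."""
    if priority == 'critical' or (priority == 'high' and confidence >= 70):
        return 'high'
    if priority == 'high' or (priority == 'medium' and confidence >= 80):
        return 'medium'
    return 'low'

# Usage inside _publish_insight_events:
# severity = map_priority_to_severity(insight.get('priority', 'medium'), insight.get('confidence', 0))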
@@ -7,7 +7,7 @@ Provides endpoints to trigger ML insight generation for:
|
||||
- Demand pattern analysis
|
||||
"""
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
from fastapi import APIRouter, Depends, HTTPException, Request
|
||||
from pydantic import BaseModel, Field
|
||||
from typing import Optional, List
|
||||
from uuid import UUID
|
||||
@@ -71,6 +71,7 @@ class SafetyStockOptimizationResponse(BaseModel):
|
||||
async def trigger_safety_stock_optimization(
|
||||
tenant_id: str,
|
||||
request_data: SafetyStockOptimizationRequest,
|
||||
request: Request,
|
||||
db: AsyncSession = Depends(get_db)
|
||||
):
|
||||
"""
|
||||
@@ -81,10 +82,12 @@ async def trigger_safety_stock_optimization(
|
||||
2. Runs the SafetyStockInsightsOrchestrator to optimize levels
|
||||
3. Generates insights about safety stock recommendations
|
||||
4. Posts insights to AI Insights Service
|
||||
5. Publishes recommendation events to RabbitMQ
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant UUID
|
||||
request_data: Optimization parameters
|
||||
request: FastAPI request (for app state access)
|
||||
db: Database session
|
||||
|
||||
Returns:
|
||||
@@ -103,8 +106,13 @@ async def trigger_safety_stock_optimization(
|
||||
from app.models.inventory import Ingredient
|
||||
from sqlalchemy import select
|
||||
|
||||
# Get event publisher from app state (if available)
|
||||
event_publisher = getattr(request.app.state, 'event_publisher', None) if hasattr(request, 'app') else None
|
||||
|
||||
# Initialize orchestrator
|
||||
orchestrator = SafetyStockInsightsOrchestrator()
|
||||
orchestrator = SafetyStockInsightsOrchestrator(
|
||||
event_publisher=event_publisher
|
||||
)
|
||||
|
||||
# Get products to optimize
|
||||
if request_data.product_ids:
|
||||
@@ -378,6 +386,7 @@ async def generate_safety_stock_insights_internal(
|
||||
result = await trigger_safety_stock_optimization(
|
||||
tenant_id=tenant_id,
|
||||
request_data=request_data,
|
||||
request=request,
|
||||
db=db
|
||||
)
|
||||
|
||||
|
||||
@@ -126,6 +126,7 @@ class InventoryService(StandardFastAPIService):
# Store services in app state
app.state.alert_service = alert_service
app.state.inventory_scheduler = inventory_scheduler # Store scheduler for manual triggering
app.state.event_publisher = self.event_publisher # Store event publisher for ML insights
else:
self.logger.error("Event publisher not initialized, alert service unavailable")
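Storing the publisher on app.state is what lets the ML-insights routes elsewhere in this commit pick it up per request. A condensed sketch of the retrieval side, mirroring the getattr pattern used in those routes:

# Sketch of the retrieval pattern; the publisher may be None if RabbitMQ never connected
from fastapi import Request

def get_event_publisher(request: Request):
    # Callers must tolerate None and skip event publishing in that case
    return getattr(request.app.state, 'event_publisher', None)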
@@ -14,6 +14,7 @@ import os
|
||||
# Add shared clients to path
|
||||
sys.path.append(os.path.join(os.path.dirname(__file__), '../../../..'))
|
||||
from shared.clients.ai_insights_client import AIInsightsClient
|
||||
from shared.messaging import UnifiedEventPublisher
|
||||
|
||||
from app.ml.safety_stock_optimizer import SafetyStockOptimizer
|
||||
|
||||
@@ -28,15 +29,18 @@ class SafetyStockInsightsOrchestrator:
|
||||
1. Optimize safety stock from demand history and cost parameters
|
||||
2. Generate insights comparing optimal vs hardcoded approach
|
||||
3. Post insights to AI Insights Service
|
||||
4. Provide optimized safety stock levels for inventory management
|
||||
4. Publish recommendation events to RabbitMQ
|
||||
5. Provide optimized safety stock levels for inventory management
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
ai_insights_base_url: str = "http://ai-insights-service:8000"
|
||||
ai_insights_base_url: str = "http://ai-insights-service:8000",
|
||||
event_publisher: Optional[UnifiedEventPublisher] = None
|
||||
):
|
||||
self.optimizer = SafetyStockOptimizer()
|
||||
self.ai_insights_client = AIInsightsClient(ai_insights_base_url)
|
||||
self.event_publisher = event_publisher
|
||||
|
||||
async def optimize_and_post_insights(
|
||||
self,
|
||||
@@ -109,6 +113,17 @@ class SafetyStockInsightsOrchestrator:
|
||||
successful=post_results['successful'],
|
||||
failed=post_results['failed']
|
||||
)
|
||||
|
||||
# Step 4: Publish recommendation events to RabbitMQ
|
||||
created_insights = post_results.get('created_insights', [])
|
||||
if created_insights:
|
||||
product_context = product_characteristics.copy() if product_characteristics else {}
|
||||
product_context['inventory_product_id'] = inventory_product_id
|
||||
await self._publish_insight_events(
|
||||
tenant_id=tenant_id,
|
||||
insights=created_insights,
|
||||
product_context=product_context
|
||||
)
|
||||
else:
|
||||
post_results = {'total': 0, 'successful': 0, 'failed': 0}
|
||||
logger.info("No insights to post for product", inventory_product_id=inventory_product_id)
|
||||
@@ -167,6 +182,84 @@ class SafetyStockInsightsOrchestrator:
|
||||
|
||||
return enriched
|
||||
|
||||
async def _publish_insight_events(
|
||||
self,
|
||||
tenant_id: str,
|
||||
insights: List[Dict[str, Any]],
|
||||
product_context: Optional[Dict[str, Any]] = None
|
||||
) -> None:
|
||||
"""
|
||||
Publish recommendation events to RabbitMQ for each insight.
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant identifier
|
||||
insights: List of created insights (with insight_id from AI Insights Service)
|
||||
product_context: Optional product context (name, id, etc.)
|
||||
"""
|
||||
if not self.event_publisher:
|
||||
logger.warning("Event publisher not configured, skipping event publication")
|
||||
return
|
||||
|
||||
for insight in insights:
|
||||
try:
|
||||
# Determine severity based on confidence and priority
|
||||
confidence = insight.get('confidence', 0)
|
||||
priority = insight.get('priority', 'medium')
|
||||
|
||||
if priority == 'urgent' or confidence >= 90:
|
||||
severity = 'urgent'
|
||||
elif priority == 'high' or confidence >= 70:
|
||||
severity = 'high'
|
||||
elif priority == 'medium' or confidence >= 50:
|
||||
severity = 'medium'
|
||||
else:
|
||||
severity = 'low'
|
||||
|
||||
# Build event metadata
|
||||
event_metadata = {
|
||||
'insight_id': insight.get('id'),
|
||||
'insight_type': insight.get('insight_type'),
|
||||
'inventory_product_id': insight.get('metrics_json', {}).get('inventory_product_id'),
|
||||
'ingredient_name': product_context.get('ingredient_name') if product_context else None,
|
||||
'suggested_safety_stock': insight.get('metrics_json', {}).get('suggested_safety_stock'),
|
||||
'current_safety_stock': insight.get('metrics_json', {}).get('current_safety_stock'),
|
||||
'estimated_savings': insight.get('impact_value'),
|
||||
'confidence': confidence,
|
||||
'recommendation': insight.get('recommendation'),
|
||||
'impact_type': insight.get('impact_type'),
|
||||
'source_service': 'inventory',
|
||||
'source_model': 'safety_stock_optimizer'
|
||||
}
|
||||
|
||||
# Remove None values
|
||||
event_metadata = {k: v for k, v in event_metadata.items() if v is not None}
|
||||
|
||||
# Publish recommendation event
|
||||
await self.event_publisher.publish_recommendation(
|
||||
event_type='ai_safety_stock_optimization',
|
||||
tenant_id=tenant_id,
|
||||
severity=severity,
|
||||
data=event_metadata
|
||||
)
|
||||
|
||||
logger.info(
|
||||
"Published safety stock insight recommendation event",
|
||||
tenant_id=tenant_id,
|
||||
insight_id=insight.get('id'),
|
||||
insight_type=insight.get('insight_type'),
|
||||
severity=severity
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
"Failed to publish insight event",
|
||||
tenant_id=tenant_id,
|
||||
insight_id=insight.get('id'),
|
||||
error=str(e),
|
||||
exc_info=True
|
||||
)
|
||||
# Don't raise - we don't want to fail the whole workflow if event publishing fails
|
||||
|
||||
async def optimize_all_products(
|
||||
self,
|
||||
tenant_id: str,
|
||||
|
||||
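Because _publish_insight_events deliberately swallows publish failures, a quick way to exercise it is with a fake publisher. This is an illustrative pytest-style sketch only, assuming pytest-asyncio is available and that the optimizer and AI-insights client can be constructed without a live service:

# Illustrative test sketch; not part of this commit
import pytest

class FakePublisher:
    def __init__(self):
        self.events = []
    async def publish_recommendation(self, event_type, tenant_id, severity, data):
        self.events.append((event_type, severity, data))

@pytest.mark.asyncio
async def test_publish_insight_events_maps_priority_and_confidence():
    publisher = FakePublisher()
    orchestrator = SafetyStockInsightsOrchestrator(event_publisher=publisher)
    insight = {'id': 'ins-1', 'priority': 'high', 'confidence': 75, 'metrics_json': {}}
    await orchestrator._publish_insight_events('tenant-1', [insight], product_context={'ingredient_name': 'flour'})
    assert len(publisher.events) == 1
    event_type, severity, data = publisher.events[0]
    assert event_type == 'ai_safety_stock_optimization'
    assert severity == 'high'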
@@ -92,8 +92,8 @@ async def load_fixture_data_for_tenant(
return 0

# Parse and adjust dates from fixture to reference_time
base_started_at = resolve_time_marker(orchestration_run_data.get("started_at"))
base_completed_at = resolve_time_marker(orchestration_run_data.get("completed_at"))
base_started_at = resolve_time_marker(orchestration_run_data.get("started_at"), reference_time)
base_completed_at = resolve_time_marker(orchestration_run_data.get("completed_at"), reference_time)

# Adjust dates to make them appear recent relative to session creation
started_at = adjust_date_for_demo(base_started_at, reference_time) if base_started_at else reference_time - timedelta(hours=2)
@@ -6,7 +6,7 @@ Provides endpoints to trigger ML insight generation for:
|
||||
- Price forecasting and timing recommendations
|
||||
"""
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
from fastapi import APIRouter, Depends, HTTPException, Request
|
||||
from pydantic import BaseModel, Field
|
||||
from typing import Optional, List
|
||||
from uuid import UUID
|
||||
@@ -108,6 +108,7 @@ class PriceForecastResponse(BaseModel):
|
||||
async def trigger_supplier_analysis(
|
||||
tenant_id: str,
|
||||
request_data: SupplierAnalysisRequest,
|
||||
request: Request,
|
||||
db: AsyncSession = Depends(get_db)
|
||||
):
|
||||
"""
|
||||
@@ -142,8 +143,11 @@ async def trigger_supplier_analysis(
|
||||
from app.core.config import settings
|
||||
from sqlalchemy import select
|
||||
|
||||
# Get event publisher from app state
|
||||
event_publisher = getattr(request.app.state, 'event_publisher', None)
|
||||
|
||||
# Initialize orchestrator and clients
|
||||
orchestrator = SupplierInsightsOrchestrator()
|
||||
orchestrator = SupplierInsightsOrchestrator(event_publisher=event_publisher)
|
||||
suppliers_client = SuppliersServiceClient(settings)
|
||||
|
||||
# Get suppliers to analyze from suppliers service via API
|
||||
@@ -319,6 +323,7 @@ async def trigger_supplier_analysis(
|
||||
async def trigger_price_forecasting(
|
||||
tenant_id: str,
|
||||
request_data: PriceForecastRequest,
|
||||
request: Request,
|
||||
db: AsyncSession = Depends(get_db)
|
||||
):
|
||||
"""
|
||||
@@ -353,8 +358,11 @@ async def trigger_price_forecasting(
|
||||
from app.core.config import settings
|
||||
from sqlalchemy import select
|
||||
|
||||
# Get event publisher from app state
|
||||
event_publisher = getattr(request.app.state, 'event_publisher', None)
|
||||
|
||||
# Initialize orchestrator and inventory client
|
||||
orchestrator = PriceInsightsOrchestrator()
|
||||
orchestrator = PriceInsightsOrchestrator(event_publisher=event_publisher)
|
||||
inventory_client = InventoryServiceClient(settings)
|
||||
|
||||
# Get ingredients to forecast from inventory service via API
|
||||
@@ -594,6 +602,7 @@ async def generate_price_insights_internal(
|
||||
result = await trigger_price_forecasting(
|
||||
tenant_id=tenant_id,
|
||||
request_data=request_data,
|
||||
request=request,
|
||||
db=db
|
||||
)
|
||||
|
||||
|
||||
@@ -107,6 +107,7 @@ class ProcurementService(StandardFastAPIService):

# Store in app state for internal API access
app.state.delivery_tracking_service = self.delivery_tracking_service
app.state.event_publisher = self.event_publisher

# Start overdue PO scheduler
if self.rabbitmq_client and self.rabbitmq_client.connected:
@@ -14,6 +14,7 @@ import os
|
||||
# Add shared clients to path
|
||||
sys.path.append(os.path.join(os.path.dirname(__file__), '../../../..'))
|
||||
from shared.clients.ai_insights_client import AIInsightsClient
|
||||
from shared.messaging import UnifiedEventPublisher
|
||||
|
||||
from app.ml.price_forecaster import PriceForecaster
|
||||
|
||||
@@ -33,10 +34,12 @@ class PriceInsightsOrchestrator:
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
ai_insights_base_url: str = "http://ai-insights-service:8000"
|
||||
ai_insights_base_url: str = "http://ai-insights-service:8000",
|
||||
event_publisher: Optional[UnifiedEventPublisher] = None
|
||||
):
|
||||
self.forecaster = PriceForecaster()
|
||||
self.ai_insights_client = AIInsightsClient(ai_insights_base_url)
|
||||
self.event_publisher = event_publisher
|
||||
|
||||
async def forecast_and_post_insights(
|
||||
self,
|
||||
@@ -107,7 +110,17 @@ class PriceInsightsOrchestrator:
|
||||
post_results = {'total': 0, 'successful': 0, 'failed': 0}
|
||||
logger.info("No insights to post for ingredient", ingredient_id=ingredient_id)
|
||||
|
||||
# Step 4: Return comprehensive results
|
||||
# Step 4: Publish insight events to RabbitMQ
|
||||
created_insights = post_results.get('created_insights', [])
|
||||
if created_insights:
|
||||
ingredient_context = {'ingredient_id': ingredient_id}
|
||||
await self._publish_insight_events(
|
||||
tenant_id=tenant_id,
|
||||
insights=created_insights,
|
||||
ingredient_context=ingredient_context
|
||||
)
|
||||
|
||||
# Step 5: Return comprehensive results
|
||||
return {
|
||||
'tenant_id': tenant_id,
|
||||
'ingredient_id': ingredient_id,
|
||||
@@ -261,6 +274,71 @@ class PriceInsightsOrchestrator:
|
||||
'bulk_opportunity_count': bulk_opportunity_count
|
||||
}
|
||||
|
||||
async def _publish_insight_events(self, tenant_id, insights, ingredient_context=None):
|
||||
"""
|
||||
Publish insight events to RabbitMQ for alert processing.
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant identifier
|
||||
insights: List of created insights
|
||||
ingredient_context: Additional context about the ingredient
|
||||
"""
|
||||
if not self.event_publisher:
|
||||
logger.warning("No event publisher available for price insights")
|
||||
return
|
||||
|
||||
for insight in insights:
|
||||
# Determine severity based on confidence and priority
|
||||
confidence = insight.get('confidence', 0)
|
||||
priority = insight.get('priority', 'medium')
|
||||
|
||||
# Map priority to severity, with confidence as tiebreaker
|
||||
if priority == 'critical' or (priority == 'high' and confidence >= 70):
|
||||
severity = 'high'
|
||||
elif priority == 'high' or (priority == 'medium' and confidence >= 80):
|
||||
severity = 'medium'
|
||||
else:
|
||||
severity = 'low'
|
||||
|
||||
# Prepare the event data
|
||||
event_data = {
|
||||
'insight_id': insight.get('id'),
|
||||
'type': insight.get('type'),
|
||||
'title': insight.get('title'),
|
||||
'description': insight.get('description'),
|
||||
'category': insight.get('category'),
|
||||
'priority': insight.get('priority'),
|
||||
'confidence': confidence,
|
||||
'recommendation': insight.get('recommendation_actions', []),
|
||||
'impact_type': insight.get('impact_type'),
|
||||
'impact_value': insight.get('impact_value'),
|
||||
'ingredient_id': ingredient_context.get('ingredient_id') if ingredient_context else None,
|
||||
'timestamp': insight.get('detected_at', datetime.utcnow().isoformat()),
|
||||
'source_service': 'procurement',
|
||||
'source_model': 'price_forecaster'
|
||||
}
|
||||
|
||||
try:
|
||||
await self.event_publisher.publish_recommendation(
|
||||
event_type='ai_price_forecast',
|
||||
tenant_id=tenant_id,
|
||||
severity=severity,
|
||||
data=event_data
|
||||
)
|
||||
logger.info(
|
||||
"Published price insight event",
|
||||
tenant_id=tenant_id,
|
||||
insight_id=insight.get('id'),
|
||||
severity=severity
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
"Failed to publish price insight event",
|
||||
tenant_id=tenant_id,
|
||||
insight_id=insight.get('id'),
|
||||
error=str(e)
|
||||
)
|
||||
|
||||
def _generate_portfolio_summary_insight(
|
||||
self,
|
||||
tenant_id: str,
|
||||
|
||||
@@ -14,6 +14,7 @@ import os
|
||||
# Add shared clients to path
|
||||
sys.path.append(os.path.join(os.path.dirname(__file__), '../../../..'))
|
||||
from shared.clients.ai_insights_client import AIInsightsClient
|
||||
from shared.messaging import UnifiedEventPublisher
|
||||
|
||||
from app.ml.supplier_performance_predictor import SupplierPerformancePredictor
|
||||
|
||||
@@ -28,16 +29,19 @@ class SupplierInsightsOrchestrator:
|
||||
1. Analyze supplier performance from historical orders
|
||||
2. Generate insights for procurement risk management
|
||||
3. Post insights to AI Insights Service
|
||||
4. Provide supplier comparison and recommendations
|
||||
5. Track supplier reliability scores
|
||||
4. Publish recommendation events to RabbitMQ
|
||||
5. Provide supplier comparison and recommendations
|
||||
6. Track supplier reliability scores
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
ai_insights_base_url: str = "http://ai-insights-service:8000"
|
||||
ai_insights_base_url: str = "http://ai-insights-service:8000",
|
||||
event_publisher: Optional[UnifiedEventPublisher] = None
|
||||
):
|
||||
self.predictor = SupplierPerformancePredictor()
|
||||
self.ai_insights_client = AIInsightsClient(ai_insights_base_url)
|
||||
self.event_publisher = event_publisher
|
||||
|
||||
async def analyze_and_post_supplier_insights(
|
||||
self,
|
||||
@@ -105,7 +109,17 @@ class SupplierInsightsOrchestrator:
|
||||
post_results = {'total': 0, 'successful': 0, 'failed': 0}
|
||||
logger.info("No insights to post for supplier", supplier_id=supplier_id)
|
||||
|
||||
# Step 4: Return comprehensive results
|
||||
# Step 4: Publish insight events to RabbitMQ
|
||||
created_insights = post_results.get('created_insights', [])
|
||||
if created_insights:
|
||||
supplier_context = {'supplier_id': supplier_id}
|
||||
await self._publish_insight_events(
|
||||
tenant_id=tenant_id,
|
||||
insights=created_insights,
|
||||
supplier_context=supplier_context
|
||||
)
|
||||
|
||||
# Step 5: Return comprehensive results
|
||||
return {
|
||||
'tenant_id': tenant_id,
|
||||
'supplier_id': supplier_id,
|
||||
@@ -159,6 +173,71 @@ class SupplierInsightsOrchestrator:
|
||||
|
||||
return enriched
|
||||
|
||||
async def _publish_insight_events(self, tenant_id, insights, supplier_context=None):
|
||||
"""
|
||||
Publish insight events to RabbitMQ for alert processing.
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant identifier
|
||||
insights: List of created insights
|
||||
supplier_context: Additional context about the supplier
|
||||
"""
|
||||
if not self.event_publisher:
|
||||
logger.warning("No event publisher available for supplier insights")
|
||||
return
|
||||
|
||||
for insight in insights:
|
||||
# Determine severity based on confidence and priority
|
||||
confidence = insight.get('confidence', 0)
|
||||
priority = insight.get('priority', 'medium')
|
||||
|
||||
# Map priority to severity, with confidence as tiebreaker
|
||||
if priority == 'critical' or (priority == 'high' and confidence >= 70):
|
||||
severity = 'high'
|
||||
elif priority == 'high' or (priority == 'medium' and confidence >= 80):
|
||||
severity = 'medium'
|
||||
else:
|
||||
severity = 'low'
|
||||
|
||||
# Prepare the event data
|
||||
event_data = {
|
||||
'insight_id': insight.get('id'),
|
||||
'type': insight.get('type'),
|
||||
'title': insight.get('title'),
|
||||
'description': insight.get('description'),
|
||||
'category': insight.get('category'),
|
||||
'priority': insight.get('priority'),
|
||||
'confidence': confidence,
|
||||
'recommendation': insight.get('recommendation_actions', []),
|
||||
'impact_type': insight.get('impact_type'),
|
||||
'impact_value': insight.get('impact_value'),
|
||||
'supplier_id': supplier_context.get('supplier_id') if supplier_context else None,
|
||||
'timestamp': insight.get('detected_at', datetime.utcnow().isoformat()),
|
||||
'source_service': 'procurement',
|
||||
'source_model': 'supplier_performance_predictor'
|
||||
}
|
||||
|
||||
try:
|
||||
await self.event_publisher.publish_recommendation(
|
||||
event_type='ai_supplier_recommendation',
|
||||
tenant_id=tenant_id,
|
||||
severity=severity,
|
||||
data=event_data
|
||||
)
|
||||
logger.info(
|
||||
"Published supplier insight event",
|
||||
tenant_id=tenant_id,
|
||||
insight_id=insight.get('id'),
|
||||
severity=severity
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
"Failed to publish supplier insight event",
|
||||
tenant_id=tenant_id,
|
||||
insight_id=insight.get('id'),
|
||||
error=str(e)
|
||||
)
|
||||
|
||||
async def analyze_all_suppliers(
|
||||
self,
|
||||
tenant_id: str,
|
||||
|
||||
@@ -358,13 +358,66 @@ async def clone_demo_data(
|
||||
except KeyError:
|
||||
process_stage_value = None
|
||||
|
||||
# Transform foreign key references (product_id, recipe_id, order_id, forecast_id)
|
||||
transformed_product_id = None
|
||||
if batch_data.get('product_id'):
|
||||
try:
|
||||
transformed_product_id = str(transform_id(batch_data['product_id'], virtual_uuid))
|
||||
except Exception as e:
|
||||
logger.warning("Failed to transform product_id",
|
||||
product_id=batch_data.get('product_id'),
|
||||
error=str(e))
|
||||
|
||||
transformed_recipe_id = None
|
||||
if batch_data.get('recipe_id'):
|
||||
try:
|
||||
transformed_recipe_id = str(transform_id(batch_data['recipe_id'], virtual_uuid))
|
||||
except Exception as e:
|
||||
logger.warning("Failed to transform recipe_id",
|
||||
recipe_id=batch_data.get('recipe_id'),
|
||||
error=str(e))
|
||||
|
||||
transformed_order_id = None
|
||||
if batch_data.get('order_id'):
|
||||
try:
|
||||
transformed_order_id = str(transform_id(batch_data['order_id'], virtual_uuid))
|
||||
except Exception as e:
|
||||
logger.warning("Failed to transform order_id",
|
||||
order_id=batch_data.get('order_id'),
|
||||
error=str(e))
|
||||
|
||||
transformed_forecast_id = None
|
||||
if batch_data.get('forecast_id'):
|
||||
try:
|
||||
transformed_forecast_id = str(transform_id(batch_data['forecast_id'], virtual_uuid))
|
||||
except Exception as e:
|
||||
logger.warning("Failed to transform forecast_id",
|
||||
forecast_id=batch_data.get('forecast_id'),
|
||||
error=str(e))
|
||||
|
||||
# Transform equipment_used array
|
||||
transformed_equipment = []
|
||||
if batch_data.get('equipment_used'):
|
||||
for equip_id in batch_data['equipment_used']:
|
||||
try:
|
||||
transformed_equipment.append(str(transform_id(equip_id, virtual_uuid)))
|
||||
except Exception as e:
|
||||
logger.warning("Failed to transform equipment_id",
|
||||
equipment_id=equip_id,
|
||||
error=str(e))
|
||||
|
||||
# staff_assigned contains user IDs - these should NOT be transformed
|
||||
# because they reference actual user accounts which are NOT cloned
|
||||
# The demo uses the same user accounts across all virtual tenants
|
||||
staff_assigned = batch_data.get('staff_assigned', [])
|
||||
|
||||
new_batch = ProductionBatch(
|
||||
id=str(transformed_id),
|
||||
tenant_id=virtual_uuid,
|
||||
batch_number=f"{session_id[:8]}-{batch_data.get('batch_number', f'BATCH-{uuid.uuid4().hex[:8].upper()}')}",
|
||||
product_id=batch_data.get('product_id'),
|
||||
product_id=transformed_product_id,
|
||||
product_name=batch_data.get('product_name'),
|
||||
recipe_id=batch_data.get('recipe_id'),
|
||||
recipe_id=transformed_recipe_id,
|
||||
planned_start_time=adjusted_planned_start,
|
||||
planned_end_time=adjusted_planned_end,
|
||||
planned_quantity=batch_data.get('planned_quantity'),
|
||||
@@ -389,11 +442,11 @@ async def clone_demo_data(
|
||||
waste_quantity=batch_data.get('waste_quantity'),
|
||||
defect_quantity=batch_data.get('defect_quantity'),
|
||||
waste_defect_type=batch_data.get('waste_defect_type'),
|
||||
equipment_used=batch_data.get('equipment_used'),
|
||||
staff_assigned=batch_data.get('staff_assigned'),
|
||||
equipment_used=transformed_equipment,
|
||||
staff_assigned=staff_assigned,
|
||||
station_id=batch_data.get('station_id'),
|
||||
order_id=batch_data.get('order_id'),
|
||||
forecast_id=batch_data.get('forecast_id'),
|
||||
order_id=transformed_order_id,
|
||||
forecast_id=transformed_forecast_id,
|
||||
is_rush_order=batch_data.get('is_rush_order', False),
|
||||
is_special_recipe=batch_data.get('is_special_recipe', False),
|
||||
is_ai_assisted=batch_data.get('is_ai_assisted', False),
|
||||
|
||||
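The per-field try/except blocks above (product_id, recipe_id, order_id, forecast_id) all follow the same pattern. A possible follow-up, not in this commit, is a loop-based helper; this sketch assumes transform_id keeps the (original_id, virtual_uuid) signature shown in the diff:

# Hypothetical consolidation of the repeated foreign-key transformation blocks
def transform_fk_fields(batch_data, virtual_uuid, fields=('product_id', 'recipe_id', 'order_id', 'forecast_id')):
    """Return a dict of transformed foreign keys, logging and falling back to None on failure."""
    transformed = {}
    for field in fields:
        original = batch_data.get(field)
        if not original:
            transformed[field] = None
            continue
        try:
            transformed[field] = str(transform_id(original, virtual_uuid))
        except Exception as e:
            logger.warning("Failed to transform foreign key", field=field, value=original, error=str(e))
            transformed[field] = None
    return transformed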
@@ -7,7 +7,7 @@ Provides endpoints to trigger ML insight generation for:
|
||||
- Process efficiency analysis
|
||||
"""
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
from fastapi import APIRouter, Depends, HTTPException, Request
|
||||
from pydantic import BaseModel, Field
|
||||
from typing import Optional, List
|
||||
from uuid import UUID
|
||||
@@ -71,6 +71,7 @@ class YieldPredictionResponse(BaseModel):
|
||||
async def trigger_yield_prediction(
|
||||
tenant_id: str,
|
||||
request_data: YieldPredictionRequest,
|
||||
request: Request,
|
||||
db: AsyncSession = Depends(get_db)
|
||||
):
|
||||
"""
|
||||
@@ -81,10 +82,12 @@ async def trigger_yield_prediction(
|
||||
2. Runs the YieldInsightsOrchestrator to predict yields
|
||||
3. Generates insights about yield optimization opportunities
|
||||
4. Posts insights to AI Insights Service
|
||||
5. Publishes recommendation events to RabbitMQ
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant UUID
|
||||
request_data: Prediction parameters
|
||||
request: FastAPI request (for app state access)
|
||||
db: Database session
|
||||
|
||||
Returns:
|
||||
@@ -103,8 +106,13 @@ async def trigger_yield_prediction(
|
||||
from shared.clients.recipes_client import RecipesServiceClient
|
||||
from app.core.config import settings
|
||||
|
||||
# Get event publisher from app state (if available)
|
||||
event_publisher = getattr(request.app.state, 'event_publisher', None) if hasattr(request, 'app') else None
|
||||
|
||||
# Initialize orchestrator and recipes client
|
||||
orchestrator = YieldInsightsOrchestrator()
|
||||
orchestrator = YieldInsightsOrchestrator(
|
||||
event_publisher=event_publisher
|
||||
)
|
||||
recipes_client = RecipesServiceClient(settings)
|
||||
|
||||
# Get recipes to analyze from recipes service via API
|
||||
@@ -186,12 +194,18 @@ async def trigger_yield_prediction(
|
||||
continue # Skip batches without complete data
|
||||
|
||||
production_data.append({
|
||||
'production_date': batch.actual_start_time,
|
||||
'production_run_id': str(batch.id), # Required: unique identifier for each production run
|
||||
'recipe_id': str(batch.recipe_id), # Required: recipe identifier
|
||||
'started_at': batch.actual_start_time,
|
||||
'completed_at': batch.actual_end_time, # Optional but useful for duration analysis
|
||||
'batch_size': float(batch.planned_quantity), # Use planned_quantity as batch_size
|
||||
'planned_quantity': float(batch.planned_quantity),
|
||||
'actual_quantity': float(batch.actual_quantity),
|
||||
'yield_percentage': yield_pct,
|
||||
'worker_id': batch.notes or 'unknown', # Use notes field or default
|
||||
'batch_number': batch.batch_number
|
||||
'staff_assigned': batch.staff_assigned if batch.staff_assigned else ['unknown'],
|
||||
'batch_number': batch.batch_number,
|
||||
'equipment_id': batch.equipment_used[0] if batch.equipment_used and len(batch.equipment_used) > 0 else None,
|
||||
'notes': batch.quality_notes # Optional quality notes
|
||||
})
|
||||
|
||||
if not production_data:
|
||||
@@ -202,6 +216,14 @@ async def trigger_yield_prediction(
|
||||
|
||||
production_history = pd.DataFrame(production_data)
|
||||
|
||||
# Debug: Log DataFrame columns and sample data
|
||||
logger.debug(
|
||||
"Production history DataFrame created",
|
||||
recipe_id=recipe_id,
|
||||
columns=list(production_history.columns),
|
||||
sample_data=production_history.head(1).to_dict('records') if len(production_history) > 0 else None
|
||||
)
|
||||
|
||||
# Run yield analysis
|
||||
results = await orchestrator.analyze_and_post_insights(
|
||||
tenant_id=tenant_id,
|
||||
@@ -291,8 +313,6 @@ async def ml_insights_health():
|
||||
# INTERNAL ENDPOINTS (for demo-session service)
|
||||
# ================================================================
|
||||
|
||||
from fastapi import Request
|
||||
|
||||
# Create a separate router for internal endpoints to avoid the tenant prefix
|
||||
internal_router = APIRouter(
|
||||
tags=["ML Insights - Internal"]
|
||||
@@ -347,6 +367,7 @@ async def generate_yield_insights_internal(
|
||||
result = await trigger_yield_prediction(
|
||||
tenant_id=tenant_id,
|
||||
request_data=request_data,
|
||||
request=request,
|
||||
db=db
|
||||
)
|
||||
|
||||
|
||||
@@ -142,6 +142,7 @@ class ProductionService(StandardFastAPIService):
|
||||
app.state.production_alert_service = self.alert_service # Also store with this name for internal trigger
|
||||
app.state.notification_service = self.notification_service # Notification service for state change events
|
||||
app.state.production_scheduler = self.production_scheduler # Store scheduler for manual triggering
|
||||
app.state.event_publisher = self.event_publisher # Store event publisher for ML insights
|
||||
|
||||
async def on_shutdown(self, app: FastAPI):
"""Custom shutdown logic for production service"""

@@ -14,6 +14,7 @@ import os
# Add shared clients to path
sys.path.append(os.path.join(os.path.dirname(__file__), '../../../..'))
from shared.clients.ai_insights_client import AIInsightsClient
from shared.messaging import UnifiedEventPublisher

from app.ml.yield_predictor import YieldPredictor

@@ -28,15 +29,18 @@ class YieldInsightsOrchestrator:
1. Predict yield for upcoming production run or analyze historical performance
2. Generate insights for yield optimization opportunities
3. Post insights to AI Insights Service
4. Provide yield predictions for production planning
4. Publish recommendation events to RabbitMQ
5. Provide yield predictions for production planning
"""

def __init__(
self,
ai_insights_base_url: str = "http://ai-insights-service:8000"
ai_insights_base_url: str = "http://ai-insights-service:8000",
event_publisher: Optional[UnifiedEventPublisher] = None
):
self.predictor = YieldPredictor()
self.ai_insights_client = AIInsightsClient(ai_insights_base_url)
self.event_publisher = event_publisher

async def predict_and_post_insights(
self,

@@ -54,7 +58,7 @@ class YieldInsightsOrchestrator:
recipe_id: Recipe identifier
production_history: Historical production runs
production_context: Upcoming production context:
- worker_id
- staff_assigned (list of staff IDs)
- planned_start_time
- batch_size
- planned_quantity

@@ -109,6 +113,17 @@ class YieldInsightsOrchestrator:
successful=post_results['successful'],
failed=post_results['failed']
)

# Step 4: Publish recommendation events to RabbitMQ
created_insights = post_results.get('created_insights', [])
if created_insights:
recipe_context = production_context.copy() if production_context else {}
recipe_context['recipe_id'] = recipe_id
await self._publish_insight_events(
tenant_id=tenant_id,
insights=created_insights,
recipe_context=recipe_context
)
else:
post_results = {'total': 0, 'successful': 0, 'failed': 0}
logger.info("No insights to post for recipe", recipe_id=recipe_id)

@@ -193,6 +208,15 @@ class YieldInsightsOrchestrator:
total=post_results['total'],
successful=post_results['successful']
)

# Step 4: Publish recommendation events to RabbitMQ
created_insights = post_results.get('created_insights', [])
if created_insights:
await self._publish_insight_events(
tenant_id=tenant_id,
insights=created_insights,
recipe_context={'recipe_id': recipe_id}
)
else:
post_results = {'total': 0, 'successful': 0, 'failed': 0}

@@ -248,6 +272,83 @@ class YieldInsightsOrchestrator:

return enriched

async def _publish_insight_events(
self,
tenant_id: str,
insights: List[Dict[str, Any]],
recipe_context: Optional[Dict[str, Any]] = None
) -> None:
"""
Publish recommendation events to RabbitMQ for each insight.

Args:
tenant_id: Tenant identifier
insights: List of created insights (with insight_id from AI Insights Service)
recipe_context: Optional recipe context (name, id, etc.)
"""
if not self.event_publisher:
logger.warning("Event publisher not configured, skipping event publication")
return

for insight in insights:
try:
# Determine severity based on confidence and priority
confidence = insight.get('confidence', 0)
priority = insight.get('priority', 'medium')

if priority == 'urgent' or confidence >= 90:
severity = 'urgent'
elif priority == 'high' or confidence >= 70:
severity = 'high'
elif priority == 'medium' or confidence >= 50:
severity = 'medium'
else:
severity = 'low'

# Build event metadata
event_metadata = {
'insight_id': insight.get('id'), # From AI Insights Service response
'insight_type': insight.get('insight_type'),
'recipe_id': insight.get('metrics_json', {}).get('recipe_id'),
'recipe_name': recipe_context.get('recipe_name') if recipe_context else None,
'predicted_yield': insight.get('metrics_json', {}).get('predicted_yield'),
'confidence': confidence,
'recommendation': insight.get('recommendation'),
'impact_type': insight.get('impact_type'),
'impact_value': insight.get('impact_value'),
'source_service': 'production',
'source_model': 'yield_predictor'
}

# Remove None values
event_metadata = {k: v for k, v in event_metadata.items() if v is not None}

# Publish recommendation event
await self.event_publisher.publish_recommendation(
event_type='ai_yield_prediction',
tenant_id=tenant_id,
severity=severity,
data=event_metadata
)

logger.info(
"Published yield insight recommendation event",
tenant_id=tenant_id,
insight_id=insight.get('id'),
insight_type=insight.get('insight_type'),
severity=severity
)

except Exception as e:
logger.error(
"Failed to publish insight event",
tenant_id=tenant_id,
insight_id=insight.get('id'),
error=str(e),
exc_info=True
)
# Don't raise - we don't want to fail the whole workflow if event publishing fails

async def analyze_all_recipes(
self,
tenant_id: str,
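The hunks above add an optional `event_publisher` to the orchestrator and a fourth workflow step that publishes each created insight as a RabbitMQ recommendation event. A minimal wiring sketch follows; the module path, the `UnifiedEventPublisher()` construction, and the keyword call to `predict_and_post_insights` are assumptions for illustration and are not part of this diff.

```python
# Hedged sketch of driving the new workflow end to end. Module paths, the publisher
# construction, and the exact predict_and_post_insights signature are assumptions.
from shared.messaging import UnifiedEventPublisher
from app.ml.yield_insights_orchestrator import YieldInsightsOrchestrator  # hypothetical path


async def run_yield_insights(tenant_id: str, recipe_id: str, history: list) -> dict:
    publisher = UnifiedEventPublisher()  # connection/config omitted (assumption)
    orchestrator = YieldInsightsOrchestrator(
        ai_insights_base_url="http://ai-insights-service:8000",
        event_publisher=publisher,  # enables step 4: recommendation events to RabbitMQ
    )
    return await orchestrator.predict_and_post_insights(
        tenant_id=tenant_id,
        recipe_id=recipe_id,
        production_history=history,  # fields as documented in the predictor docstring below
        production_context={
            "staff_assigned": ["50000000-0000-0000-0000-000000000001"],
            "planned_start_time": "2025-01-15T06:00:00Z",
            "batch_size": 100,
            "planned_quantity": 100,
        },
    )
```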
@@ -62,14 +62,14 @@ class YieldPredictor:
|
||||
- planned_quantity
|
||||
- actual_quantity
|
||||
- yield_percentage
|
||||
- worker_id
|
||||
- staff_assigned (list of staff IDs)
|
||||
- started_at
|
||||
- completed_at
|
||||
- batch_size
|
||||
- equipment_id (optional)
|
||||
- notes (optional)
|
||||
production_context: Upcoming production context:
|
||||
- worker_id
|
||||
- staff_assigned (list of staff IDs)
|
||||
- planned_start_time
|
||||
- batch_size
|
||||
- equipment_id (optional)
|
||||
@@ -212,6 +212,9 @@ class YieldPredictor:
|
||||
df['is_small_batch'] = (df['batch_size'] < df['batch_size'].quantile(0.25)).astype(int)
|
||||
|
||||
# Worker experience features (proxy: number of previous runs)
|
||||
# Extract first worker from staff_assigned list
|
||||
df['worker_id'] = df['staff_assigned'].apply(lambda x: x[0] if isinstance(x, list) and len(x) > 0 else 'unknown')
|
||||
|
||||
df = df.sort_values('started_at')
|
||||
df['worker_run_count'] = df.groupby('worker_id').cumcount() + 1
|
||||
df['worker_experience_level'] = pd.cut(
|
||||
@@ -232,6 +235,10 @@ class YieldPredictor:
|
||||
factors = {}
|
||||
|
||||
# Worker impact
|
||||
# Extract worker_id from staff_assigned for analysis
|
||||
if 'worker_id' not in feature_df.columns:
|
||||
feature_df['worker_id'] = feature_df['staff_assigned'].apply(lambda x: x[0] if isinstance(x, list) and len(x) > 0 else 'unknown')
|
||||
|
||||
worker_yields = feature_df.groupby('worker_id')['yield_percentage'].agg(['mean', 'std', 'count'])
|
||||
worker_yields = worker_yields[worker_yields['count'] >= 3] # Min 3 runs per worker
|
||||
|
||||
@@ -339,7 +346,10 @@ class YieldPredictor:
|
||||
if 'duration_hours' in feature_df.columns:
|
||||
feature_columns.append('duration_hours')
|
||||
|
||||
# Encode worker_id
|
||||
# Encode worker_id (extracted from staff_assigned)
|
||||
if 'worker_id' not in feature_df.columns:
|
||||
feature_df['worker_id'] = feature_df['staff_assigned'].apply(lambda x: x[0] if isinstance(x, list) and len(x) > 0 else 'unknown')
|
||||
|
||||
worker_encoding = {worker: idx for idx, worker in enumerate(feature_df['worker_id'].unique())}
|
||||
feature_df['worker_encoded'] = feature_df['worker_id'].map(worker_encoding)
|
||||
feature_columns.append('worker_encoded')
|
||||
@@ -420,11 +430,15 @@ class YieldPredictor:
|
||||
) -> Dict[str, Any]:
|
||||
"""Predict yield for upcoming production run."""
|
||||
# Extract context
|
||||
worker_id = production_context.get('worker_id')
|
||||
staff_assigned = production_context.get('staff_assigned', [])
|
||||
worker_id = staff_assigned[0] if isinstance(staff_assigned, list) and len(staff_assigned) > 0 else 'unknown'
|
||||
planned_start = pd.to_datetime(production_context.get('planned_start_time'))
|
||||
batch_size = production_context.get('batch_size')
|
||||
|
||||
# Get worker experience
|
||||
if 'worker_id' not in feature_df.columns:
|
||||
feature_df['worker_id'] = feature_df['staff_assigned'].apply(lambda x: x[0] if isinstance(x, list) and len(x) > 0 else 'unknown')
|
||||
|
||||
worker_runs = feature_df[feature_df['worker_id'] == worker_id]
|
||||
worker_run_count = len(worker_runs) if len(worker_runs) > 0 else 1
|
||||
|
||||
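The same staff_assigned-to-worker_id extraction now appears inline in several of the hunks above. A small standalone restatement of that rule, purely for illustration (the diff keeps it as an inline lambda, and the helper name here is hypothetical):

```python
from typing import List, Optional


def first_worker(staff_assigned: Optional[List[str]]) -> str:
    """Return the first assigned staff ID, or 'unknown' when the list is missing or empty.

    Restates the inline lambda used in the hunks above; the helper name is illustrative.
    """
    if isinstance(staff_assigned, list) and len(staff_assigned) > 0:
        return staff_assigned[0]
    return "unknown"


assert first_worker(["worker_1", "worker_2"]) == "worker_1"
assert first_worker([]) == "unknown"
assert first_worker(None) == "unknown"
```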
@@ -578,7 +592,7 @@ class YieldPredictor:
'action': 'review_production_factors',
'params': {
'recipe_id': recipe_id,
'worker_id': production_context.get('worker_id')
'worker_id': worker_id
}
}]
})

@@ -31,7 +31,7 @@ def stable_yield_history():
'planned_quantity': 100,
'actual_quantity': np.random.normal(97, 1.5), # 97% avg, low variance
'yield_percentage': np.random.normal(97, 1.5),
'worker_id': f'worker_{i % 3}', # 3 workers
'staff_assigned': [f'worker_{i % 3}'], # 3 workers
'started_at': run_date,
'completed_at': run_date + timedelta(hours=4),
'batch_size': np.random.randint(80, 120)

@@ -45,7 +45,7 @@ class SalesServiceClient(BaseServiceClient):
if product_id:
params["product_id"] = product_id

result = await self.get("sales", tenant_id=tenant_id, params=params)
result = await self.get("sales/sales", tenant_id=tenant_id, params=params)

# Handle both list and dict responses
if result is None:

@@ -28,7 +28,7 @@ class SuppliersServiceClient(BaseServiceClient):
async def get_supplier_by_id(self, tenant_id: str, supplier_id: str) -> Optional[Dict[str, Any]]:
"""Get supplier details by ID"""
try:
result = await self.get(f"suppliers/{supplier_id}", tenant_id=tenant_id)
result = await self.get(f"suppliers/suppliers/{supplier_id}", tenant_id=tenant_id)
if result:
logger.info("Retrieved supplier details from suppliers service",
supplier_id=supplier_id, tenant_id=tenant_id)
@@ -49,7 +49,8 @@
"batch_number": "BCN-HAR-20250115-001",
"created_at": "BASE_TS",
"enterprise_shared": true,
"source_location": "Central Warehouse - Barcelona"
"source_location": "Central Warehouse - Barcelona",
"staff_assigned": []
},
{
"id": "10000000-0000-0000-0000-000000002002",

@@ -64,7 +65,8 @@
"batch_number": "BCN-MAN-20250115-001",
"created_at": "BASE_TS",
"enterprise_shared": true,
"source_location": "Central Warehouse - Barcelona"
"source_location": "Central Warehouse - Barcelona",
"staff_assigned": []
},
{
"id": "20000000-0000-0000-0000-000000002001",

@@ -79,7 +81,8 @@
"batch_number": "BCN-BAG-20250115-001",
"created_at": "BASE_TS",
"enterprise_shared": true,
"source_location": "Central Production Facility - Barcelona"
"source_location": "Central Production Facility - Barcelona",
"staff_assigned": []
},
{
"id": "20000000-0000-0000-0000-000000002002",

@@ -94,7 +97,8 @@
"batch_number": "BCN-CRO-20250115-001",
"created_at": "BASE_TS",
"enterprise_shared": true,
"source_location": "Central Production Facility - Barcelona"
"source_location": "Central Production Facility - Barcelona",
"staff_assigned": []
}
],
"local_sales": [

@@ -203,7 +207,8 @@
"operator_id": "50000000-0000-0000-0000-000000000012",
"created_at": "BASE_TS",
"notes": "Producció matinal de baguettes a Barcelona",
"enterprise_location_production": true
"enterprise_location_production": true,
"staff_assigned": []
},
{
"id": "40000000-0000-0000-0000-000000002002",

@@ -222,7 +227,8 @@
"operator_id": "50000000-0000-0000-0000-000000000013",
"created_at": "BASE_TS",
"notes": "Producció de croissants en curs a Barcelona",
"enterprise_location_production": true
"enterprise_location_production": true,
"staff_assigned": []
}
],
"local_forecasts": [

@@ -46,7 +46,8 @@
"batch_number": "MAD-HAR-20250115-001",
"created_at": "BASE_TS",
"enterprise_shared": true,
"source_location": "Central Warehouse - Madrid"
"source_location": "Central Warehouse - Madrid",
"staff_assigned": []
},
{
"id": "20000000-0000-0000-0000-000000001501",

@@ -61,7 +62,8 @@
"batch_number": "MAD-BAG-20250115-001",
"created_at": "BASE_TS",
"enterprise_shared": true,
"source_location": "Central Production Facility - Madrid"
"source_location": "Central Production Facility - Madrid",
"staff_assigned": []
}
],
"local_sales": [

@@ -49,7 +49,8 @@
"batch_number": "VLC-HAR-20250115-001",
"created_at": "BASE_TS",
"enterprise_shared": true,
"source_location": "Central Warehouse - Valencia"
"source_location": "Central Warehouse - Valencia",
"staff_assigned": []
},
{
"id": "10000000-0000-0000-0000-000000003002",

@@ -64,7 +65,8 @@
"batch_number": "VLC-MAN-20250115-001",
"created_at": "BASE_TS",
"enterprise_shared": true,
"source_location": "Central Warehouse - Valencia"
"source_location": "Central Warehouse - Valencia",
"staff_assigned": []
},
{
"id": "10000000-0000-0000-0000-000000003003",

@@ -79,7 +81,8 @@
"batch_number": "VLC-SAL-20250115-001",
"created_at": "BASE_TS",
"enterprise_shared": true,
"source_location": "Central Warehouse - Valencia"
"source_location": "Central Warehouse - Valencia",
"staff_assigned": []
},
{
"id": "20000000-0000-0000-0000-000000003001",

@@ -94,7 +97,8 @@
"batch_number": "VLC-BAG-20250115-001",
"created_at": "BASE_TS",
"enterprise_shared": true,
"source_location": "Central Production Facility - Valencia"
"source_location": "Central Production Facility - Valencia",
"staff_assigned": []
},
{
"id": "20000000-0000-0000-0000-000000003002",

@@ -109,7 +113,8 @@
"batch_number": "VLC-PAN-20250115-001",
"created_at": "BASE_TS",
"enterprise_shared": true,
"source_location": "Central Production Facility - Valencia"
"source_location": "Central Production Facility - Valencia",
"staff_assigned": []
}
],
"local_sales": [

@@ -232,7 +237,8 @@
"operator_id": "50000000-0000-0000-0000-000000000013",
"created_at": "BASE_TS",
"notes": "Producción matinal de baguettes en Valencia",
"enterprise_location_production": true
"enterprise_location_production": true,
"staff_assigned": []
},
{
"id": "40000000-0000-0000-0000-000000003002",

@@ -251,7 +257,8 @@
"operator_id": "50000000-0000-0000-0000-000000000014",
"created_at": "BASE_TS",
"notes": "Producción de pan de campo completada",
"enterprise_location_production": true
"enterprise_location_production": true,
"staff_assigned": []
},
{
"id": "40000000-0000-0000-0000-000000003003",

@@ -270,7 +277,8 @@
"operator_id": "50000000-0000-0000-0000-000000000013",
"created_at": "BASE_TS",
"notes": "Lote programado para mañana - pedido de hotel",
"enterprise_location_production": true
"enterprise_location_production": true,
"staff_assigned": []
}
],
"local_forecasts": [

@@ -85,7 +85,8 @@
"quantity": 40.0,
"delivery_time": "2025-01-15T16:00:00Z"
}
]
],
"staff_assigned": []
}
]
}
File diff suppressed because it is too large
File diff suppressed because it is too large

296 shared/demo/fixtures/professional/generate_ai_insights_data.py Normal file
@@ -0,0 +1,296 @@
#!/usr/bin/env python3
"""
Generate AI Insights Data for Professional Demo
Adds realistic stock movements and production worker data to enable AI insight generation
"""

import json
import random
from datetime import datetime, timedelta
from uuid import uuid4, UUID
from typing import List, Dict, Any

# Set random seed for reproducibility
random.seed(42)

# Key ingredients that need demand history (matching actual IDs in 03-inventory.json)
KEY_INGREDIENTS = [
{"id": "10000000-0000-0000-0000-000000000001", "name": "Harina de Trigo T55", "avg_daily": 45.0, "variability": 0.3, "unit_cost": 0.85},
{"id": "10000000-0000-0000-0000-000000000002", "name": "Harina de Trigo T65", "avg_daily": 35.0, "variability": 0.25, "unit_cost": 0.95},
{"id": "10000000-0000-0000-0000-000000000003", "name": "Harina de Fuerza W300", "avg_daily": 25.0, "variability": 0.35, "unit_cost": 1.15},
{"id": "10000000-0000-0000-0000-000000000011", "name": "Mantequilla sin Sal", "avg_daily": 8.5, "variability": 0.35, "unit_cost": 6.50},
{"id": "10000000-0000-0000-0000-000000000012", "name": "Leche Entera Fresca", "avg_daily": 18.0, "variability": 0.3, "unit_cost": 0.95},
{"id": "10000000-0000-0000-0000-000000000014", "name": "Huevos Frescos", "avg_daily": 5.5, "variability": 0.4, "unit_cost": 3.80},
{"id": "10000000-0000-0000-0000-000000000021", "name": "Levadura Fresca", "avg_daily": 3.5, "variability": 0.4, "unit_cost": 4.20},
{"id": "10000000-0000-0000-0000-000000000031", "name": "Sal Marina Fina", "avg_daily": 2.8, "variability": 0.2, "unit_cost": 1.50},
{"id": "10000000-0000-0000-0000-000000000032", "name": "Azúcar Blanco", "avg_daily": 12.0, "variability": 0.3, "unit_cost": 1.10},
{"id": "10000000-0000-0000-0000-000000000013", "name": "Nata para Montar", "avg_daily": 4.2, "variability": 0.35, "unit_cost": 2.80},
]

# Workers with different skill levels (matching users in 02-auth.json)
WORKERS = [
{"id": "c1a2b3c4-d5e6-47a8-b9c0-d1e2f3a4b5c6", "name": "María García (Owner - Master Baker)", "skill_level": 0.98, "shift": "morning"}, # Expert
{"id": "50000000-0000-0000-0000-000000000001", "name": "Juan Panadero (Baker)", "skill_level": 0.95, "shift": "morning"}, # Very skilled
{"id": "50000000-0000-0000-0000-000000000006", "name": "Isabel Producción (Production Manager)", "skill_level": 0.90, "shift": "afternoon"}, # Experienced
{"id": "50000000-0000-0000-0000-000000000005", "name": "Carlos Almacén (Warehouse - Occasional Baker)", "skill_level": 0.78, "shift": "afternoon"}, # Learning
]

TENANT_ID = "a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6"


def generate_stock_movements(days: int = 90) -> List[Dict[str, Any]]:
"""Generate realistic stock movements for AI insights"""
movements = []

# Generate PRODUCTION_USE movements (daily consumption)
for day in range(days, 0, -1):
for ingredient in KEY_INGREDIENTS:
# Skip some days randomly (not every ingredient used every day)
if random.random() < 0.15: # 15% chance to skip
continue

# Calculate quantity with variability
base_qty = ingredient["avg_daily"]
variability = ingredient["variability"]
quantity = base_qty * random.uniform(1 - variability, 1 + variability)

# Reduce usage on weekends (lower production)
date_offset = f"BASE_TS - {day}d"
day_of_week = (90 - day) % 7 # Approximate day of week
if day_of_week in [5, 6]: # Weekend
quantity *= 0.6

# Round to 2 decimals
quantity = round(quantity, 2)

movement = {
"id": str(uuid4()),
"tenant_id": TENANT_ID,
"ingredient_id": ingredient["id"],
"stock_id": None,
"movement_type": "PRODUCTION_USE",
"quantity": quantity,
"unit_cost": ingredient["unit_cost"],
"total_cost": round(quantity * ingredient["unit_cost"], 2),
"quantity_before": None,
"quantity_after": None,
"movement_date": date_offset,
"reason_code": "production_consumption",
"notes": f"Daily production usage - {ingredient['name']}",
"created_at": date_offset,
"created_by": "c1a2b3c4-d5e6-47a8-b9c0-d1e2f3a4b5c6"
}
movements.append(movement)

# Generate PURCHASE movements (supplier deliveries - weekly/bi-weekly)
for ingredient in KEY_INGREDIENTS:
# Calculate delivery frequency based on usage
weekly_usage = ingredient["avg_daily"] * 7
delivery_qty = weekly_usage * 2 # 2 weeks of stock

# Bi-weekly deliveries over 90 days = ~6-7 deliveries
num_deliveries = 6
delivery_interval = days // num_deliveries

for delivery_num in range(num_deliveries):
day_offset = days - (delivery_num * delivery_interval) - random.randint(0, 3)
if day_offset < 1:
continue

# Add some variability to delivery quantity
qty = delivery_qty * random.uniform(0.9, 1.1)
qty = round(qty, 2)

movement = {
"id": str(uuid4()),
"tenant_id": TENANT_ID,
"ingredient_id": ingredient["id"],
"stock_id": None,
"movement_type": "PURCHASE",
"quantity": qty,
"unit_cost": ingredient["unit_cost"],
"total_cost": round(qty * ingredient["unit_cost"], 2),
"quantity_before": None,
"quantity_after": None,
"movement_date": f"BASE_TS - {day_offset}d",
"reason_code": "supplier_delivery",
"notes": f"Weekly delivery from supplier - {ingredient['name']}",
"created_at": f"BASE_TS - {day_offset}d",
"created_by": "c1a2b3c4-d5e6-47a8-b9c0-d1e2f3a4b5c6"
}
movements.append(movement)

# Add occasional stockout events (0 inventory remaining)
# Add 5-8 stockout PRODUCTION_USE movements
for _ in range(random.randint(5, 8)):
ingredient = random.choice(KEY_INGREDIENTS)
day_offset = random.randint(1, days)

movement = {
"id": str(uuid4()),
"tenant_id": TENANT_ID,
"ingredient_id": ingredient["id"],
"stock_id": None,
"movement_type": "PRODUCTION_USE",
"quantity": round(ingredient["avg_daily"] * 1.3, 2), # Higher than usual
"unit_cost": ingredient["unit_cost"],
"total_cost": round(ingredient["avg_daily"] * 1.3 * ingredient["unit_cost"], 2),
"quantity_before": round(ingredient["avg_daily"] * 0.8, 2),
"quantity_after": 0.0, # Stockout!
"movement_date": f"BASE_TS - {day_offset}d",
"reason_code": "production_consumption_stockout",
"notes": f"STOCKOUT - Ran out of {ingredient['name']} during production",
"created_at": f"BASE_TS - {day_offset}d",
"created_by": "c1a2b3c4-d5e6-47a8-b9c0-d1e2f3a4b5c6"
}
movements.append(movement)

return movements


def add_worker_data_to_batches(batches: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
"""Add staff_assigned and completed_at to production batches"""
updated_batches = []

for batch in batches:
# Skip if no yield data (can't assign skill-based worker)
if batch.get("yield_percentage") is None:
updated_batches.append(batch)
continue

# Assign worker based on yield (better yields = better workers)
yield_pct = batch["yield_percentage"]

if yield_pct >= 95:
# Expert workers for high yields
worker = random.choice(WORKERS[:2])
elif yield_pct >= 90:
# Experienced workers
worker = random.choice(WORKERS[1:3])
elif yield_pct >= 85:
# Competent workers
worker = random.choice(WORKERS[2:4])
else:
# Junior workers for lower yields
worker = random.choice(WORKERS[3:])

# Add staff_assigned
if "staff_assigned" not in batch or not isinstance(batch["staff_assigned"], list):
batch["staff_assigned"] = []
batch["staff_assigned"].append(worker["id"])

# Calculate completed_at from actual_start_time + planned_duration_minutes
if batch.get("actual_start_time") and batch.get("planned_duration_minutes"):
# Parse the BASE_TS offset
start_time_str = batch["actual_start_time"]
duration_mins = batch["planned_duration_minutes"]

# Add duration to start time with some variability
actual_duration = duration_mins * random.uniform(0.95, 1.15) # +/- 15% variability

# Parse the start time offset to calculate completion time
# Format: "BASE_TS - 6d 7h 30m" or "BASE_TS - 6d 7h"
if "BASE_TS" in start_time_str:
# Extract the offset parts
# Convert duration to hours for easier calculation
duration_hours = actual_duration / 60.0

# Parse existing offset
parts = start_time_str.replace("BASE_TS", "").strip()
# Simple approach: just add the duration to the hours component
# Example: "- 6d 7h 30m" -> add 3.5h -> "- 6d 10h 30m" (approximately)

# For simplicity, create a new timestamp offset
# Don't try to parse complex string, just create a note field
batch["actual_duration_minutes"] = round(actual_duration, 1)
# Don't set completed_at - let the system calculate it if needed

updated_batches.append(batch)

return updated_batches


def main():
"""Generate and update JSON files with AI insights data"""
print("🔧 Generating AI Insights Data for Professional Demo...")
print()

# 1. Generate stock movements
print("📊 Generating stock movements...")
stock_movements = generate_stock_movements(days=90)
usage_count = len([m for m in stock_movements if m["movement_type"] == "PRODUCTION_USE"])
in_count = len([m for m in stock_movements if m["movement_type"] == "PURCHASE"])
stockout_count = len([m for m in stock_movements if m.get("quantity_after") == 0.0])

print(f" ✓ Generated {len(stock_movements)} stock movements")
print(f" - PRODUCTION_USE movements: {usage_count}")
print(f" - PURCHASE movements (deliveries): {in_count}")
print(f" - Stockout events: {stockout_count}")
print()

# 2. Load and update inventory JSON
print("📦 Updating 03-inventory.json...")
with open("/Users/urtzialfaro/Documents/bakery-ia/shared/demo/fixtures/professional/03-inventory.json", "r") as f:
inventory_data = json.load(f)

# Append new movements to existing ones
existing_movements = inventory_data.get("stock_movements", [])
print(f" - Existing movements: {len(existing_movements)}")
inventory_data["stock_movements"] = existing_movements + stock_movements
print(f" - Total movements: {len(inventory_data['stock_movements'])}")

# Save updated inventory
with open("/Users/urtzialfaro/Documents/bakery-ia/shared/demo/fixtures/professional/03-inventory.json", "w") as f:
json.dump(inventory_data, f, indent=2, ensure_ascii=False)
print(" ✓ Updated inventory file")
print()

# 3. Load and update production JSON
print("🏭 Updating 06-production.json...")
with open("/Users/urtzialfaro/Documents/bakery-ia/shared/demo/fixtures/professional/06-production.json", "r") as f:
production_data = json.load(f)

# Update production batches with worker data
original_batches = production_data.get("batches", [])
print(f" - Total batches: {len(original_batches)}")

updated_batches = add_worker_data_to_batches(original_batches)
batches_with_workers = len([b for b in updated_batches if b.get("staff_assigned") and len(b.get("staff_assigned", [])) > 0])
batches_with_completion = len([b for b in updated_batches if b.get("completed_at")])

production_data["batches"] = updated_batches
print(f" - Batches with worker_id: {batches_with_workers}")
print(f" - Batches with completed_at: {batches_with_completion}")

# Save updated production
with open("/Users/urtzialfaro/Documents/bakery-ia/shared/demo/fixtures/professional/06-production.json", "w") as f:
json.dump(production_data, f, indent=2, ensure_ascii=False)
print(" ✓ Updated production file")
print()

# 4. Summary
print("=" * 60)
print("✅ AI INSIGHTS DATA GENERATION COMPLETE")
print("=" * 60)
print()
print("📊 DATA ADDED:")
print(f" • Stock movements (PRODUCTION_USE): {usage_count} records (90 days)")
print(f" • Stock movements (PURCHASE): {in_count} deliveries")
print(f" • Stockout events: {stockout_count}")
print(f" • Worker assignments: {batches_with_workers} batches")
print(f" • Completion timestamps: {batches_with_completion} batches")
print()
print("🎯 AI INSIGHTS READINESS:")
print(" ✓ Safety Stock Optimizer: READY (90 days demand data)")
print(" ✓ Yield Predictor: READY (worker data added)")
print(" ✓ Sustainability Metrics: READY (existing waste data)")
print()
print("🚀 Next steps:")
print(" 1. Test demo session creation")
print(" 2. Verify AI insights generation")
print(" 3. Check insight quality in frontend")
print()


if __name__ == "__main__":
main()
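The script above appends 90 days of synthetic stock movements to 03-inventory.json and worker assignments to 06-production.json using paths hard-coded to the author's machine. A quick post-run sanity check might look like the following sketch; the relative fixture path is an assumption and should be adjusted to your checkout before running.

```python
# Hedged sketch: verify the regenerated fixture after running the generator.
# The fixture path mirrors the one hard-coded in the script above (assumption).
import json
from collections import Counter

with open("shared/demo/fixtures/professional/03-inventory.json") as f:
    inventory = json.load(f)

movements = inventory.get("stock_movements", [])
counts = Counter(m.get("movement_type") for m in movements)
print(counts.get("PRODUCTION_USE", 0), "production-use movements")
print(counts.get("PURCHASE", 0), "purchase movements (deliveries)")
print(sum(1 for m in movements if m.get("quantity_after") == 0.0), "stockout events")
```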
@@ -449,7 +449,7 @@ class UnifiedEventPublisher:
elif event_class == "notification":
routing_key = f"notification.{event_domain}.info"
elif event_class == "recommendation":
routing_key = f"recommendation.{event_domain}.medium"
routing_key = f"recommendation.{event_domain}.{severity or 'medium'}"
else: # business events
routing_key = f"business.{event_type.replace('.', '_')}"

@@ -538,14 +538,16 @@ class UnifiedEventPublisher:
self,
event_type: str,
tenant_id: Union[str, uuid.UUID],
data: Dict[str, Any]
data: Dict[str, Any],
severity: Optional[str] = None
) -> bool:
"""Publish a recommendation (suggestion to user)"""
return await self.publish_event(
event_type=event_type,
tenant_id=tenant_id,
data=data,
event_class="recommendation"
event_class="recommendation",
severity=severity
)
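Together, the two hunks above let callers attach a severity to a recommendation so the routing key ends in that severity instead of always ".medium". A hedged sketch of a call using the new parameter; the publisher instance is assumed to be already connected, and the domain segment ("production") is an assumption about how the event domain is derived from the event type, which this diff does not show.

```python
from typing import Any, Dict


async def publish_yield_recommendation(publisher: Any, tenant_id: str, data: Dict[str, Any]) -> bool:
    # Hedged sketch: with severity="high" the routing key is expected to be
    # "recommendation.production.high" (assuming the event domain resolves to "production");
    # omitting severity keeps the previous behaviour, "recommendation.production.medium".
    return await publisher.publish_recommendation(
        event_type="ai_yield_prediction",
        tenant_id=tenant_id,
        data=data,
        severity="high",
    )
```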
@@ -174,42 +174,81 @@ def resolve_time_marker(
# Just "BASE_TS" - return session_created_at
return session_created_at

# Parse operator and value
operator = offset_part[0]
value_part = offset_part[1:].strip()
# Handle complex multi-operation markers like "- 30d 6h + 4h 5m"
# Split by operators to handle multiple operations
import re

if operator not in ['+', '-']:
raise ValueError(f"Invalid operator in time marker: {time_marker}")
# Parse all operations in the format: [operator][value]
# Pattern matches: optional whitespace, operator (+/-), number with optional decimal, unit (d/h/m)
pattern = r'\s*([+-])\s*(\d+\.?\d*)\s*([dhm])'
operations = []

# Parse time components (supports decimals like 0.5d, 1.25h)
days = 0.0
hours = 0.0
minutes = 0.0
# Find all operations in the string
for match in re.finditer(pattern, offset_part):
operator = match.group(1)
value = float(match.group(2))
unit = match.group(3)
operations.append((operator, value, unit))

if 'd' in value_part:
# Handle days (supports decimals like 0.5d = 12 hours)
day_part, rest = value_part.split('d', 1)
days = float(day_part)
value_part = rest
if not operations:
# Fallback to old simple parsing for backwards compatibility
operator = offset_part[0]
value_part = offset_part[1:].strip()

if 'h' in value_part:
# Handle hours (supports decimals like 1.5h = 1h30m)
hour_part, rest = value_part.split('h', 1)
hours = float(hour_part)
value_part = rest
if operator not in ['+', '-']:
raise ValueError(f"Invalid operator in time marker: {time_marker}")

if 'm' in value_part:
# Handle minutes (supports decimals like 30.5m)
minute_part = value_part.split('m', 1)[0]
minutes = float(minute_part)
# Parse time components (supports decimals like 0.5d, 1.25h)
days = 0.0
hours = 0.0
minutes = 0.0

# Calculate offset using float values
offset = timedelta(days=days, hours=hours, minutes=minutes)
if 'd' in value_part:
# Handle days (supports decimals like 0.5d = 12 hours)
day_part, rest = value_part.split('d', 1)
days = float(day_part)
value_part = rest

if operator == '+':
return session_created_at + offset
else:
return session_created_at - offset
if 'h' in value_part:
# Handle hours (supports decimals like 1.5h = 1h30m)
hour_part, rest = value_part.split('h', 1)
hours = float(hour_part)
value_part = rest

if 'm' in value_part:
# Handle minutes (supports decimals like 30.5m)
minute_part = value_part.split('m', 1)[0]
minutes = float(minute_part)

# Calculate offset using float values
offset = timedelta(days=days, hours=hours, minutes=minutes)

if operator == '+':
return session_created_at + offset
else:
return session_created_at - offset

# Process multiple operations
result_time = session_created_at

for operator, value, unit in operations:
if unit == 'd':
offset = timedelta(days=value)
elif unit == 'h':
offset = timedelta(hours=value)
elif unit == 'm':
offset = timedelta(minutes=value)
else:
raise ValueError(f"Invalid time unit '{unit}' in time marker: {time_marker}")

if operator == '+':
result_time = result_time + offset
elif operator == '-':
result_time = result_time - offset
else:
raise ValueError(f"Invalid operator '{operator}' in time marker: {time_marker}")

return result_time


def shift_to_session_time(
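The rewritten parser accepts multi-operation markers by collecting every signed value-unit pair with the regex above and applying them in order, falling back to the old single-offset parsing when no pairs match. A small self-contained restatement of that arithmetic (not the project function itself) is shown below; note that, as far as this hunk shows, components written without their own sign (such as the "6h" in the comment's "- 30d 6h + 4h 5m" example) are not captured by the pattern.

```python
# Illustrative restatement of the multi-operation marker arithmetic from the hunk above.
import re
from datetime import datetime, timedelta

pattern = r'\s*([+-])\s*(\d+\.?\d*)\s*([dhm])'
units = {"d": "days", "h": "hours", "m": "minutes"}

session_created_at = datetime(2025, 1, 15, 12, 0, 0)  # stand-in value, not from the fixtures
result = session_created_at
for op, value, unit in re.findall(pattern, "- 2d + 3h + 30m"):
    delta = timedelta(**{units[unit]: float(value)})
    result = result + delta if op == "+" else result - delta

print(result)  # 2025-01-13 15:30:00
```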