diff --git a/.github/workflows/playwright.yml b/.github/workflows/playwright.yml
deleted file mode 100644
index 72473836..00000000
--- a/.github/workflows/playwright.yml
+++ /dev/null
@@ -1,112 +0,0 @@
-name: Playwright E2E Tests
-
-on:
- push:
- branches: [main, develop]
- paths:
- - 'frontend/**'
- - '.github/workflows/playwright.yml'
- pull_request:
- branches: [main, develop]
- paths:
- - 'frontend/**'
- - '.github/workflows/playwright.yml'
-
-jobs:
- test:
- timeout-minutes: 60
- runs-on: ubuntu-latest
-
- steps:
- - name: Checkout code
- uses: actions/checkout@v4
-
- - name: Setup Node.js
- uses: actions/setup-node@v4
- with:
- node-version: '20'
- cache: 'npm'
- cache-dependency-path: frontend/package-lock.json
-
- - name: Install frontend dependencies
- run: npm ci
- working-directory: ./frontend
-
- - name: Install Playwright browsers
- run: npx playwright install --with-deps
- working-directory: ./frontend
-
- - name: Run Playwright tests
- run: npx playwright test
- working-directory: ./frontend
- env:
- CI: true
- # Add test user credentials as secrets
- TEST_USER_EMAIL: ${{ secrets.TEST_USER_EMAIL }}
- TEST_USER_PASSWORD: ${{ secrets.TEST_USER_PASSWORD }}
-
- - name: Upload test results
- uses: actions/upload-artifact@v4
- if: always()
- with:
- name: playwright-report
- path: frontend/playwright-report/
- retention-days: 30
-
- - name: Upload test videos
- uses: actions/upload-artifact@v4
- if: failure()
- with:
- name: playwright-videos
- path: frontend/test-results/
- retention-days: 7
-
- - name: Upload screenshots
- uses: actions/upload-artifact@v4
- if: failure()
- with:
- name: playwright-screenshots
- path: frontend/test-results/**/*.png
- retention-days: 7
-
- - name: Comment PR with test results
- uses: actions/github-script@v7
- if: github.event_name == 'pull_request' && always()
- with:
- github-token: ${{ secrets.GITHUB_TOKEN }}
- script: |
- const fs = require('fs');
- const path = require('path');
-
- try {
- // Read test results
- const resultsPath = path.join('frontend', 'test-results', 'results.json');
-
- if (fs.existsSync(resultsPath)) {
- const results = JSON.parse(fs.readFileSync(resultsPath, 'utf8'));
-
- const passed = results.stats?.expected || 0;
- const failed = results.stats?.unexpected || 0;
- const skipped = results.stats?.skipped || 0;
- const total = passed + failed + skipped;
-
- const comment = `## ๐ญ Playwright Test Results
-
- - โ
**Passed:** ${passed}
- - โ **Failed:** ${failed}
- - โญ๏ธ **Skipped:** ${skipped}
- - ๐ **Total:** ${total}
-
- ${failed > 0 ? 'โ ๏ธ Some tests failed. Check the workflow artifacts for details.' : 'โจ All tests passed!'}
- `;
-
- github.rest.issues.createComment({
- issue_number: context.issue.number,
- owner: context.repo.owner,
- repo: context.repo.repo,
- body: comment
- });
- }
- } catch (error) {
- console.log('Could not post test results comment:', error);
- }
diff --git a/.github/workflows/validate-demo-data.yml b/.github/workflows/validate-demo-data.yml
deleted file mode 100644
index 7b195057..00000000
--- a/.github/workflows/validate-demo-data.yml
+++ /dev/null
@@ -1,75 +0,0 @@
-name: Validate Demo Data
-
-on:
- push:
- branches: [ main ]
- paths:
- - 'shared/demo/**'
- - 'scripts/validate_cross_refs.py'
- pull_request:
- branches: [ main ]
- paths:
- - 'shared/demo/**'
- - 'scripts/validate_cross_refs.py'
- workflow_dispatch:
-
-jobs:
- validate-demo-data:
- name: Validate Demo Data
- runs-on: ubuntu-latest
-
- steps:
- - name: Checkout repository
- uses: actions/checkout@v4
-
- - name: Set up Python
- uses: actions/setup-python@v4
- with:
- python-version: '3.9'
-
- - name: Install dependencies
- run: |
- python -m pip install --upgrade pip
- pip install pyyaml json-schema
-
- - name: Run cross-reference validation
- run: |
- echo "๐ Running cross-reference validation..."
- python scripts/validate_cross_refs.py
-
- - name: Validate JSON schemas
- run: |
- echo "๐ Validating JSON schemas..."
- find shared/demo/schemas -name "*.schema.json" -exec echo "Validating {}" \;
- # Add schema validation logic here
-
- - name: Check JSON syntax
- run: |
- echo "๐ Checking JSON syntax..."
- find shared/demo/fixtures -name "*.json" -exec python -m json.tool {} > /dev/null \;
- echo "โ
All JSON files are valid"
-
- - name: Validate required fields
- run: |
- echo "๐ Validating required fields..."
- # Add required field validation logic here
-
- - name: Check temporal consistency
- run: |
- echo "โฐ Checking temporal consistency..."
- # Add temporal validation logic here
-
- - name: Summary
- run: |
- echo "๐ Demo data validation completed successfully!"
- echo "โ
All checks passed"
-
- - name: Upload validation report
- if: always()
- uses: actions/upload-artifact@v3
- with:
- name: validation-report
- path: |
- validation-report.txt
- **/validation-*.log
- if-no-files-found: ignore
\ No newline at end of file
diff --git a/AI_INSIGHTS_DATA_FLOW.md b/AI_INSIGHTS_DATA_FLOW.md
deleted file mode 100644
index 89615b10..00000000
--- a/AI_INSIGHTS_DATA_FLOW.md
+++ /dev/null
@@ -1,354 +0,0 @@
-# AI Insights Data Flow Diagram
-
-## Quick Reference: JSON Files โ AI Insights
-
-```
-โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
-โ DEMO SESSION CREATION โ
-โ โ
-โ POST /api/demo/sessions {"demo_account_type": "professional"} โ
-โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโฌโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
- โ
- โผ
-โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
-โ CLONE ORCHESTRATOR (Auto-triggered) โ
-โ โ
-โ Loads JSON files from: โ
-โ shared/demo/fixtures/professional/*.json โ
-โ โ
-โ Clones data to virtual_tenant_id โ
-โโโโโโโโโโโโโโโโโโฌโโโโโโโโโโโโโโโโโโโโโฌโโโโโโโโโโโโโโโโโฌโโโโโโโโโโโ
- โ โ โ
- โผ โผ โผ
- โโโโโโโโโโโโโโโโโโโโโโ โโโโโโโโโโโโโโโโโโโ โโโโโโโโโโโโโโโโ
- โ 03-inventory.json โ โ 06-production โ โ 09-sales.jsonโ
- โ โ โ .json โ โ โ
- โ โข stock_movements โ โ โ โ โข sales_data โ
- โ (90 days) โ โ โข batches with โ โ (30+ days) โ
- โ โข PRODUCTION_USE โ โ staff_assignedโ โ โ
- โ โข PURCHASE โ โ โข yield_% โ โ โ
- โ โข Stockouts (!) โ โ โข duration โ โ โ
- โโโโโโโโโโโฌโโโโโโโโโโโ โโโโโโโโโโฌโโโโโโโโโ โโโโโโโโฌโโโโโโโโ
- โ โ โ
- โ โ โ
- โผ โผ โผ
- โโโโโโโโโโโโโโโโโโโโโโ โโโโโโโโโโโโโโโโโโโ โโโโโโโโโโโโโโโโ
- โ Inventory Service โ โ Production โ โ Forecasting โ
- โ โ โ Service โ โ Service โ
- โ ML Model: โ โ โ โ โ
- โ Safety Stock โ โ ML Model: โ โ ML Model: โ
- โ Optimizer โ โ Yield Predictor โ โ Demand โ
- โ โ โ โ โ Analyzer โ
- โโโโโโโโโโโฌโโโโโโโโโโโ โโโโโโโโโโฌโโโโโโโโโ โโโโโโโโฌโโโโโโโโ
- โ โ โ
- โ Analyzes 90 days โ Correlates โ Detects
- โ consumption โ worker skills โ trends &
- โ patterns โ with yields โ seasonality
- โ โ โ
- โผ โผ โผ
- โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
- โ Each Service Posts Insights via AIInsightsClient โ
- โ โ
- โ POST /api/ai-insights/tenants/{virtual_tenant_id}/insightsโ
- โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโฌโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
- โ
- โผ
- โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
- โ AI Insights Service โ
- โ โ
- โ Database Tables: โ
- โ โข ai_insights โ
- โ โข insight_feedback โ
- โ โข insight_correlations โ
- โ โ
- โ Stores: โ
- โ โข Title, description โ
- โ โข Priority, confidence โ
- โ โข Impact metrics (โฌ/year) โ
- โ โข Recommendation actions โ
- โ โข Expires in 7 days โ
- โโโโโโโโโโโโโโโโโฌโโโโโโโโโโโโโโโโโโโโโโโโ
- โ
- โผ
- โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
- โ RabbitMQ Events Published โ
- โ โ
- โ โข ai_safety_stock_optimization โ
- โ โข ai_yield_prediction โ
- โ โข ai_demand_forecast โ
- โ โข ai_price_forecast โ
- โ โข ai_supplier_performance โ
- โโโโโโโโโโโโโโโโโฌโโโโโโโโโโโโโโโโโโโโโโโโ
- โ
- โผ
- โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
- โ Frontend Consumes โ
- โ โ
- โ GET /api/ai-insights/tenants/{id}/ โ
- โ insights?filters... โ
- โ โ
- โ Displays: โ
- โ โข AIInsightsPage.tsx โ
- โ โข AIInsightsWidget.tsx (dashboard) โ
- โ โข Service-specific widgets โ
- โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
-```
-
-## Data Requirements by AI Model
-
-### 1. Safety Stock Optimizer
-**Requires**: 90 days of stock movements
-
-```json
-// 03-inventory.json
-{
- "stock_movements": [
- // Daily consumption (PRODUCTION_USE)
- { "movement_type": "PRODUCTION_USE", "quantity": 45.0, "movement_date": "BASE_TS - 1d" },
- { "movement_type": "PRODUCTION_USE", "quantity": 52.3, "movement_date": "BASE_TS - 2d" },
- // ... repeat 90 days per ingredient
-
- // Stockout events (critical!)
- { "movement_type": "PRODUCTION_USE", "quantity_after": 0.0, "movement_date": "BASE_TS - 15d" }
- ]
-}
-```
-
-**Generates**:
-- Optimal reorder points
-- Cost savings from reduced safety stock
-- Stockout risk alerts
-
----
-
-### 2. Yield Predictor
-**Requires**: Historical batches with worker data
-
-```json
-// 06-production.json
-{
- "batches": [
- {
- "yield_percentage": 96.5,
- "staff_assigned": ["50000000-0000-0000-0000-000000000001"], // Expert worker
- "actual_duration_minutes": 175.5
- },
- {
- "yield_percentage": 88.2,
- "staff_assigned": ["50000000-0000-0000-0000-000000000005"], // Junior worker
- "actual_duration_minutes": 195.0
- }
- ]
-}
-```
-
-**Generates**:
-- Yield predictions for upcoming batches
-- Worker-product performance correlations
-- Waste reduction opportunities
-
----
-
-### 3. Demand Analyzer
-**Requires**: Sales history (30+ days)
-
-```json
-// 09-sales.json
-{
- "sales_data": [
- { "product_id": "...", "quantity": 51.11, "sales_date": "BASE_TS - 1d" },
- { "product_id": "...", "quantity": 48.29, "sales_date": "BASE_TS - 2d" }
- // ... repeat 30+ days
- ]
-}
-```
-
-**Generates**:
-- Trend analysis (up/down)
-- Seasonal patterns
-- Production recommendations
-
----
-
-### 4. Price Forecaster
-**Requires**: Purchase order history
-
-```json
-// 07-procurement.json
-{
- "purchase_orders": [
- {
- "supplier_id": "...",
- "items": [{ "unit_price": 0.85, "ordered_quantity": 500 }],
- "order_date": "BASE_TS - 7d"
- },
- {
- "supplier_id": "...",
- "items": [{ "unit_price": 0.92, "ordered_quantity": 500 }], // Price increased!
- "order_date": "BASE_TS - 1d"
- }
- ]
-}
-```
-
-**Generates**:
-- Price trend analysis
-- Bulk buying opportunities
-- Supplier cost comparisons
-
----
-
-### 5. Supplier Performance Analyzer
-**Requires**: Purchase orders with delivery tracking
-
-```json
-// 07-procurement.json
-{
- "purchase_orders": [
- {
- "supplier_id": "40000000-0000-0000-0000-000000000001",
- "required_delivery_date": "BASE_TS - 4h",
- "estimated_delivery_date": "BASE_TS - 4h",
- "status": "confirmed", // Still not delivered = LATE
- "reasoning_data": {
- "metadata": {
- "delivery_delayed": true,
- "delay_hours": 4
- }
- }
- }
- ]
-}
-```
-
-**Generates**:
-- Supplier reliability scores
-- Delivery performance alerts
-- Risk management recommendations
-
----
-
-## Insight Types Generated
-
-| Service | Category | Priority | Example Title |
-|---------|----------|----------|---------------|
-| Inventory | inventory | medium | "Safety stock optimization for Harina T55: Reduce from 200kg to 145kg, save โฌ1,200/year" |
-| Inventory | inventory | critical | "Stockout risk: Levadura Fresca below critical level (3 events in 90 days)" |
-| Production | production | medium | "Yield prediction: Batch #4502 expected 94.2% yield - assign expert worker for 98%" |
-| Production | production | high | "Waste reduction: Training junior staff on croissants could save โฌ2,400/year" |
-| Forecasting | forecasting | medium | "Demand trending up 15% for Croissants - increase production by 12 units next week" |
-| Forecasting | forecasting | low | "Weekend sales 40% lower - optimize Saturday production to reduce waste" |
-| Procurement | procurement | high | "Price alert: Mantequilla up 8% in 60 days - consider bulk purchase now" |
-| Procurement | procurement | medium | "Supplier performance: Harinas del Norte late on 3/10 deliveries - consider backup" |
-
----
-
-## Testing Checklist
-
-Run this before creating a demo session:
-
-```bash
-cd /Users/urtzialfaro/Documents/bakery-ia
-
-# 1. Generate AI insights data
-python shared/demo/fixtures/professional/generate_ai_insights_data.py
-
-# 2. Verify data counts
-python -c "
-import json
-
-# Check inventory
-with open('shared/demo/fixtures/professional/03-inventory.json') as f:
- inv = json.load(f)
- movements = len(inv.get('stock_movements', []))
- stockouts = sum(1 for m in inv['stock_movements'] if m.get('quantity_after') == 0.0)
- print(f'โ Stock movements: {movements} (need 800+)')
- print(f'โ Stockout events: {stockouts} (need 5+)')
-
-# Check production
-with open('shared/demo/fixtures/professional/06-production.json') as f:
- prod = json.load(f)
- batches_with_workers = sum(1 for b in prod['batches'] if b.get('staff_assigned'))
- batches_with_yield = sum(1 for b in prod['batches'] if b.get('yield_percentage'))
- print(f'โ Batches with workers: {batches_with_workers} (need 200+)')
- print(f'โ Batches with yield: {batches_with_yield} (need 200+)')
-
-# Check sales
-with open('shared/demo/fixtures/professional/09-sales.json') as f:
- sales = json.load(f)
- sales_count = len(sales.get('sales_data', []))
- print(f'โ Sales records: {sales_count} (need 30+)')
-
-# Check procurement
-with open('shared/demo/fixtures/professional/07-procurement.json') as f:
- proc = json.load(f)
- po_count = len(proc.get('purchase_orders', []))
- delayed = sum(1 for po in proc['purchase_orders'] if po.get('reasoning_data', {}).get('metadata', {}).get('delivery_delayed'))
- print(f'โ Purchase orders: {po_count} (need 5+)')
- print(f'โ Delayed deliveries: {delayed} (need 1+)')
-"
-
-# 3. Validate JSON syntax
-for file in shared/demo/fixtures/professional/*.json; do
- echo "Checking $file..."
- python -m json.tool "$file" > /dev/null && echo " โ Valid" || echo " โ INVALID JSON"
-done
-```
-
-**Expected Output**:
-```
-โ Stock movements: 842 (need 800+)
-โ Stockout events: 6 (need 5+)
-โ Batches with workers: 247 (need 200+)
-โ Batches with yield: 312 (need 200+)
-โ Sales records: 44 (need 30+)
-โ Purchase orders: 8 (need 5+)
-โ Delayed deliveries: 2 (need 1+)
-
-Checking shared/demo/fixtures/professional/01-tenant.json...
- โ Valid
-Checking shared/demo/fixtures/professional/02-auth.json...
- โ Valid
-...
-```
-
----
-
-## Troubleshooting Quick Guide
-
-| Problem | Cause | Solution |
-|---------|-------|----------|
-| No insights generated | Missing stock movements | Run `generate_ai_insights_data.py` |
-| Low confidence scores | < 60 days of data | Ensure 90 days of movements |
-| No yield predictions | Missing staff_assigned | Run generator script |
-| No supplier insights | No delayed deliveries | Check 07-procurement.json for delayed POs |
-| Insights not in frontend | Tenant ID mismatch | Verify virtual_tenant_id matches |
-| DB errors during cloning | JSON syntax error | Validate all JSON files |
-
----
-
-## Files Modified by Generator
-
-When you run `generate_ai_insights_data.py`, these files are updated:
-
-1. **03-inventory.json**:
- - Adds ~842 stock movements
- - Includes 5-8 stockout events
- - Spans 90 days of history
-
-2. **06-production.json**:
- - Adds `staff_assigned` to ~247 batches
- - Adds `actual_duration_minutes`
- - Correlates workers with yields
-
-**Backup your files first** (optional):
-```bash
-cp shared/demo/fixtures/professional/03-inventory.json shared/demo/fixtures/professional/03-inventory.json.backup
-cp shared/demo/fixtures/professional/06-production.json shared/demo/fixtures/professional/06-production.json.backup
-```
-
-To restore:
-```bash
-cp shared/demo/fixtures/professional/03-inventory.json.backup shared/demo/fixtures/professional/03-inventory.json
-cp shared/demo/fixtures/professional/06-production.json.backup shared/demo/fixtures/professional/06-production.json
-```
diff --git a/AI_INSIGHTS_DEMO_SETUP_GUIDE.md b/AI_INSIGHTS_DEMO_SETUP_GUIDE.md
deleted file mode 100644
index 79068c4d..00000000
--- a/AI_INSIGHTS_DEMO_SETUP_GUIDE.md
+++ /dev/null
@@ -1,631 +0,0 @@
-# AI Insights Demo Setup Guide
-
-## Overview
-This guide explains how to populate demo JSON files to generate AI insights across different services during demo sessions.
-
-## Architecture Summary
-
-```
-Demo Session Creation
- โ
-Clone Base Tenant Data (from JSON files)
- โ
-Populate Database with 90 days of history
- โ
-Trigger ML Models in Services
- โ
-Post AI Insights to AI Insights Service
- โ
-Display in Frontend
-```
-
-## Key Files to Populate
-
-### 1. **03-inventory.json** - Stock Movements (CRITICAL for AI Insights)
-**Location**: `/shared/demo/fixtures/professional/03-inventory.json`
-
-**What to Add**: `stock_movements` array with 90 days of historical data
-
-```json
-{
- "stock_movements": [
- {
- "id": "uuid",
- "tenant_id": "a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6",
- "ingredient_id": "10000000-0000-0000-0000-000000000001",
- "stock_id": null,
- "movement_type": "PRODUCTION_USE", // or "PURCHASE"
- "quantity": 45.23,
- "unit_cost": 0.85,
- "total_cost": 38.45,
- "quantity_before": null,
- "quantity_after": null, // Set to 0.0 for stockout events!
- "movement_date": "BASE_TS - 7d",
- "reason_code": "production_consumption",
- "notes": "Daily production usage",
- "created_at": "BASE_TS - 7d",
- "created_by": "c1a2b3c4-d5e6-47a8-b9c0-d1e2f3a4b5c6"
- }
- ]
-}
-```
-
-**Why This Matters**:
-- **Safety Stock Optimizer** needs 90 days of `PRODUCTION_USE` movements to calculate:
- - Average daily consumption
- - Demand variability
- - Optimal reorder points
- - Cost savings from optimized safety stock levels
-- **Stockout events** (quantity_after = 0.0) trigger critical insights
-- **Purchase patterns** help identify supplier reliability
-
-**AI Insights Generated**:
-- `"Safety stock optimization: Reduce Harina T55 from 200kg to 145kg, save โฌ1,200/year"`
-- `"Detected 3 stockouts in 90 days for Levadura Fresca - increase safety stock by 25%"`
-- `"Inventory carrying cost opportunity: โฌ850/year savings across 5 ingredients"`
-
----
-
-### 2. **06-production.json** - Worker Assignments (CRITICAL for Yield Predictions)
-**Location**: `/shared/demo/fixtures/professional/06-production.json`
-
-**What to Add**: Worker IDs to `batches` array + actual duration
-
-```json
-{
- "batches": [
- {
- "id": "40000000-0000-0000-0000-000000000001",
- "product_id": "20000000-0000-0000-0000-000000000001",
- "status": "COMPLETED",
- "yield_percentage": 96.5,
- "staff_assigned": [
- "50000000-0000-0000-0000-000000000001" // Juan Panadero (expert)
- ],
- "actual_start_time": "BASE_TS - 6d 7h",
- "planned_duration_minutes": 180,
- "actual_duration_minutes": 175.5,
- "completed_at": "BASE_TS - 6d 4h"
- }
- ]
-}
-```
-
-**Why This Matters**:
-- **Yield Predictor** correlates worker skill levels with yield performance
-- Needs historical batches with:
- - `staff_assigned` (worker IDs)
- - `yield_percentage`
- - `actual_duration_minutes`
-- Worker skill levels defined in `generate_ai_insights_data.py`:
- - Marรญa Garcรญa (Owner): 0.98 - Expert
- - Juan Panadero (Baker): 0.95 - Very skilled
- - Isabel Producciรณn: 0.90 - Experienced
- - Carlos Almacรฉn: 0.78 - Learning
-
-**AI Insights Generated**:
-- `"Batch #4502 predicted yield: 94.2% (ยฑ2.1%) - assign expert worker for 98% yield"`
-- `"Waste reduction opportunity: Training junior staff could save โฌ2,400/year"`
-- `"Optimal staffing: Schedule Marรญa for croissants (complex), Carlos for baguettes (standard)"`
-
----
-
-### 3. **09-sales.json** - Sales History (For Demand Forecasting)
-**Location**: `/shared/demo/fixtures/professional/09-sales.json`
-
-**What's Already There**: Daily sales records with variability
-
-```json
-{
- "sales_data": [
- {
- "id": "SALES-202501-2287",
- "tenant_id": "a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6",
- "product_id": "20000000-0000-0000-0000-000000000001",
- "quantity": 51.11,
- "unit_price": 6.92,
- "total_amount": 335.29,
- "sales_date": "BASE_TS - 7d 4h",
- "sales_channel": "online",
- "payment_method": "cash",
- "customer_id": "50000000-0000-0000-0000-000000000001"
- }
- ]
-}
-```
-
-**AI Insights Generated**:
-- `"Demand trending up 15% for Croissants - increase next week's production by 12 units"`
-- `"Weekend sales 40% lower - reduce Saturday production to avoid waste"`
-- `"Seasonal pattern detected: Baguette demand peaks Mondays (+25%)"`
-
----
-
-### 4. **07-procurement.json** - Purchase Orders (For Supplier Performance)
-**Location**: `/shared/demo/fixtures/professional/07-procurement.json`
-
-**What's Already There**: Purchase orders with delivery tracking
-
-```json
-{
- "purchase_orders": [
- {
- "id": "50000000-0000-0000-0000-0000000000c1",
- "po_number": "PO-LATE-0001",
- "supplier_id": "40000000-0000-0000-0000-000000000001",
- "status": "confirmed",
- "required_delivery_date": "BASE_TS - 4h",
- "estimated_delivery_date": "BASE_TS - 4h",
- "notes": "โ ๏ธ EDGE CASE: Delivery should have arrived 4 hours ago",
- "reasoning_data": {
- "type": "low_stock_detection",
- "metadata": {
- "delivery_delayed": true,
- "delay_hours": 4
- }
- }
- }
- ]
-}
-```
-
-**AI Insights Generated**:
-- `"Supplier 'Harinas del Norte' late on 3/10 deliveries - consider backup supplier"`
-- `"Price trend: Mantequilla up 8% in 60 days - consider bulk purchase now"`
-- `"Procurement optimization: Consolidate 3 orders to Lรกcteos Gipuzkoa, save โฌ45 shipping"`
-
----
-
-### 5. **11-orchestrator.json** - Orchestration Metadata
-**Location**: `/shared/demo/fixtures/professional/11-orchestrator.json`
-
-**What's Already There**: Last orchestration run results
-
-```json
-{
- "orchestration_run": {
- "id": "90000000-0000-0000-0000-000000000001",
- "status": "completed",
- "run_type": "daily",
- "started_at": "BASE_TS - 1d 16h",
- "completed_at": "BASE_TS - 1d 15h45m"
- },
- "orchestration_results": {
- "production_batches_created": 18,
- "purchase_orders_created": 6,
- "ai_insights_posted": 5 // โ Number of AI insights generated
- },
- "ai_insights": {
- "yield_improvement_suggestions": 2,
- "waste_reduction_opportunities": 1,
- "demand_forecasting_updates": 3,
- "procurement_optimization": 2,
- "production_scheduling": 1
- }
-}
-```
-
-**Purpose**: Shows orchestration metadata, NOT the insights themselves (those are in ai_insights service)
-
----
-
-## How AI Insights Are Generated
-
-### Step 1: Demo Session Creation
-When a user creates a demo session:
-
-```bash
-POST /api/demo/sessions
-{
- "demo_account_type": "professional"
-}
-```
-
-### Step 2: Data Cloning (Automatic)
-The `CloneOrchestrator` clones base tenant data from JSON files:
-- Copies inventory products, recipes, suppliers, etc.
-- **Crucially**: Loads 90 days of stock movements
-- Loads production batches with worker assignments
-- Loads sales history
-
-**File**: `/services/demo_session/app/services/clone_orchestrator.py`
-
-### Step 3: AI Model Execution (After Data Clone)
-Each service runs its ML models:
-
-#### **Inventory Service**
-```python
-# File: /services/inventory/app/ml/safety_stock_insights_orchestrator.py
-async def generate_portfolio_summary(tenant_id: str):
- # Analyze 90 days of stock movements
- # Calculate optimal safety stock levels
- # Generate insights with cost impact
- insights = await ai_insights_client.create_insights_bulk(tenant_id, insights_list)
-```
-
-**Triggers**: After inventory data is cloned
-**Publishes Event**: `ai_safety_stock_optimization`
-
-#### **Production Service**
-```python
-# File: /services/production/app/ml/yield_insights_orchestrator.py
-async def generate_yield_predictions(tenant_id: str):
- # Analyze historical batches + worker performance
- # Predict yield for upcoming batches
- # Identify waste reduction opportunities
- insights = await ai_insights_client.create_insights_bulk(tenant_id, insights_list)
-```
-
-**Triggers**: After production batches are cloned
-**Publishes Event**: `ai_yield_prediction`
-
-#### **Forecasting Service**
-```python
-# File: /services/forecasting/app/ml/demand_insights_orchestrator.py
-async def generate_demand_insights(tenant_id: str):
- # Analyze sales history
- # Detect trends, seasonality
- # Recommend production adjustments
- insights = await ai_insights_client.create_insights_bulk(tenant_id, insights_list)
-```
-
-**Triggers**: After forecasts are generated
-**Publishes Event**: `ai_demand_forecast`
-
-#### **Procurement Service**
-```python
-# File: /services/procurement/app/ml/price_insights_orchestrator.py
-async def generate_price_insights(tenant_id: str):
- # Analyze purchase order history
- # Detect price trends
- # Recommend bulk buying opportunities
- insights = await ai_insights_client.create_insights_bulk(tenant_id, insights_list)
-```
-
-**Triggers**: After purchase orders are cloned
-**Publishes Event**: `ai_price_forecast`
-
-### Step 4: AI Insights Storage
-All insights are posted to:
-```
-POST /api/ai-insights/tenants/{tenant_id}/insights
-```
-
-Stored in `ai_insights` service database with:
-- Priority (low, medium, high, critical)
-- Confidence score (0-100)
-- Impact metrics (cost savings, waste reduction, etc.)
-- Recommendation actions
-- Expiration (default 7 days)
-
-### Step 5: Frontend Display
-User sees insights in:
-- **AI Insights Page**: `/app/analytics/ai-insights`
-- **Dashboard Widget**: Summary of actionable insights
-- **Service-specific pages**: Contextual insights (e.g., production page shows yield predictions)
-
----
-
-## Running the Generator Script
-
-### Automated Approach (Recommended)
-Run the provided script to populate **03-inventory.json** and **06-production.json**:
-
-```bash
-cd /Users/urtzialfaro/Documents/bakery-ia
-python shared/demo/fixtures/professional/generate_ai_insights_data.py
-```
-
-**What it does**:
-1. Generates **~800-900 stock movements** (90 days ร 10 ingredients):
- - Daily PRODUCTION_USE movements with variability
- - Bi-weekly PURCHASE deliveries
- - 5-8 stockout events (quantity_after = 0.0)
-
-2. Adds **worker assignments** to production batches:
- - Assigns workers based on yield performance
- - Adds actual_duration_minutes
- - Correlates high yields with expert workers
-
-3. **Output**:
- ```
- โ
AI INSIGHTS DATA GENERATION COMPLETE
-
- ๐ DATA ADDED:
- โข Stock movements (PRODUCTION_USE): 720 records (90 days)
- โข Stock movements (PURCHASE): 60 deliveries
- โข Stockout events: 6
- โข Worker assignments: 245 batches
-
- ๐ฏ AI INSIGHTS READINESS:
- โ Safety Stock Optimizer: READY (90 days demand data)
- โ Yield Predictor: READY (worker data added)
- โ Sustainability Metrics: READY (existing waste data)
- ```
-
----
-
-## Manual Data Population (Alternative)
-
-If you need custom data, manually add to JSON files:
-
-### For Safety Stock Insights
-Add to `03-inventory.json`:
-```json
-{
- "stock_movements": [
- // 90 days of daily consumption for each ingredient
- {
- "movement_type": "PRODUCTION_USE",
- "ingredient_id": "10000000-0000-0000-0000-000000000001",
- "quantity": 45.0, // Average daily usage
- "movement_date": "BASE_TS - 1d"
- },
- {
- "movement_type": "PRODUCTION_USE",
- "ingredient_id": "10000000-0000-0000-0000-000000000001",
- "quantity": 52.3, // Variability is key!
- "movement_date": "BASE_TS - 2d"
- },
- // ... repeat for 90 days
-
- // Add stockout events (triggers critical insights)
- {
- "movement_type": "PRODUCTION_USE",
- "ingredient_id": "10000000-0000-0000-0000-000000000001",
- "quantity": 48.0,
- "quantity_before": 45.0,
- "quantity_after": 0.0, // STOCKOUT!
- "movement_date": "BASE_TS - 15d",
- "notes": "STOCKOUT - Ran out during production"
- }
- ]
-}
-```
-
-### For Yield Prediction Insights
-Add to `06-production.json`:
-```json
-{
- "batches": [
- {
- "id": "batch-uuid",
- "product_id": "20000000-0000-0000-0000-000000000001",
- "status": "COMPLETED",
- "yield_percentage": 96.5, // High yield
- "staff_assigned": [
- "50000000-0000-0000-0000-000000000001" // Expert worker (Juan)
- ],
- "actual_duration_minutes": 175.5,
- "planned_duration_minutes": 180
- },
- {
- "id": "batch-uuid-2",
- "product_id": "20000000-0000-0000-0000-000000000001",
- "status": "COMPLETED",
- "yield_percentage": 88.2, // Lower yield
- "staff_assigned": [
- "50000000-0000-0000-0000-000000000005" // Junior worker (Carlos)
- ],
- "actual_duration_minutes": 195.0,
- "planned_duration_minutes": 180
- }
- ]
-}
-```
-
----
-
-## Verifying AI Insights Generation
-
-### 1. Check Demo Session Logs
-After creating a demo session, check service logs:
-
-```bash
-# Inventory service (safety stock insights)
-docker logs bakery-inventory-service | grep "ai_safety_stock"
-
-# Production service (yield insights)
-docker logs bakery-production-service | grep "ai_yield"
-
-# Forecasting service (demand insights)
-docker logs bakery-forecasting-service | grep "ai_demand"
-
-# Procurement service (price insights)
-docker logs bakery-procurement-service | grep "ai_price"
-```
-
-### 2. Query AI Insights API
-```bash
-curl -X GET "http://localhost:8000/api/ai-insights/tenants/{tenant_id}/insights" \
- -H "Authorization: Bearer {token}"
-```
-
-**Expected Response**:
-```json
-{
- "items": [
- {
- "id": "insight-uuid",
- "type": "optimization",
- "category": "inventory",
- "priority": "medium",
- "confidence": 88.5,
- "title": "Safety stock optimization opportunity for Harina T55",
- "description": "Reduce safety stock from 200kg to 145kg based on 90-day demand analysis",
- "impact_type": "cost_savings",
- "impact_value": 1200.0,
- "impact_unit": "EUR/year",
- "is_actionable": true,
- "recommendation_actions": [
- "Update reorder point to 145kg",
- "Adjust automatic procurement rules"
- ],
- "status": "new",
- "detected_at": "2025-01-16T10:30:00Z"
- }
- ],
- "total": 5,
- "page": 1
-}
-```
-
-### 3. Check Frontend
-Navigate to: `http://localhost:3000/app/analytics/ai-insights`
-
-Should see:
-- **Statistics**: Total insights, actionable count, average confidence
-- **Insight Cards**: Categorized by type (inventory, production, procurement, forecasting)
-- **Action Buttons**: Apply, Dismiss, Acknowledge
-
----
-
-## Troubleshooting
-
-### No Insights Generated
-
-**Problem**: AI Insights page shows 0 insights after demo session creation
-
-**Solutions**:
-1. **Check stock movements count**:
- ```bash
- # Should have ~800+ movements
- cat shared/demo/fixtures/professional/03-inventory.json | jq '.stock_movements | length'
- ```
- If < 100, run `generate_ai_insights_data.py`
-
-2. **Check worker assignments**:
- ```bash
- # Should have ~200+ batches with staff_assigned
- cat shared/demo/fixtures/professional/06-production.json | jq '[.batches[] | select(.staff_assigned != null)] | length'
- ```
- If 0, run `generate_ai_insights_data.py`
-
-3. **Check service logs for errors**:
- ```bash
- docker logs bakery-ai-insights-service --tail 100
- ```
-
-4. **Verify ML models are enabled**:
- Check `.env` files for:
- ```
- AI_INSIGHTS_ENABLED=true
- ML_MODELS_ENABLED=true
- ```
-
-### Insights Not Showing in Frontend
-
-**Problem**: API returns insights but frontend shows empty
-
-**Solutions**:
-1. **Check tenant_id mismatch**:
- - Frontend uses virtual_tenant_id from demo session
- - Insights should be created with same virtual_tenant_id
-
-2. **Check filters**:
- - Frontend may filter by status, priority, category
- - Try "Show All" filter
-
-3. **Check browser console**:
- ```javascript
- // In browser dev tools
- localStorage.getItem('demo_session')
- // Should show virtual_tenant_id
- ```
-
-### Low Confidence Scores
-
-**Problem**: Insights generated but confidence < 50%
-
-**Causes**:
-- Insufficient historical data (< 60 days)
-- High variability in data (inconsistent patterns)
-- Missing worker assignments for yield predictions
-
-**Solutions**:
-- Ensure 90 days of stock movements
-- Add more consistent patterns (reduce random variability)
-- Verify all batches have `staff_assigned` and `yield_percentage`
-
----
-
-## Summary Checklist
-
-Before creating a demo session, verify:
-
-- [ ] `03-inventory.json` has 800+ stock movements (90 days)
-- [ ] Stock movements include PRODUCTION_USE and PURCHASE types
-- [ ] 5-8 stockout events present (quantity_after = 0.0)
-- [ ] `06-production.json` batches have `staff_assigned` arrays
-- [ ] Batches have `yield_percentage` and `actual_duration_minutes`
-- [ ] `09-sales.json` has daily sales for 30+ days
-- [ ] `07-procurement.json` has purchase orders with delivery dates
-- [ ] All JSON files are valid (no syntax errors)
-
-**Quick Validation**:
-```bash
-cd /Users/urtzialfaro/Documents/bakery-ia
-python -c "
-import json
-with open('shared/demo/fixtures/professional/03-inventory.json') as f:
- data = json.load(f)
- movements = len(data.get('stock_movements', []))
- stockouts = sum(1 for m in data['stock_movements'] if m.get('quantity_after') == 0.0)
- print(f'โ Stock movements: {movements}')
- print(f'โ Stockout events: {stockouts}')
-
-with open('shared/demo/fixtures/professional/06-production.json') as f:
- data = json.load(f)
- batches_with_workers = sum(1 for b in data['batches'] if b.get('staff_assigned'))
- print(f'โ Batches with workers: {batches_with_workers}')
-"
-```
-
-**Expected Output**:
-```
-โ Stock movements: 842
-โ Stockout events: 6
-โ Batches with workers: 247
-```
-
----
-
-## Next Steps
-
-1. **Run generator script** (if not already done):
- ```bash
- python shared/demo/fixtures/professional/generate_ai_insights_data.py
- ```
-
-2. **Create demo session**:
- ```bash
- curl -X POST http://localhost:8000/api/demo/sessions \
- -H "Content-Type: application/json" \
- -d '{"demo_account_type": "professional"}'
- ```
-
-3. **Wait for cloning** (~40 seconds)
-
-4. **Navigate to AI Insights**:
- `http://localhost:3000/app/analytics/ai-insights`
-
-5. **Verify insights** (should see 5-10 insights across categories)
-
-6. **Test actions**:
- - Click "Apply" on an insight
- - Check if recommendation is executed
- - Provide feedback on outcome
-
----
-
-## Additional Resources
-
-- **AI Insights Service**: `/services/ai_insights/README.md`
-- **ML Models Documentation**: `/services/*/app/ml/README.md`
-- **Demo Session Flow**: `/services/demo_session/README.md`
-- **Frontend Integration**: `/frontend/src/pages/app/analytics/ai-insights/README.md`
-
-For questions or issues, check service logs:
-```bash
-docker-compose logs -f ai-insights-service inventory-service production-service forecasting-service procurement-service
-```
diff --git a/AI_INSIGHTS_QUICK_START.md b/AI_INSIGHTS_QUICK_START.md
deleted file mode 100644
index b2e2af25..00000000
--- a/AI_INSIGHTS_QUICK_START.md
+++ /dev/null
@@ -1,565 +0,0 @@
-# AI Insights Quick Start Guide
-
-## TL;DR - Get AI Insights in 3 Steps
-
-```bash
-# 1. Generate demo data with AI insights support (90 days history)
-cd /Users/urtzialfaro/Documents/bakery-ia
-python shared/demo/fixtures/professional/generate_ai_insights_data.py
-
-# 2. Create a demo session
-curl -X POST http://localhost:8000/api/demo/sessions \
- -H "Content-Type: application/json" \
- -d '{"demo_account_type": "professional"}'
-
-# 3. Wait ~40 seconds, then view insights at:
-# http://localhost:3000/app/analytics/ai-insights
-```
-
----
-
-## What You'll See
-
-After the demo session is ready, navigate to the AI Insights page. You should see **5-10 insights** across these categories:
-
-### ๐ฐ **Inventory Optimization** (2-3 insights)
-```
-Priority: Medium | Confidence: 88%
-"Safety stock optimization for Harina de Trigo T55"
-Reduce safety stock from 200kg to 145kg based on 90-day demand analysis.
-Impact: Save โฌ1,200/year in carrying costs
-Actions: โ Apply recommendation
-```
-
-### ๐ **Production Efficiency** (2-3 insights)
-```
-Priority: High | Confidence: 92%
-"Yield prediction: Batch #4502"
-Predicted yield: 94.2% (ยฑ2.1%) - Assign expert worker for 98% yield
-Impact: Reduce waste by 3.8% (โฌ450/year)
-Actions: โ Assign worker | โ Dismiss
-```
-
-### ๐ **Demand Forecasting** (1-2 insights)
-```
-Priority: Medium | Confidence: 85%
-"Demand trending up for Croissants"
-15% increase detected - recommend increasing production by 12 units next week
-Impact: Prevent stockouts, capture โฌ600 additional revenue
-Actions: โ Apply to production schedule
-```
-
-### ๐ **Procurement Optimization** (1-2 insights)
-```
-Priority: High | Confidence: 79%
-"Price alert: Mantequilla price increasing"
-Detected 8% price increase over 60 days - consider bulk purchase now
-Impact: Lock in current price, save โฌ320 over 3 months
-Actions: โ Create bulk order
-```
-
-### โ ๏ธ **Supplier Performance** (0-1 insights)
-```
-Priority: Critical | Confidence: 95%
-"Delivery delays from Harinas del Norte"
-Late on 3/10 deliveries (avg delay: 4.2 hours) - consider backup supplier
-Impact: Reduce production delays, prevent stockouts
-Actions: โ Contact supplier | โ Add backup
-```
-
----
-
-## Detailed Walkthrough
-
-### Step 1: Prepare Demo Data
-
-Run the generator script to add AI-ready data to JSON files:
-
-```bash
-cd /Users/urtzialfaro/Documents/bakery-ia
-python shared/demo/fixtures/professional/generate_ai_insights_data.py
-```
-
-**What this does**:
-- Adds **~842 stock movements** (90 days ร 10 ingredients)
-- Adds **~247 worker assignments** to production batches
-- Includes **5-8 stockout events** (critical for insights)
-- Correlates worker skill levels with yield performance
-
-**Expected output**:
-```
-๐ง Generating AI Insights Data for Professional Demo...
-
-๐ Generating stock movements...
- โ Generated 842 stock movements
- - PRODUCTION_USE movements: 720
- - PURCHASE movements (deliveries): 60
- - Stockout events: 6
-
-๐ฆ Updating 03-inventory.json...
- - Existing movements: 0
- - Total movements: 842
- โ Updated inventory file
-
-๐ญ Updating 06-production.json...
- - Total batches: 312
- - Batches with worker_id: 247
- - Batches with completed_at: 0
- โ Updated production file
-
-============================================================
-โ
AI INSIGHTS DATA GENERATION COMPLETE
-============================================================
-
-๐ DATA ADDED:
- โข Stock movements (PRODUCTION_USE): 720 records (90 days)
- โข Stock movements (PURCHASE): 60 deliveries
- โข Stockout events: 6
- โข Worker assignments: 247 batches
- โข Completion timestamps: 0 batches
-
-๐ฏ AI INSIGHTS READINESS:
- โ Safety Stock Optimizer: READY (90 days demand data)
- โ Yield Predictor: READY (worker data added)
- โ Sustainability Metrics: READY (existing waste data)
-
-๐ Next steps:
- 1. Test demo session creation
- 2. Verify AI insights generation
- 3. Check insight quality in frontend
-```
-
-### Step 2: Create Demo Session
-
-**Option A: Using cURL (API)**
-```bash
-curl -X POST http://localhost:8000/api/demo/sessions \
- -H "Content-Type: application/json" \
- -d '{
- "demo_account_type": "professional",
- "subscription_tier": "professional"
- }' | jq
-```
-
-**Response**:
-```json
-{
- "session_id": "demo_abc123xyz456",
- "virtual_tenant_id": "550e8400-e29b-41d4-a716-446655440000",
- "demo_account_type": "professional",
- "status": "pending",
- "expires_at": "2025-01-16T14:00:00Z",
- "created_at": "2025-01-16T12:00:00Z"
-}
-```
-
-**Save the virtual_tenant_id** - you'll need it to query insights.
-
-**Option B: Using Frontend**
-1. Navigate to: `http://localhost:3000`
-2. Click "Try Demo" or "Create Demo Session"
-3. Select "Professional" account type
-4. Click "Create Session"
-
-### Step 3: Wait for Data Cloning
-
-The demo session will clone all data from JSON files to the virtual tenant. This takes **~30-45 seconds**.
-
-**Monitor progress**:
-```bash
-# Check session status
-curl http://localhost:8000/api/demo/sessions/demo_abc123xyz456/status | jq
-
-# Watch logs (in separate terminal)
-docker-compose logs -f demo-session-service
-```
-
-**Status progression**:
-```
-pending โ cloning โ ready
-```
-
-**When status = "ready"**:
-```json
-{
- "session_id": "demo_abc123xyz456",
- "status": "ready",
- "progress": {
- "inventory": { "status": "completed", "records_cloned": 850 },
- "production": { "status": "completed", "records_cloned": 350 },
- "forecasting": { "status": "completed", "records_cloned": 120 },
- "procurement": { "status": "completed", "records_cloned": 85 }
- },
- "total_records_cloned": 1405,
- "cloning_completed_at": "2025-01-16T12:00:45Z"
-}
-```
-
-### Step 4: AI Models Execute (Automatic)
-
-Once data is cloned, each service automatically runs its ML models:
-
-**Timeline**:
-```
-T+0s: Data cloning starts
-T+40s: Cloning completes
-T+42s: Inventory service runs Safety Stock Optimizer
- โ Posts 2-3 insights to AI Insights Service
-T+44s: Production service runs Yield Predictor
- โ Posts 2-3 insights to AI Insights Service
-T+46s: Forecasting service runs Demand Analyzer
- โ Posts 1-2 insights to AI Insights Service
-T+48s: Procurement service runs Price Forecaster
- โ Posts 1-2 insights to AI Insights Service
-T+50s: All insights ready for display
-```
-
-**Watch service logs**:
-```bash
-# Inventory service (Safety Stock Insights)
-docker logs bakery-inventory-service 2>&1 | grep -i "ai_insights\|safety_stock"
-
-# Production service (Yield Predictions)
-docker logs bakery-production-service 2>&1 | grep -i "ai_insights\|yield"
-
-# Forecasting service (Demand Insights)
-docker logs bakery-forecasting-service 2>&1 | grep -i "ai_insights\|demand"
-
-# Procurement service (Price/Supplier Insights)
-docker logs bakery-procurement-service 2>&1 | grep -i "ai_insights\|price\|supplier"
-```
-
-**Expected log entries**:
-```
-inventory-service | [INFO] Safety stock optimizer: Analyzing 842 movements for 10 ingredients
-inventory-service | [INFO] Generated 3 insights for tenant 550e8400-e29b-41d4-a716-446655440000
-inventory-service | [INFO] Posted insights to AI Insights Service
-
-production-service | [INFO] Yield predictor: Analyzing 247 batches with worker data
-production-service | [INFO] Generated 2 yield prediction insights
-production-service | [INFO] Posted insights to AI Insights Service
-
-forecasting-service | [INFO] Demand analyzer: Processing 44 sales records
-forecasting-service | [INFO] Detected trend: Croissants +15%
-forecasting-service | [INFO] Posted 2 demand insights to AI Insights Service
-```
-
-### Step 5: View Insights in Frontend
-
-**Navigate to**:
-```
-http://localhost:3000/app/analytics/ai-insights
-```
-
-**Expected UI**:
-
-```
-โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
-โ AI Insights โ
-โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโค
-โ Statistics โ
-โ โโโโโโโโโโโ โโโโโโโโโโโ โโโโโโโโโโโ โโโโโโโโโโโ โ
-โ โ Total โ โActionableโ โAvg Conf.โ โCritical โ โ
-โ โ 8 โ โ 6 โ โ 86.5% โ โ 1 โ โ
-โ โโโโโโโโโโโ โโโโโโโโโโโ โโโโโโโโโโโ โโโโโโโโโโโ โ
-โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโค
-โ Filters: [All] [Inventory] [Production] [Procurement] โ
-โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโค
-โ โ
-โ ๐ด Critical | Confidence: 95% โ
-โ โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ โ
-โ โ Delivery delays from Harinas del Norte โ โ
-โ โ โ โ
-โ โ Late on 3/10 deliveries (avg 4.2h delay) โ โ
-โ โ Consider backup supplier to prevent stockouts โ โ
-โ โ โ โ
-โ โ Impact: Reduce production delays โ โ
-โ โ โ โ
-โ โ [Contact Supplier] [Add Backup] [Dismiss] โ โ
-โ โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ โ
-โ โ
-โ ๐ก Medium | Confidence: 88% โ
-โ โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ โ
-โ โ Safety stock optimization for Harina T55 โ โ
-โ โ โ โ
-โ โ Reduce from 200kg to 145kg based on 90-day demand โ โ
-โ โ โ โ
-โ โ Impact: โฌ1,200/year savings in carrying costs โ โ
-โ โ โ โ
-โ โ [Apply] [Dismiss] โ โ
-โ โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ โ
-โ โ
-โ ... (6 more insights) โ
-โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
-```
-
----
-
-## Verify Insights via API
-
-**Query all insights**:
-```bash
-TENANT_ID="550e8400-e29b-41d4-a716-446655440000" # Your virtual_tenant_id
-
-curl -X GET "http://localhost:8000/api/ai-insights/tenants/${TENANT_ID}/insights" \
- -H "Authorization: Bearer YOUR_TOKEN" | jq
-```
-
-**Response**:
-```json
-{
- "items": [
- {
- "id": "insight-uuid-1",
- "type": "optimization",
- "category": "inventory",
- "priority": "medium",
- "confidence": 88.5,
- "title": "Safety stock optimization for Harina T55",
- "description": "Reduce safety stock from 200kg to 145kg based on 90-day demand analysis",
- "impact_type": "cost_savings",
- "impact_value": 1200.0,
- "impact_unit": "EUR/year",
- "is_actionable": true,
- "recommendation_actions": [
- "Update reorder point to 145kg",
- "Adjust automatic procurement rules"
- ],
- "status": "new",
- "detected_at": "2025-01-16T12:00:50Z",
- "expires_at": "2025-01-23T12:00:50Z"
- },
- {
- "id": "insight-uuid-2",
- "type": "prediction",
- "category": "production",
- "priority": "high",
- "confidence": 92.3,
- "title": "Yield prediction: Batch #4502",
- "description": "Predicted yield: 94.2% (ยฑ2.1%) - Assign expert worker for 98% yield",
- "impact_type": "waste_reduction",
- "impact_value": 450.0,
- "impact_unit": "EUR/year",
- "metrics": {
- "predicted_yield": 94.2,
- "confidence_interval": 2.1,
- "optimal_yield": 98.0,
- "waste_percentage": 3.8,
- "recommended_worker_id": "50000000-0000-0000-0000-000000000001"
- },
- "is_actionable": true,
- "status": "new"
- }
- ],
- "total": 8,
- "page": 1,
- "size": 50
-}
-```
-
-**Filter by category**:
-```bash
-# Inventory insights only
-curl "http://localhost:8000/api/ai-insights/tenants/${TENANT_ID}/insights?category=inventory" | jq
-
-# High priority only
-curl "http://localhost:8000/api/ai-insights/tenants/${TENANT_ID}/insights?priority=high" | jq
-
-# Actionable only
-curl "http://localhost:8000/api/ai-insights/tenants/${TENANT_ID}/insights?is_actionable=true" | jq
-```
-
-**Get aggregate metrics**:
-```bash
-curl "http://localhost:8000/api/ai-insights/tenants/${TENANT_ID}/insights/metrics/summary" | jq
-```
-
-**Response**:
-```json
-{
- "total_insights": 8,
- "actionable_count": 6,
- "average_confidence": 86.5,
- "by_priority": {
- "critical": 1,
- "high": 3,
- "medium": 3,
- "low": 1
- },
- "by_category": {
- "inventory": 3,
- "production": 2,
- "forecasting": 2,
- "procurement": 1
- },
- "total_impact_value": 4870.0,
- "impact_breakdown": {
- "cost_savings": 2350.0,
- "waste_reduction": 1520.0,
- "revenue_opportunity": 600.0,
- "risk_mitigation": 400.0
- }
-}
-```
-
----
-
-## Test Actions
-
-### Apply an Insight
-```bash
-INSIGHT_ID="insight-uuid-1"
-
-curl -X POST "http://localhost:8000/api/ai-insights/tenants/${TENANT_ID}/insights/${INSIGHT_ID}/apply" \
- -H "Content-Type: application/json" \
- -d '{
- "applied_by": "user-uuid",
- "notes": "Applied safety stock optimization"
- }' | jq
-```
-
-**What happens**:
-- Insight status โ `"applied"`
-- Recommendation actions are executed (e.g., update reorder point)
-- Feedback tracking begins (monitors actual vs expected impact)
-
-### Provide Feedback
-```bash
-curl -X POST "http://localhost:8000/api/ai-insights/tenants/${TENANT_ID}/insights/${INSIGHT_ID}/feedback" \
- -H "Content-Type: application/json" \
- -d '{
- "action_taken": "adjusted_reorder_point",
- "outcome": "success",
- "expected_impact": 1200.0,
- "actual_impact": 1350.0,
- "variance": 150.0,
- "notes": "Exceeded expected savings by 12.5%"
- }' | jq
-```
-
-**Why this matters**:
-- Closed-loop learning: ML models improve based on feedback
-- Adjusts confidence scores for future insights
-- Tracks ROI of AI recommendations
-
-### Dismiss an Insight
-```bash
-curl -X DELETE "http://localhost:8000/api/ai-insights/tenants/${TENANT_ID}/insights/${INSIGHT_ID}" \
- -H "Content-Type: application/json" \
- -d '{
- "reason": "not_applicable",
- "notes": "Already using alternative supplier"
- }' | jq
-```
-
----
-
-## Common Issues & Solutions
-
-### Issue 1: No insights generated
-```bash
-# Check if data was cloned
-curl http://localhost:8000/api/demo/sessions/demo_abc123xyz456/status | jq '.total_records_cloned'
-# Should be 1400+
-
-# Check stock movements count
-docker exec -it bakery-inventory-service psql -U postgres -d inventory -c \
- "SELECT COUNT(*) FROM stock_movements WHERE tenant_id = '550e8400-e29b-41d4-a716-446655440000';"
-# Should be 842+
-
-# If count is low, regenerate data
-python shared/demo/fixtures/professional/generate_ai_insights_data.py
-```
-
-### Issue 2: Low confidence scores
-```bash
-# Check data quality
-python -c "
-import json
-with open('shared/demo/fixtures/professional/03-inventory.json') as f:
- data = json.load(f)
- movements = data.get('stock_movements', [])
- # Should have movements spanning 90 days
- unique_dates = len(set(m['movement_date'] for m in movements))
- print(f'Unique dates: {unique_dates} (need 80+)')
-"
-```
-
-### Issue 3: Insights not visible in frontend
-```bash
-# Check if frontend is using correct tenant_id
-# In browser console:
-# localStorage.getItem('demo_session')
-
-# Should match the virtual_tenant_id from API
-
-# Also check API directly
-curl "http://localhost:8000/api/ai-insights/tenants/${TENANT_ID}/insights" | jq '.total'
-# Should be > 0
-```
-
----
-
-## Pro Tips
-
-### 1. **Regenerate insights for existing session**
-```bash
-# Trigger refresh (expires old insights, generates new ones)
-curl -X POST "http://localhost:8000/api/ai-insights/tenants/${TENANT_ID}/insights/refresh" | jq
-```
-
-### 2. **Export insights to CSV**
-```bash
-curl "http://localhost:8000/api/ai-insights/tenants/${TENANT_ID}/insights?export=csv" > insights.csv
-```
-
-### 3. **Monitor insight generation in real-time**
-```bash
-# Terminal 1: Watch AI Insights service
-docker logs -f bakery-ai-insights-service
-
-# Terminal 2: Watch source services
-docker logs -f bakery-inventory-service bakery-production-service bakery-forecasting-service
-
-# Terminal 3: Monitor RabbitMQ events
-docker exec -it bakery-rabbitmq rabbitmqadmin list queues | grep ai_
-```
-
-### 4. **Test specific ML models**
-```bash
-# Trigger safety stock optimizer directly (for testing)
-curl -X POST "http://localhost:8000/api/inventory/tenants/${TENANT_ID}/ml/safety-stock/analyze" | jq
-
-# Trigger yield predictor
-curl -X POST "http://localhost:8000/api/production/tenants/${TENANT_ID}/ml/yield/predict" | jq
-```
-
----
-
-## Summary
-
-**โ
You should now have**:
-- Demo session with 1400+ records cloned
-- 8-10 AI insights across 5 categories
-- Insights visible in frontend at `/app/analytics/ai-insights`
-- Ability to apply, dismiss, and provide feedback on insights
-
-**๐ Expected results**:
-- **Safety Stock Insights**: 2-3 optimization recommendations (โฌ1,000-โฌ3,000/year savings)
-- **Yield Predictions**: 2-3 production efficiency insights (3-5% waste reduction)
-- **Demand Forecasts**: 1-2 trend analyses (production adjustments)
-- **Price Alerts**: 1-2 procurement opportunities (โฌ300-โฌ800 savings)
-- **Supplier Alerts**: 0-1 performance warnings (risk mitigation)
-
-**๐ฏ Next steps**:
-1. Explore the AI Insights page
-2. Click "Apply" on a recommendation
-3. Monitor the impact via feedback tracking
-4. Check how insights correlate (e.g., low stock + delayed supplier = critical alert)
-5. Review the orchestrator dashboard to see AI-enhanced decisions
-
-**Need help?** Check the full guides:
-- [AI_INSIGHTS_DEMO_SETUP_GUIDE.md](./AI_INSIGHTS_DEMO_SETUP_GUIDE.md) - Comprehensive documentation
-- [AI_INSIGHTS_DATA_FLOW.md](./AI_INSIGHTS_DATA_FLOW.md) - Architecture diagrams
-
-**Report issues**: `docker-compose logs -f > debug.log` and share the log
diff --git a/COMPLETE_FIX_SUMMARY.md b/COMPLETE_FIX_SUMMARY.md
deleted file mode 100644
index ae8d53e8..00000000
--- a/COMPLETE_FIX_SUMMARY.md
+++ /dev/null
@@ -1,246 +0,0 @@
-# Complete Fix Summary - Demo Session & AI Insights
-
-**Date**: 2025-12-16
-**Status**: โ
**ALL CRITICAL ISSUES FIXED**
-
----
-
-## ๐ฏ Issues Identified & Fixed
-
-### 1. โ
Orchestrator Import Bug (CRITICAL)
-**File**: [services/orchestrator/app/api/internal_demo.py:16](services/orchestrator/app/api/internal_demo.py#L16)
-
-**Issue**: Missing `OrchestrationStatus` import caused HTTP 500 during clone
-
-**Fix Applied**:
-```python
-# Before:
-from app.models.orchestration_run import OrchestrationRun
-
-# After:
-from app.models.orchestration_run import OrchestrationRun, OrchestrationStatus
-```
-
-**Result**: โ
Orchestrator redeployed and working
-
-
-### 2. โ
Production Duplicate Workers
-**File**: [shared/demo/fixtures/professional/06-production.json](shared/demo/fixtures/professional/06-production.json)
-
-**Issue**: Worker IDs duplicated in `staff_assigned` arrays from running generator script multiple times
-
-**Fix Applied**: Removed 56 duplicate worker assignments from 56 batches
-
-**Result**:
-- Total batches: 88
-- With workers: 75 (all COMPLETED batches) โ
CORRECT
-- No duplicates โ
-
-
-### 3. โ
Procurement Data Structure (CRITICAL)
-**File**: [shared/demo/fixtures/professional/07-procurement.json](shared/demo/fixtures/professional/07-procurement.json)
-
-**Issue**: Duplicate data structures
-- Enhancement script added nested `items` arrays inside `purchase_orders` (wrong structure)
-- Existing `purchase_order_items` table at root level (correct structure)
-- This caused duplication and model mismatch
-
-**Fix Applied**:
-1. **Removed 32 nested items arrays** from purchase_orders
-2. **Updated 10 existing PO items** with realistic price trends
-3. **Recalculated PO totals** based on updated item prices
-
-**Price Trends Added**:
-- โ Harina T55: +8% (โฌ0.85 โ โฌ0.92)
-- โ Harina T65: +6% (โฌ0.95 โ โฌ1.01)
-- โ Mantequilla: +12% (โฌ6.50 โ โฌ7.28) **highest increase**
-- โ Leche: -3% (โฌ0.95 โ โฌ0.92) **seasonal decrease**
-- โ Levadura: +4% (โฌ4.20 โ โฌ4.37)
-- โ Azรบcar: +2% (โฌ1.10 โ โฌ1.12) **stable**
-
-**Result**: โ
Correct structure, enables procurement AI insights
-
-
-### 4. โ ๏ธ Forecasting Clone Endpoint (IN PROGRESS)
-**File**: [services/forecasting/app/api/internal_demo.py:320-353](services/forecasting/app/api/internal_demo.py#L320-L353)
-
-**Issue**: Three problems preventing forecast cloning:
-1. Missing `batch_name` field (fixture has `batch_id`, model requires `batch_name`)
-2. UUID type mismatch (`product_id` string โ `inventory_product_id` UUID)
-3. Date fields not parsed (`BASE_TS` markers passed as strings)
-
-**Fix Applied**:
-```python
-# 1. Field mappings
-batch_name = batch_data.get('batch_name') or batch_data.get('batch_id') or f"Batch-{transformed_id}"
-total_products = batch_data.get('total_products') or batch_data.get('total_forecasts') or 0
-
-# 2. UUID conversion
-if isinstance(inventory_product_id_str, str):
- inventory_product_id = uuid.UUID(inventory_product_id_str)
-
-# 3. Date parsing
-requested_at_raw = batch_data.get('requested_at') or batch_data.get('created_at') or batch_data.get('prediction_date')
-requested_at = parse_date_field(requested_at_raw, session_time, 'requested_at') if requested_at_raw else session_time
-```
-
-**Status**: โ ๏ธ **Code fixed but Docker image not rebuilt**
-- Git commit: `35ae23b`
-- Tilt hasn't picked up changes yet
-- Need manual image rebuild or Tilt force update
-
----
-
-## ๐ Current Data Status
-
-| Data Source | Records | Status | AI Ready? |
-|-------------|---------|--------|-----------|
-| **Stock Movements** | 847 | โ
Excellent | โ
YES |
-| **Stockout Events** | 10 | โ
Good | โ
YES |
-| **Worker Assignments** | 75 | โ
Good (no duplicates) | โ
YES |
-| **Production Batches** | 88 | โ
Good | โ
YES |
-| **PO Items** | 18 | โ
Excellent (with price trends) | โ
YES |
-| **Price Trends** | 6 ingredients | โ
Excellent | โ
YES |
-| **Forecasts** | 28 (in fixture) | โ ๏ธ 0 cloned | โ NO |
-
----
-
-## ๐ฏ Expected AI Insights
-
-### Current State (After Procurement Fix)
-| Service | Insights | Confidence | Status |
-|---------|----------|------------|--------|
-| **Inventory** | 2-3 | High | โ
READY |
-| **Production** | 1-2 | High | โ
READY |
-| **Procurement** | 1-2 | High | โ
**READY** (price trends enabled) |
-| **Forecasting** | 0 | N/A | โ ๏ธ BLOCKED (image not rebuilt) |
-| **TOTAL** | **4-7** | - | โ
**GOOD** |
-
-### After Forecasting Image Rebuild
-| Service | Insights | Status |
-|---------|----------|--------|
-| **Inventory** | 2-3 | โ
|
-| **Production** | 1-2 | โ
|
-| **Procurement** | 1-2 | โ
|
-| **Forecasting** | 1-2 | ๐ง After rebuild |
-| **TOTAL** | **6-10** | ๐ฏ **TARGET** |
-
----
-
-## ๐ Next Steps
-
-### Immediate Actions Required
-
-**1. Rebuild Forecasting Service Docker Image**
-
-Option A - Manual Tilt trigger:
-```bash
-# Access Tilt UI at http://localhost:10350
-# Find "forecasting-service" and click "Force Update"
-```
-
-Option B - Manual Docker rebuild:
-```bash
-cd services/forecasting
-docker build -t bakery/forecasting-service:latest .
-kubectl delete pod -n bakery-ia $(kubectl get pods -n bakery-ia | grep forecasting-service | awk '{print $1}')
-```
-
-Option C - Wait for Tilt auto-rebuild (may take a few minutes)
-
-**2. Test Demo Session After Rebuild**
-```bash
-# Create new demo session
-curl -X POST http://localhost:8001/api/v1/demo/sessions \
- -H "Content-Type: application/json" \
- -d '{"demo_account_type":"professional"}' | jq
-
-# Save virtual_tenant_id from response
-
-# Wait 60 seconds for cloning + AI models
-
-# Check forecasting cloned successfully
-kubectl logs -n bakery-ia $(kubectl get pods -n bakery-ia | grep demo-session | awk '{print $1}') \
- | grep "forecasting.*completed"
-# Expected: "forecasting ... records_cloned=28"
-
-# Check AI insights count
-curl "http://localhost:8001/api/v1/ai-insights/tenants/{tenant_id}/insights" | jq '.total'
-# Expected: 6-10 insights
-```
-
----
-
-## ๐ Files Modified
-
-| File | Change | Commit |
-|------|--------|--------|
-| [services/orchestrator/app/api/internal_demo.py](services/orchestrator/app/api/internal_demo.py#L16) | Added OrchestrationStatus import | `c566967` |
-| [shared/demo/fixtures/professional/06-production.json](shared/demo/fixtures/professional/06-production.json) | Removed 56 duplicate workers | Manual edit |
-| [shared/demo/fixtures/professional/07-procurement.json](shared/demo/fixtures/professional/07-procurement.json) | Fixed structure + price trends | `dd79e6d` |
-| [services/forecasting/app/api/internal_demo.py](services/forecasting/app/api/internal_demo.py#L320-L353) | Fixed clone endpoint | `35ae23b` |
-
----
-
-## ๐ Documentation Created
-
-1. **[DEMO_SESSION_ANALYSIS_REPORT.md](DEMO_SESSION_ANALYSIS_REPORT.md)** - Complete log analysis
-2. **[FIX_MISSING_INSIGHTS.md](FIX_MISSING_INSIGHTS.md)** - Forecasting & procurement fix guide
-3. **[FINAL_STATUS_SUMMARY.md](FINAL_STATUS_SUMMARY.md)** - Previous status overview
-4. **[AI_INSIGHTS_DEMO_SETUP_GUIDE.md](AI_INSIGHTS_DEMO_SETUP_GUIDE.md)** - Comprehensive setup guide
-5. **[AI_INSIGHTS_DATA_FLOW.md](AI_INSIGHTS_DATA_FLOW.md)** - Architecture diagrams
-6. **[AI_INSIGHTS_QUICK_START.md](AI_INSIGHTS_QUICK_START.md)** - Quick reference
-7. **[verify_fixes.sh](verify_fixes.sh)** - Automated verification script
-8. **[fix_procurement_structure.py](shared/demo/fixtures/professional/fix_procurement_structure.py)** - Procurement fix script
-9. **[COMPLETE_FIX_SUMMARY.md](COMPLETE_FIX_SUMMARY.md)** - This document
-
----
-
-## โจ Summary
-
-### โ
Completed
-1. **Orchestrator bug** - Fixed and deployed
-2. **Production duplicates** - Cleaned up
-3. **Procurement structure** - Fixed and enhanced with price trends
-4. **Forecasting code** - Fixed but needs image rebuild
-5. **Documentation** - Complete
-
-### โ ๏ธ Pending
-1. **Forecasting Docker image** - Needs rebuild (Tilt or manual)
-
-### ๐ฏ Impact
-- **Current**: 4-7 AI insights per demo session โ
-- **After image rebuild**: 6-10 AI insights per demo session ๐ฏ
-- **Production ready**: Yes (after forecasting image rebuild)
-
----
-
-## ๐ Verification Commands
-
-```bash
-# Check orchestrator import
-grep "OrchestrationStatus" services/orchestrator/app/api/internal_demo.py
-
-# Check production no duplicates
-cat shared/demo/fixtures/professional/06-production.json | \
- jq '[.batches[] | select(.staff_assigned) | .staff_assigned | group_by(.) | select(length > 1)] | length'
-# Expected: 0
-
-# Check procurement structure
-cat shared/demo/fixtures/professional/07-procurement.json | \
- jq '[.purchase_orders[] | select(.items)] | length'
-# Expected: 0 (no nested items)
-
-# Check forecasting fix in code
-grep "parse_date_field(requested_at_raw" services/forecasting/app/api/internal_demo.py
-# Expected: Match found
-
-# Check forecasting pod image
-kubectl get pod -n bakery-ia $(kubectl get pods -n bakery-ia | grep forecasting-service | awk '{print $1}') \
- -o jsonpath='{.status.containerStatuses[0].imageID}'
-# Should show new image hash after rebuild
-```
-
----
-
-**๐ Bottom Line**: All critical bugs fixed in code. After forecasting image rebuild, demo sessions will generate **6-10 AI insights** with full procurement price trend analysis and demand forecasting capabilities.
diff --git a/DEMO_ARCHITECTURE_COMPLETE_SPEC.md b/DEMO_ARCHITECTURE_COMPLETE_SPEC.md
deleted file mode 100644
index 808ed83e..00000000
--- a/DEMO_ARCHITECTURE_COMPLETE_SPEC.md
+++ /dev/null
@@ -1,2335 +0,0 @@
-# ๐ Arquitectura Definitiva de Sesión de Demo — Alta Fidelidad, Baja Latencia
-
-## Resumen Ejecutivo
-
-Este documento especifica los **requisitos técnicos completos** para el sistema de **demostraciones técnicas hiperrealistas y deterministas** de Bakery-IA, basado en la implementación real actual del proyecto.
-
-**Objetivo principal:** Cada sesión debe simular un entorno productivo operativo —con datos interrelacionados, coherentes y contextualmente creíbles—, **sin dependencia de infraestructura batch (Jobs, CronJobs, scripts externos)**.
-
-**Características clave:**
-- ✅ Creación **instantánea (5–15 s)** mediante llamadas HTTP paralelas
-- ✅ **Totalmente reproducible** con garantías de integridad cruzada
-- ✅ **Datos temporales dinámicos** ajustados al momento de creación de la sesión
-- ✅ **70% menos código** que la arquitectura anterior basada en Kubernetes Jobs
-- ✅ **3-6x más rápido** que el enfoque anterior
-
----
-
-## ๐ Tabla de Contenidos
-
-1. [Fase 0: Análisis y Alineación con Modelos de Base de Datos](#fase-0)
-2. [Arquitectura de Microservicios](#arquitectura)
-3. [Garantía de Integridad Transversal](#integridad)
-4. [Determinismo Temporal](#determinismo)
-5. [Modelo de Datos Base (SSOT)](#ssot)
-6. [Estado Semilla del Orquestador](#orquestador)
-7. [Limpieza de Sesión](#limpieza)
-8. [Escenarios de Demo](#escenarios)
-9. [Verificación Técnica](#verificacion)
-
----
-
-
-## ๐ FASE 0: ANÁLISIS Y ALINEACIÓN CON MODELOS REALES DE BASE DE DATOS
-
-### ๐ Objetivo
-
-Derivar **esquemas de datos exactos y actualizados** para cada servicio, a partir de sus **modelos de base de datos en producción**, y usarlos como *contrato de validación* para los archivos JSON de demo.
-
-> ✨ **Principio**: *Los datos de demostración deben ser estructuralmente aceptables por los ORM/servicios tal como están — sin transformaciones ad-hoc ni supresión de restricciones.*
-
-### ✅ Actividades Obligatorias
-
-#### 1. Extracción de Modelos Fuente-de-Verdad
-
-Para cada servicio con clonación, extraer modelos reales desde:
-
-**Archivos de modelos existentes:**
-```
-/services/tenant/app/models/tenants.py
-/services/auth/app/models/users.py
-/services/inventory/app/models/inventory.py
-/services/production/app/models/production.py
-/services/recipes/app/models/recipes.py
-/services/procurement/app/models/procurement.py
-/services/suppliers/app/models/suppliers.py
-/services/orders/app/models/orders.py
-/services/sales/app/models/sales.py
-/services/forecasting/app/models/forecasting.py
-/services/orchestrator/app/models/orchestrator.py
-```
-
-**Documentar para cada modelo:**
-- Campos obligatorios (`NOT NULL`, `nullable=False`)
-- Tipos de dato exactos (`UUID`, `DateTime(timezone=True)`, `Float`, `Enum`)
-- Claves forรกneas internas (con nombre de columna y tabla destino)
-- Referencias cross-service (UUIDs sin FK constraints)
-- รndices รบnicos (ej.: `unique=True`, รญndices compuestos)
-- Validaciones de negocio (ej.: `quantity >= 0`)
-- Valores por defecto (ej.: `default=uuid.uuid4`, `default=ProductionStatus.PENDING`)
-
-#### 2. Ejemplo de Modelo Real: ProductionBatch
-
-**Archivo:** [`/services/production/app/models/production.py:68-150`](services/production/app/models/production.py#L68-L150)
-
-```python
-class ProductionBatch(Base):
- """Production batch model for tracking individual production runs"""
- __tablename__ = "production_batches"
-
- # Primary identification
- id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
- tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)
- batch_number = Column(String(50), nullable=False, unique=True, index=True)
-
- # Product and recipe information (cross-service references)
- product_id = Column(UUID(as_uuid=True), nullable=False, index=True) # โ inventory
- product_name = Column(String(255), nullable=False)
- recipe_id = Column(UUID(as_uuid=True), nullable=True) # โ recipes
-
- # Production planning (REQUIRED temporal fields)
- planned_start_time = Column(DateTime(timezone=True), nullable=False)
- planned_end_time = Column(DateTime(timezone=True), nullable=False)
- planned_quantity = Column(Float, nullable=False)
- planned_duration_minutes = Column(Integer, nullable=False)
-
- # Actual production tracking (OPTIONAL - only for started batches)
- actual_start_time = Column(DateTime(timezone=True), nullable=True)
- actual_end_time = Column(DateTime(timezone=True), nullable=True)
- actual_quantity = Column(Float, nullable=True)
-
- # Status and priority (REQUIRED with defaults)
- status = Column(
- SQLEnum(ProductionStatus),
- nullable=False,
- default=ProductionStatus.PENDING,
- index=True
- )
- priority = Column(
- SQLEnum(ProductionPriority),
- nullable=False,
- default=ProductionPriority.MEDIUM
- )
-
- # Process stage tracking (OPTIONAL)
- current_process_stage = Column(SQLEnum(ProcessStage), nullable=True, index=True)
-
- # Quality metrics (OPTIONAL)
- yield_percentage = Column(Float, nullable=True)
- quality_score = Column(Float, nullable=True)
- waste_quantity = Column(Float, nullable=True)
-
- # Equipment and staff (JSON arrays of UUIDs)
- equipment_used = Column(JSON, nullable=True) # [uuid1, uuid2, ...]
- staff_assigned = Column(JSON, nullable=True) # [uuid1, uuid2, ...]
-
- # Cross-service order tracking
- order_id = Column(UUID(as_uuid=True), nullable=True) # โ orders service
- forecast_id = Column(UUID(as_uuid=True), nullable=True) # โ forecasting
-
- # Reasoning data for i18n support
- reasoning_data = Column(JSON, nullable=True)
-
- # Audit fields
- created_at = Column(DateTime(timezone=True), server_default=func.now())
- updated_at = Column(DateTime(timezone=True), onupdate=func.now())
-```
-
-**Reglas de validaciรณn derivadas:**
-- `planned_start_time < planned_end_time`
-- `planned_quantity > 0`
-- `actual_quantity <= planned_quantity * 1.1` (permite 10% sobre-producciรณn)
-- `status = IN_PROGRESS` โ `actual_start_time` debe existir
-- `status = COMPLETED` โ `actual_end_time` debe existir
-- `equipment_used` debe contener al menos 1 UUID vรกlido
-
-#### 3. Generaciรณn de Esquemas de Validaciรณn (JSON Schema)
-
-Para cada modelo, crear JSON Schema Draft 7+ en:
-```
-shared/demo/schemas/{service_name}/{model_name}.schema.json
-```
-
-**Ejemplo para ProductionBatch:**
-
-```json
-{
- "$schema": "http://json-schema.org/draft-07/schema#",
- "$id": "https://schemas.bakery-ia.com/demo/production/batch/v1",
- "type": "object",
- "title": "ProductionBatch",
- "description": "Production batch for demo cloning",
- "properties": {
- "id": {
- "type": "string",
- "format": "uuid",
- "description": "Unique batch identifier"
- },
- "tenant_id": {
- "type": "string",
- "format": "uuid",
- "description": "Tenant owner (replaced during cloning)"
- },
- "batch_number": {
- "type": "string",
- "pattern": "^BATCH-[0-9]{8}-[A-Z0-9]{6}$",
- "description": "Unique batch code"
- },
- "product_id": {
- "type": "string",
- "format": "uuid",
- "description": "Cross-service ref to inventory.Ingredient (type=FINISHED_PRODUCT)"
- },
- "product_name": {
- "type": "string",
- "minLength": 1,
- "maxLength": 255
- },
- "recipe_id": {
- "type": ["string", "null"],
- "format": "uuid",
- "description": "Cross-service ref to recipes.Recipe"
- },
- "planned_start_time": {
- "type": "string",
- "format": "date-time",
- "description": "ISO 8601 datetime with timezone"
- },
- "planned_end_time": {
- "type": "string",
- "format": "date-time"
- },
- "planned_quantity": {
- "type": "number",
- "minimum": 0.1,
- "description": "Quantity in product's unit of measure"
- },
- "planned_duration_minutes": {
- "type": "integer",
- "minimum": 1
- },
- "actual_start_time": {
- "type": ["string", "null"],
- "format": "date-time",
- "description": "Set when status becomes IN_PROGRESS"
- },
- "actual_end_time": {
- "type": ["string", "null"],
- "format": "date-time",
- "description": "Set when status becomes COMPLETED"
- },
- "status": {
- "type": "string",
- "enum": ["PENDING", "IN_PROGRESS", "COMPLETED", "CANCELLED", "ON_HOLD", "QUALITY_CHECK", "FAILED"],
- "default": "PENDING"
- },
- "priority": {
- "type": "string",
- "enum": ["LOW", "MEDIUM", "HIGH", "URGENT"],
- "default": "MEDIUM"
- },
- "current_process_stage": {
- "type": ["string", "null"],
- "enum": ["mixing", "proofing", "shaping", "baking", "cooling", "packaging", "finishing", null]
- },
- "equipment_used": {
- "type": ["array", "null"],
- "items": { "type": "string", "format": "uuid" },
- "minItems": 1,
- "description": "Array of Equipment IDs"
- },
- "staff_assigned": {
- "type": ["array", "null"],
- "items": { "type": "string", "format": "uuid" }
- }
- },
- "required": [
- "id", "tenant_id", "batch_number", "product_id", "product_name",
- "planned_start_time", "planned_end_time", "planned_quantity",
- "planned_duration_minutes", "status", "priority"
- ],
- "additionalProperties": false,
- "allOf": [
- {
- "if": {
- "properties": { "status": { "const": "IN_PROGRESS" } }
- },
- "then": {
- "required": ["actual_start_time"]
- }
- },
- {
- "if": {
- "properties": { "status": { "const": "COMPLETED" } }
- },
- "then": {
- "required": ["actual_start_time", "actual_end_time", "actual_quantity"]
- }
- }
- ]
-}
-```
-
-#### 4. Creaciรณn de Fixtures Base con Validaciรณn CI/CD
-
-**Ubicaciรณn actual de datos semilla:**
-```
-services/{service}/scripts/demo/{entity}_es.json
-```
-
-**Archivos existentes (legacy - referenciar para migraciรณn):**
-```
-/services/auth/scripts/demo/usuarios_staff_es.json
-/services/suppliers/scripts/demo/proveedores_es.json
-/services/recipes/scripts/demo/recetas_es.json
-/services/inventory/scripts/demo/ingredientes_es.json
-/services/inventory/scripts/demo/stock_lotes_es.json
-/services/production/scripts/demo/equipos_es.json
-/services/production/scripts/demo/lotes_produccion_es.json
-/services/production/scripts/demo/plantillas_calidad_es.json
-/services/orders/scripts/demo/clientes_es.json
-/services/orders/scripts/demo/pedidos_config_es.json
-/services/forecasting/scripts/demo/previsiones_config_es.json
-```
-
-**Nueva estructura propuesta:**
-```
-shared/demo/fixtures/
-├── schemas/                    # JSON Schemas for validation
-│   ├── production/
-│   │   ├── batch.schema.json
-│   │   ├── equipment.schema.json
-│   │   └── quality_check.schema.json
-│   ├── inventory/
-│   │   ├── ingredient.schema.json
-│   │   └── stock.schema.json
-│   └── ...
-├── professional/               # Professional tier seed data
-│   ├── 01-tenant.json
-│   ├── 02-auth.json
-│   ├── 03-inventory.json
-│   ├── 04-recipes.json
-│   ├── 05-suppliers.json
-│   ├── 06-production.json
-│   ├── 07-procurement.json
-│   ├── 08-orders.json
-│   ├── 09-sales.json
-│   └── 10-forecasting.json
-└── enterprise/                 # Enterprise tier seed data
-    ├── parent/
-    │   └── ...
-    └── children/
-        ├── madrid.json
-        ├── barcelona.json
-        └── valencia.json
-```
-
-**Integraciรณn CI/CD (GitHub Actions):**
-
-```yaml
-# .github/workflows/validate-demo-data.yml
-name: Validate Demo Data
-
-on: [push, pull_request]
-
-jobs:
- validate:
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@v3
-
- - name: Setup Node.js (for ajv-cli)
- uses: actions/setup-node@v3
- with:
- node-version: '18'
-
- - name: Install ajv-cli
- run: npm install -g ajv-cli
-
- - name: Validate Professional Tier Data
- run: |
- for schema in shared/demo/schemas/*/*.schema.json; do
- service=$(basename $(dirname $schema))
- model=$(basename $schema .schema.json)
-
- # Find corresponding JSON file
- json_file="shared/demo/fixtures/professional/*-${service}.json"
-
- if ls $json_file 1> /dev/null 2>&1; then
- echo "Validating ${service}/${model}..."
- ajv validate -s "$schema" -d "$json_file" --strict=false
- fi
- done
-
- - name: Validate Enterprise Tier Data
- run: |
- # Similar validation for enterprise tier
- echo "Validating enterprise tier..."
-
- - name: Check Cross-Service References
- run: |
- # Custom script to validate UUIDs exist across services
- python scripts/validate_cross_refs.py
-```
-
-**Script de validaciรณn de referencias cruzadas:**
-
-```python
-# scripts/validate_cross_refs.py
-"""
-Validates cross-service UUID references in demo data.
-Ensures referential integrity without database constraints.
-"""
-import json
-from pathlib import Path
-from typing import Dict, Set
-import sys
-
-def load_all_fixtures(tier: str = "professional") -> Dict[str, any]:
- """Load all JSON fixtures for a tier"""
- fixtures_dir = Path(f"shared/demo/fixtures/{tier}")
- data = {}
-
- for json_file in sorted(fixtures_dir.glob("*.json")):
- service = json_file.stem.split('-', 1)[1] # Remove number prefix
- with open(json_file, 'r') as f:
- data[service] = json.load(f)
-
- return data
-
-def extract_ids(data: dict, entity_type: str) -> Set[str]:
- """Extract all IDs for an entity type"""
- entities = data.get(entity_type, [])
- return {e['id'] for e in entities}
-
-def validate_references(data: Dict[str, any]) -> bool:
- """Validate all cross-service references"""
- errors = []
-
- # Extract all available IDs
- ingredient_ids = extract_ids(data.get('inventory', {}), 'ingredients')
- recipe_ids = extract_ids(data.get('recipes', {}), 'recipes')
- equipment_ids = extract_ids(data.get('production', {}), 'equipment')
- supplier_ids = extract_ids(data.get('suppliers', {}), 'suppliers')
-
- # Validate ProductionBatch references
- for batch in data.get('production', {}).get('batches', []):
- # Check product_id exists in inventory
- if batch['product_id'] not in ingredient_ids:
- errors.append(
- f"Batch {batch['batch_number']}: "
- f"product_id {batch['product_id']} not found in inventory"
- )
-
- # Check recipe_id exists in recipes
- if batch.get('recipe_id') and batch['recipe_id'] not in recipe_ids:
- errors.append(
- f"Batch {batch['batch_number']}: "
- f"recipe_id {batch['recipe_id']} not found in recipes"
- )
-
- # Check equipment_used exists
- for eq_id in batch.get('equipment_used', []):
- if eq_id not in equipment_ids:
- errors.append(
- f"Batch {batch['batch_number']}: "
- f"equipment {eq_id} not found in equipment"
- )
-
- # Validate Recipe ingredient references
- for recipe in data.get('recipes', {}).get('recipes', []):
- for ingredient in recipe.get('ingredients', []):
- if ingredient['ingredient_id'] not in ingredient_ids:
- errors.append(
- f"Recipe {recipe['name']}: "
- f"ingredient_id {ingredient['ingredient_id']} not found"
- )
-
- # Validate Stock supplier references
- for stock in data.get('inventory', {}).get('stock', []):
- if stock.get('supplier_id') and stock['supplier_id'] not in supplier_ids:
- errors.append(
- f"Stock {stock['batch_number']}: "
- f"supplier_id {stock['supplier_id']} not found"
- )
-
- # Print errors
- if errors:
- print("❌ Cross-reference validation FAILED:")
- for error in errors:
- print(f" - {error}")
- return False
-
- print("✅ All cross-service references are valid")
- return True
-
-if __name__ == "__main__":
- professional_data = load_all_fixtures("professional")
-
- if not validate_references(professional_data):
- sys.exit(1)
-
- print("✅ Demo data validation passed")
-```
-
-#### 5. Gestiรณn de Evoluciรณn de Modelos
-
-**Crear CHANGELOG.md por servicio:**
-
-```markdown
-# Production Service - Demo Data Changelog
-
-## 2025-12-13
-- **BREAKING**: Added required field `reasoning_data` (JSON) to ProductionBatch
- - Migration: Set to `null` for existing batches
- - Demo data: Added reasoning structure for i18n support
-- Updated JSON schema: `production/batch.schema.json` v1 โ v2
-
-## 2025-12-01
-- Added optional field `shelf_life_days` (int, nullable) to Ingredient model
-- Demo data: Updated ingredientes_es.json with shelf_life values
-- JSON schema: `inventory/ingredient.schema.json` remains v1 (backward compatible)
-```
-
-**Versionado de esquemas:**
-
-```json
-{
- "$schema": "http://json-schema.org/draft-07/schema#",
- "$id": "https://schemas.bakery-ia.com/demo/production/batch/v2",
- "version": "2.0.0",
- "changelog": "https://github.com/bakery-ia/schemas/blob/main/CHANGELOG.md#production-batch-v2",
- ...
-}
-```
-
-**Compatibilidad hacia atrรกs:**
-- Nuevos campos deben ser `nullable=True` o tener valores `default`
-- Nunca eliminar campos sin ciclo de deprecaciรณn
-- Mantener versiones antiguas de schemas durante al menos 2 releases
-
----
-
-
-## ๐ ARQUITECTURA DE MICROSERVICIOS
-
-### Inventario de Servicios (19 Total)
-
-**Archivo de referencia:** [`/services/demo_session/app/services/clone_orchestrator.py:42-106`](services/demo_session/app/services/clone_orchestrator.py#L42-L106)
-
-| Servicio | Puerto | Rol | Clonación | URL Kubernetes | Timeout |
-|----------|--------|-----|-----------|----------------|---------|
-| **tenant** | 8000 | Gestión de tenants y suscripciones | ✅ Requerido | `http://tenant-service:8000` | 10s |
-| **auth** | 8001 | Autenticación y usuarios | ✅ Requerido | `http://auth-service:8001` | 10s |
-| **inventory** | 8002 | Ingredientes y stock | ✅ Opcional | `http://inventory-service:8002` | 30s |
-| **production** | 8003 | Lotes y equipos de producción | ✅ Opcional | `http://production-service:8003` | 30s |
-| **recipes** | 8004 | Recetas y BOM | ✅ Opcional | `http://recipes-service:8004` | 15s |
-| **procurement** | 8005 | Órdenes de compra | ✅ Opcional | `http://procurement-service:8005` | 25s |
-| **suppliers** | 8006 | Proveedores | ✅ Opcional | `http://suppliers-service:8006` | 20s |
-| **orders** | 8007 | Pedidos de clientes | ✅ Opcional | `http://orders-service:8007` | 15s |
-| **sales** | 8008 | Historial de ventas | ✅ Opcional | `http://sales-service:8008` | 30s |
-| **forecasting** | 8009 | Previsión de demanda | ✅ Opcional | `http://forecasting-service:8009` | 15s |
-| **distribution** | 8010 | Logística y distribución | ❌ Futuro | `http://distribution-service:8010` | - |
-| **pos** | 8011 | Integración TPV | ❌ No necesario | `http://pos-service:8011` | - |
-| **orchestrator** | 8012 | Orquestación de workflows | ✅ Opcional | `http://orchestrator-service:8012` | 15s |
-| **ai_insights** | 8013 | Insights generados por IA | ❌ Calculados post-clone | `http://ai-insights-service:8013` | - |
-| **training** | 8014 | Entrenamiento ML | ❌ No necesario | `http://training-service:8014` | - |
-| **alert_processor** | 8015 | Procesamiento de alertas | ❌ Disparado post-clone | `http://alert-processor-service:8015` | - |
-| **notification** | 8016 | Notificaciones (email/WhatsApp) | ❌ No necesario | `http://notification-service:8016` | - |
-| **external** | 8017 | Datos externos (clima/tráfico) | ❌ No necesario | `http://external-service:8017` | - |
-| **demo_session** | 8018 | Orquestación de demos | N/A | `http://demo-session-service:8018` | - |
-
-### Flujo de Clonaciรณn
-
-**Archivo de referencia:** [`/services/demo_session/app/services/clone_orchestrator.py`](services/demo_session/app/services/clone_orchestrator.py)
-
-```
-POST /api/demo/sessions
- โ
-DemoSessionManager.create_session()
- โ
-CloneOrchestrator.clone_all_services(
- base_tenant_id="a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6",
- virtual_tenant_id=,
- demo_account_type="professional",
- session_id="demo_abc123",
- session_created_at="2025-12-13T10:00:00Z"
-)
- โ
-โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
-โ FASE 1: Clonaciรณn de Tenant Padre (Paralelo) โ
-โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
- โ
- โ โโบ POST tenant-service:8000/internal/demo/clone
- โ โโบ POST auth-service:8001/internal/demo/clone
- โ โโบ POST inventory-service:8002/internal/demo/clone
- โ โโบ POST recipes-service:8004/internal/demo/clone
- โ โโบ POST suppliers-service:8006/internal/demo/clone
- โ โโบ POST production-service:8003/internal/demo/clone
- โ โโบ POST procurement-service:8005/internal/demo/clone
- โ โโบ POST orders-service:8007/internal/demo/clone
- โ โโบ POST sales-service:8008/internal/demo/clone
- โ โโบ POST forecasting-service:8009/internal/demo/clone
- โโโบ POST orchestrator-service:8012/internal/demo/clone
- โ
- [Esperar a que todos completen - asyncio.gather()]
- โ
-โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
-โ FASE 2: [Solo Enterprise] Clonaciรณn de Outlets Hijos โ
-โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
- โ
- โ โโบ Para cada child_outlet (Madrid, Barcelona, Valencia):
- โ โ
- โ POST tenant-service:8000/internal/demo/create-child
- โ โ
- โ [Clonar servicios para child_tenant_id - paralelo]
- โ โ โโบ POST inventory-service/internal/demo/clone
- โ โ โโบ POST production-service/internal/demo/clone
- โ โ โโบ POST orders-service/internal/demo/clone
- โ โโโบ ...
- โ
-โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
-โ FASE 3: Generaciรณn de Alertas Post-Clonaciรณn โ
-โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
- โ
- โ โโบ POST procurement/internal/delivery-tracking/trigger
- โ โโบ POST inventory/internal/alerts/trigger
- โโโบ POST production/internal/alerts/trigger
- โ
-โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
-โ FASE 4: [Professional/Enterprise] Generaciรณn de Insights IA โ
-โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
- โ
- โ โโบ POST procurement/internal/insights/price/trigger
- โ โโบ POST inventory/internal/insights/safety-stock/trigger
- โโโบ POST production/internal/insights/yield/trigger
- โ
-โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
-โ FASE 5: Actualizaciรณn de Estado de Sesiรณn โ
-โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
- โ
- โ โโบ UPDATE demo_sessions SET status='READY', cloning_completed_at=NOW()
- โโโบ RETURN session + credentials
-```
-
-**Tiempos esperados:**
-- Professional: 5-10 segundos
-- Enterprise: 10-15 segundos (3 child outlets)
-
-**Comparación con arquitectura anterior:**
-- Professional antiguo: 30-40 segundos
-- Enterprise antiguo: 60-75 segundos
-- **Mejora: 3-6x más rápido**
-
----
-
-
-## ๐ GARANTÍA DE INTEGRIDAD TRANSVERSAL
-
-### Principio Rector
-
-> **Ningún ID referenciado puede existir en un servicio sin que su entidad origen exista en su servicio propietario, bajo el *mismo tenant virtual*.**
-
-### Estrategia de Referencias Cross-Service
-
-**Importante:** No existen claves foráneas (FK) entre servicios - solo UUIDs almacenados.
-
-**Archivo de referencia:** Cada servicio implementa su propia lógica de validación en `/services/{service}/app/api/internal_demo.py`
-
-### Tabla de Integridad Obligatoria
-
-| Entidad (Servicio A) | Campo | Referencia | Entidad (Servicio B) | Validación Requerida |
-|----------------------|-------|------------|----------------------|----------------------|
-| `ProductionBatch` (production) | `recipe_id` | UUID | `Recipe` (recipes) | ✅ Debe existir con `tenant_id = virtual_tenant_id` y `is_active = true` |
-| `ProductionBatch` (production) | `product_id` | UUID | `Ingredient` (inventory) | ✅ Debe ser tipo `FINISHED_PRODUCT` del mismo tenant |
-| `ProductionBatch` (production) | `equipment_used` | UUID[] | `Equipment` (production) | ✅ Todos los IDs deben existir con `status = OPERATIONAL` |
-| `ProductionBatch` (production) | `order_id` | UUID | `CustomerOrder` (orders) | ✅ Debe existir y tener `status != CANCELLED` |
-| `ProductionBatch` (production) | `forecast_id` | UUID | `Forecast` (forecasting) | ✅ Debe existir para el mismo `product_id` |
-| `Recipe` (recipes) | `finished_product_id` | UUID | `Ingredient` (inventory) | ✅ Debe ser tipo `FINISHED_PRODUCT` |
-| `RecipeIngredient` (recipes) | `ingredient_id` | UUID | `Ingredient` (inventory) | ✅ Debe existir y tener `product_type = INGREDIENT` |
-| `Stock` (inventory) | `ingredient_id` | UUID | `Ingredient` (inventory) | ✅ FK interna - validación automática |
-| `Stock` (inventory) | `supplier_id` | UUID | `Supplier` (suppliers) | ✅ Debe existir con contrato vigente |
-| `PurchaseOrder` (procurement) | `supplier_id` | UUID | `Supplier` (suppliers) | ✅ Debe existir y estar activo |
-| `PurchaseOrderItem` (procurement) | `ingredient_id` | UUID | `Ingredient` (inventory) | ✅ Debe ser tipo `INGREDIENT` (no producto terminado) |
-| `PurchaseOrderItem` (procurement) | `purchase_order_id` | UUID | `PurchaseOrder` (procurement) | ✅ FK interna - validación automática |
-| `ProcurementRequirement` (procurement) | `plan_id` | UUID | `ProcurementPlan` (procurement) | ✅ FK interna - validación automática |
-| `ProcurementRequirement` (procurement) | `ingredient_id` | UUID | `Ingredient` (inventory) | ✅ Debe existir |
-| `CustomerOrder` (orders) | `customer_id` | UUID | `Customer` (orders) | ✅ FK interna - validación automática |
-| `OrderItem` (orders) | `customer_order_id` | UUID | `CustomerOrder` (orders) | ✅ FK interna - validación automática |
-| `OrderItem` (orders) | `product_id` | UUID | `Ingredient` (inventory) | ✅ Debe ser tipo `FINISHED_PRODUCT` |
-| `QualityCheck` (production) | `batch_id` | UUID | `ProductionBatch` (production) | ✅ FK interna - validación automática |
-| `QualityCheck` (production) | `template_id` | UUID | `QualityCheckTemplate` (production) | ✅ FK interna - validación automática |
-| `SalesData` (sales) | `product_id` | UUID | `Ingredient` (inventory) | ✅ Debe ser tipo `FINISHED_PRODUCT` |
-| `Forecast` (forecasting) | `product_id` | UUID | `Ingredient` (inventory) | ✅ Debe existir |
-
-### Mecanismo de Validación
-
-**Opción 1: Validación Pre-Clonación (Recomendada)**
-
-Validar todas las referencias **en memoria** al cargar los datos base, asumiendo que los archivos JSON están validados por CI/CD.
-
-```python
-# En clone_orchestrator.py o pre-clone validation script
-def validate_cross_service_refs(data: Dict[str, Any]) -> None:
- """
- Validates all cross-service references before cloning.
- Raises ValidationError if any reference is invalid.
- """
- errors = []
-
- # Extract available IDs
- ingredient_ids = {i['id'] for i in data['inventory']['ingredients']}
- recipe_ids = {r['id'] for r in data['recipes']['recipes']}
- equipment_ids = {e['id'] for e in data['production']['equipment']}
-
- # Validate ProductionBatch references
- for batch in data['production']['batches']:
- if batch['product_id'] not in ingredient_ids:
- errors.append(f"Batch {batch['batch_number']}: product_id not found")
-
- if batch.get('recipe_id') and batch['recipe_id'] not in recipe_ids:
- errors.append(f"Batch {batch['batch_number']}: recipe_id not found")
-
- for eq_id in batch.get('equipment_used', []):
- if eq_id not in equipment_ids:
- errors.append(f"Batch {batch['batch_number']}: equipment {eq_id} not found")
-
- if errors:
- raise ValidationError(f"Cross-service reference validation failed:\n" + "\n".join(errors))
-```
-
-**Opciรณn 2: Validaciรณn Runtime (Solo si datos no estรกn pre-validados)**
-
-```python
-# En internal_demo.py de cada servicio
-async def validate_cross_service_reference(
- service_url: str,
- entity_type: str,
- entity_id: UUID,
- tenant_id: UUID
-) -> bool:
- """
- Check if a cross-service reference exists.
-
- Args:
- service_url: URL of the service to check (e.g., "http://inventory-service:8000")
- entity_type: Type of entity (e.g., "ingredient", "recipe")
- entity_id: UUID of the entity
- tenant_id: Tenant ID to filter by
-
- Returns:
- True if entity exists, False otherwise
- """
- async with httpx.AsyncClient(timeout=5.0) as client:
- response = await client.head(
- f"{service_url}/internal/demo/exists",
- params={
- "entity_type": entity_type,
- "id": str(entity_id),
- "tenant_id": str(tenant_id)
- },
- headers={"X-Internal-API-Key": settings.INTERNAL_API_KEY}
- )
- return response.status_code == 200
-
-# Uso en clonaciรณn
-if batch.recipe_id:
- if not await validate_cross_service_reference(
- "http://recipes-service:8000",
- "recipe",
- batch.recipe_id,
- virtual_tenant_id
- ):
- raise IntegrityError(
- f"Recipe {batch.recipe_id} not found for batch {batch.batch_number}"
- )
-```
-
-**Endpoint de existencia (implementar en cada servicio):**
-
-```python
-# En services/{service}/app/api/internal_demo.py
-@router.head("/exists", dependencies=[Depends(verify_internal_key)])
-async def check_entity_exists(
- entity_type: str,
- id: UUID,
- tenant_id: UUID,
- db: AsyncSession = Depends(get_db)
-):
- """
- Check if an entity exists for a tenant.
- Returns 200 if exists, 404 if not.
- """
- if entity_type == "recipe":
- result = await db.execute(
- select(Recipe)
- .where(Recipe.id == id)
- .where(Recipe.tenant_id == tenant_id)
- )
- entity = result.scalar_one_or_none()
- # ... otros entity_types
-
- if entity:
- return Response(status_code=200)
- else:
- return Response(status_code=404)
-```
-
----
-
-
-## 📅 DETERMINISMO TEMPORAL
-
-### Lรญnea Base Fija
-
-**Archivo de referencia:** [`/shared/utils/demo_dates.py:11-42`](shared/utils/demo_dates.py#L11-L42)
-
-```python
-def get_base_reference_date(session_created_at: Optional[datetime] = None) -> datetime:
- """
- Get the base reference date for demo data.
-
- If session_created_at is provided, calculate relative to it.
- Otherwise, use current time (for backwards compatibility with seed scripts).
-
- Returns:
- Base reference date at 6 AM UTC
- """
- if session_created_at:
- if session_created_at.tzinfo is None:
- session_created_at = session_created_at.replace(tzinfo=timezone.utc)
- # Reference is session creation time at 6 AM that day
- return session_created_at.replace(
- hour=6, minute=0, second=0, microsecond=0
- )
- # Fallback for seed scripts: use today at 6 AM
- now = datetime.now(timezone.utc)
- return now.replace(hour=6, minute=0, second=0, microsecond=0)
-```
-
-**Concepto:**
-- Todos los datos de seed estรกn definidos respecto a una marca de tiempo fija: **6:00 AM UTC del dรญa de creaciรณn de la sesiรณn**
-- Esta marca se calcula dinรกmicamente en tiempo de clonaciรณn
-
-### Transformaciรณn Dinรกmica
-
-**Archivo de referencia:** [`/shared/utils/demo_dates.py:45-93`](shared/utils/demo_dates.py#L45-L93)
-
-```python
-def adjust_date_for_demo(
- original_date: Optional[datetime],
- session_created_at: datetime,
- base_reference_date: datetime = BASE_REFERENCE_DATE
-) -> Optional[datetime]:
- """
- Adjust a date from seed data to be relative to demo session creation time.
-
- Example:
- # Seed data created on 2025-12-13 06:00
- # Stock expiration: 2025-12-28 06:00 (15 days from seed date)
- # Demo session created: 2025-12-16 10:00
- # Base reference: 2025-12-16 06:00
- # Result: 2025-12-31 10:00 (15 days from session date)
- """
- if original_date is None:
- return None
-
- # Ensure timezone-aware datetimes
- if original_date.tzinfo is None:
- original_date = original_date.replace(tzinfo=timezone.utc)
- if session_created_at.tzinfo is None:
- session_created_at = session_created_at.replace(tzinfo=timezone.utc)
- if base_reference_date.tzinfo is None:
- base_reference_date = base_reference_date.replace(tzinfo=timezone.utc)
-
- # Calculate offset from base reference
- offset = original_date - base_reference_date
-
- # Apply offset to session creation date
- return session_created_at + offset
-```
-
-**En tiempo de clonaciรณn:**
-```python
-ฮt = session_created_at - base_reference_date
-new_timestamp = original_timestamp + ฮt
-```
-
-### Aplicaciรณn por Tipo de Dato
-
-| Tipo | Campos Afectados | Regla de Transformación | Archivo de Referencia |
-|------|------------------|-------------------------|------------------------|
-| **Orden de Compra** | `created_at`, `order_date`, `expected_delivery_date`, `approval_deadline` | `+Δt`. Si `expected_delivery_date` cae en fin de semana → desplazar al lunes siguiente | `procurement/app/api/internal_demo.py` |
-| **Lote de producción** | `planned_start_time`, `planned_end_time`, `actual_start_time` | `+Δt`. `actual_start_time = null` para lotes futuros; `actual_start_time = planned_start_time` para lotes en curso | `production/app/api/internal_demo.py` |
-| **Stock** | `received_date`, `expiration_date` | `+Δt`. `expiration_date = received_date + shelf_life_days` | `inventory/app/api/internal_demo.py` |
-| **Pedido cliente** | `order_date`, `delivery_date` | `+Δt`, manteniendo días laborables (lunes-viernes) | `orders/app/api/internal_demo.py` |
-| **Alerta** | `triggered_at`, `acknowledged_at`, `resolved_at` | Solo `triggered_at` se transforma. `resolved_at` se calcula dinámicamente si está resuelta | `alert_processor/app/consumer/event_consumer.py` |
-| **Forecast** | `forecast_date`, `prediction_date` | `+Δt`, alineado a inicio de semana (lunes) | `forecasting/app/api/internal_demo.py` |
-| **Entrega** | `expected_date`, `actual_date` | `+Δt`, con ajuste por horario comercial (8-20h) | `procurement/app/api/internal_demo.py` |
-
-### Funciones Avanzadas de Ajuste Temporal
-
-**Archivo de referencia:** [`/shared/utils/demo_dates.py:264-341`](shared/utils/demo_dates.py#L264-L341)
-
-#### shift_to_session_time
-
-```python
-def shift_to_session_time(
- original_offset_days: int,
- original_hour: int,
- original_minute: int,
- session_created_at: datetime,
- base_reference: Optional[datetime] = None
-) -> datetime:
- """
- Shift a time from seed data to demo session time with same-day preservation.
-
- Ensures that:
- 1. Items scheduled for "today" (offset_days=0) remain on the same day as session creation
- 2. Future items stay in the future, past items stay in the past
- 3. Times don't shift to invalid moments (e.g., past times for pending items)
-
- Examples:
- # Session created at noon, item originally scheduled for morning
- >>> session = datetime(2025, 12, 12, 12, 0, tzinfo=timezone.utc)
- >>> result = shift_to_session_time(0, 6, 0, session) # Today at 06:00
- >>> # Returns today at 13:00 (shifted forward to stay in future)
-
- # Session created at noon, item originally scheduled for evening
- >>> result = shift_to_session_time(0, 18, 0, session) # Today at 18:00
- >>> # Returns today at 18:00 (already in future)
- """
-    # ... (implementación en demo_dates.py)
-```
-
-#### ensure_future_time
-
-```python
-def ensure_future_time(
- target_time: datetime,
- reference_time: datetime,
- min_hours_ahead: float = 1.0
-) -> datetime:
- """
- Ensure a target time is in the future relative to reference time.
-
- If target_time is in the past or too close to reference_time,
- shift it forward by at least min_hours_ahead.
- """
-    # ... (implementación en demo_dates.py)
-```
-
-#### calculate_edge_case_times
-
-**Archivo de referencia:** [`/shared/utils/demo_dates.py:421-477`](shared/utils/demo_dates.py#L421-L477)
-
-```python
-def calculate_edge_case_times(session_created_at: datetime) -> dict:
- """
- Calculate deterministic edge case times for demo sessions.
-
- These times are designed to always create specific demo scenarios:
- - One late delivery (should have arrived hours ago)
- - One overdue production batch (should have started hours ago)
- - One in-progress batch (started recently)
- - One upcoming batch (starts soon)
- - One arriving-soon delivery (arrives in a few hours)
-
- Returns:
- {
- 'late_delivery_expected': session - 4h,
- 'overdue_batch_planned_start': session - 2h,
- 'in_progress_batch_actual_start': session - 1h45m,
- 'upcoming_batch_planned_start': session + 1h30m,
- 'arriving_soon_delivery_expected': session + 2h30m,
- 'evening_batch_planned_start': today 17:00,
- 'tomorrow_morning_planned_start': tomorrow 05:00
- }
- """
-```
-
-### Casos Extremos (Edge Cases) Requeridos para UI/UX
-
-| Escenario | Configuración en Datos Base | Resultado Post-Transformación |
-|-----------|------------------------------|-------------------------------|
-| **OC retrasada** | `expected_delivery_date = BASE_TS - 1d`, `status = "PENDING"` | `expected_delivery_date = session_created_at - 4h` → alerta roja "Retraso de proveedor" |
-| **Lote atrasado** | `planned_start_time = BASE_TS - 2h`, `status = "PENDING"`, `actual_start_time = null` | `planned_start_time = session_created_at - 2h` → alerta amarilla "Producción retrasada" |
-| **Lote en curso** | `planned_start_time = BASE_TS - 1h`, `status = "IN_PROGRESS"`, `actual_start_time = BASE_TS - 1h45m` | `actual_start_time = session_created_at - 1h45m` → producción activa visible |
-| **Lote próximo** | `planned_start_time = BASE_TS + 1.5h`, `status = "PENDING"` | `planned_start_time = session_created_at + 1.5h` → próximo en planificación |
-| **Stock agotado + OC pendiente** | `Ingredient.stock = 0`, `reorder_point = 10`, `PurchaseOrder.status = "PENDING_APPROVAL"` | ✅ Alerta de inventario *no se dispara* (evita duplicidad) |
-| **Stock agotado sin OC** | `Ingredient.stock = 0`, `reorder_point = 10`, **ninguna OC abierta** | ⚠️ Alerta de inventario *sí se dispara*: "Bajo stock → acción requerida" |
-| **Stock próximo a caducar** | `expiration_date = BASE_TS + 2d` | `expiration_date = session_created_at + 2d` → alerta naranja "Caducidad próxima" |
-
-### Implementación en Clonación
-
-**Ejemplo real del servicio de producción:**
-
-```python
-# services/production/app/api/internal_demo.py (simplificado)
-from shared.utils.demo_dates import (
- adjust_date_for_demo,
- calculate_edge_case_times,
- ensure_future_time,
- get_base_reference_date
-)
-
-@router.post("/clone")
-async def clone_production_data(
- request: DemoCloneRequest,
- db: AsyncSession = Depends(get_db)
-):
- session_created_at = datetime.fromisoformat(request.session_created_at)
- base_reference = get_base_reference_date(session_created_at)
- edge_times = calculate_edge_case_times(session_created_at)
-
- # Clone equipment (no date adjustment needed)
- for equipment in base_equipment:
- new_equipment = Equipment(
- id=uuid.uuid4(),
- tenant_id=request.virtual_tenant_id,
- # ... copy fields
- install_date=adjust_date_for_demo(
- equipment.install_date,
- session_created_at,
- base_reference
- )
- )
- db.add(new_equipment)
-
- # Clone production batches with edge cases
- batches = []
-
- # Edge case 1: Overdue batch (should have started 2h ago)
- batches.append({
- "planned_start_time": edge_times["overdue_batch_planned_start"],
- "planned_end_time": edge_times["overdue_batch_planned_start"] + timedelta(hours=3),
- "status": ProductionStatus.PENDING,
- "actual_start_time": None,
- "priority": ProductionPriority.URGENT
- })
-
- # Edge case 2: In-progress batch
- batches.append({
- "planned_start_time": edge_times["in_progress_batch_actual_start"],
- "planned_end_time": edge_times["upcoming_batch_planned_start"],
- "status": ProductionStatus.IN_PROGRESS,
- "actual_start_time": edge_times["in_progress_batch_actual_start"],
- "priority": ProductionPriority.HIGH,
- "current_process_stage": ProcessStage.BAKING
- })
-
- # Edge case 3: Upcoming batch
- batches.append({
- "planned_start_time": edge_times["upcoming_batch_planned_start"],
- "planned_end_time": edge_times["upcoming_batch_planned_start"] + timedelta(hours=2),
- "status": ProductionStatus.PENDING,
- "actual_start_time": None,
- "priority": ProductionPriority.MEDIUM
- })
-
- # Clone remaining batches from seed data
- for base_batch in seed_batches:
- adjusted_start = adjust_date_for_demo(
- base_batch.planned_start_time,
- session_created_at,
- base_reference
- )
-
- # Ensure future batches stay in the future
- if base_batch.status == ProductionStatus.PENDING:
- adjusted_start = ensure_future_time(adjusted_start, session_created_at, min_hours_ahead=1.0)
-
- batches.append({
- "planned_start_time": adjusted_start,
- "planned_end_time": adjust_date_for_demo(
- base_batch.planned_end_time,
- session_created_at,
- base_reference
- ),
- "status": base_batch.status,
- "actual_start_time": adjust_date_for_demo(
- base_batch.actual_start_time,
- session_created_at,
- base_reference
- ) if base_batch.actual_start_time else None,
- # ... other fields
- })
-
- for batch_data in batches:
- new_batch = ProductionBatch(
- id=uuid.uuid4(),
- tenant_id=request.virtual_tenant_id,
- **batch_data
- )
- db.add(new_batch)
-
- await db.commit()
-
- return {
- "service": "production",
- "status": "completed",
- "records_cloned": len(batches) + len(equipment_list),
- "details": {
- "batches": len(batches),
- "equipment": len(equipment_list),
- "edge_cases_created": 3
- }
- }
-```
-
----
-
-
-## 🧱 MODELO DE DATOS BASE — FUENTE ÚNICA DE VERDAD (SSOT)
-
-### Ubicaciรณn Actual vs. Propuesta
-
-**Archivos existentes (legacy):**
-```
-/services/{service}/scripts/demo/{entity}_es.json
-```
-
-**Nueva estructura propuesta:**
-```
-shared/demo/
-├── schemas/                          # JSON Schemas para validación
-│   ├── production/
-│   │   ├── batch.schema.json
-│   │   ├── equipment.schema.json
-│   │   └── quality_check.schema.json
-│   ├── inventory/
-│   │   ├── ingredient.schema.json
-│   │   └── stock.schema.json
-│   ├── recipes/
-│   │   ├── recipe.schema.json
-│   │   └── recipe_ingredient.schema.json
-│   └── ... (más servicios)
-├── fixtures/
-│   ├── professional/
-│   │   ├── 01-tenant.json            # Metadata del tenant base
-│   │   ├── 02-auth.json              # Usuarios demo
-│   │   ├── 03-inventory.json         # Ingredientes + stock
-│   │   ├── 04-recipes.json           # Recetas
-│   │   ├── 05-suppliers.json         # Proveedores
-│   │   ├── 06-production.json        # Equipos + lotes
-│   │   ├── 07-procurement.json       # OCs + planes
-│   │   ├── 08-orders.json            # Clientes + pedidos
-│   │   ├── 09-sales.json             # Historial ventas
-│   │   └── 10-forecasting.json       # Previsiones
-│   └── enterprise/
-│       ├── parent/
-│       │   └── ... (misma estructura)
-│       └── children/
-│           ├── madrid.json           # Datos específicos Madrid
-│           ├── barcelona.json        # Datos específicos Barcelona
-│           └── valencia.json         # Datos específicos Valencia
-└── metadata/
-    ├── tenant_configs.json           # IDs base por tier
-    ├── demo_users.json               # Usuarios hardcoded
-    └── cross_refs_map.json           # Mapa de dependencias
-```
-
-### IDs Fijos por Tier
-
-**Archivo de referencia:** [`/services/demo_session/app/core/config.py:38-72`](services/demo_session/app/core/config.py#L38-L72)
-
-```python
-DEMO_ACCOUNTS = {
- "professional": {
- "email": "demo.professional@panaderiaartesana.com",
- "name": "Panadería Artesana Madrid - Demo",
- "subdomain": "demo-artesana",
- "base_tenant_id": "a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6",
- "subscription_tier": "professional",
- "tenant_type": "standalone"
- },
- "enterprise": {
- "email": "demo.enterprise@panaderiacentral.com",
- "name": "Panadería Central - Demo Enterprise",
- "subdomain": "demo-central",
- "base_tenant_id": "80000000-0000-4000-a000-000000000001",
- "subscription_tier": "enterprise",
- "tenant_type": "parent",
- "children": [
- {
- "name": "Madrid Centro",
- "base_tenant_id": "A0000000-0000-4000-a000-000000000001",
- "location": {
- "city": "Madrid",
- "zone": "Centro",
- "latitude": 40.4168,
- "longitude": -3.7038
- }
- },
- {
- "name": "Barcelona Gràcia",
- "base_tenant_id": "B0000000-0000-4000-a000-000000000001",
- "location": {
- "city": "Barcelona",
- "zone": "Gràcia",
- "latitude": 41.4036,
- "longitude": 2.1561
- }
- },
- {
- "name": "Valencia Ruzafa",
- "base_tenant_id": "C0000000-0000-4000-a000-000000000001",
- "location": {
- "city": "Valencia",
- "zone": "Ruzafa",
- "latitude": 39.4623,
- "longitude": -0.3645
- }
- }
- ]
- }
-}
-```
-
-### Usuarios Demo Hardcoded
-
-```python
-# Estos IDs están hardcoded en tenant/app/api/internal_demo.py
-DEMO_OWNER_IDS = {
- "professional": "c1a2b3c4-d5e6-47a8-b9c0-d1e2f3a4b5c6", # María García López
- "enterprise": "d2e3f4a5-b6c7-48d9-e0f1-a2b3c4d5e6f7" # Carlos Martínez Ruiz
-}
-
-STAFF_USERS = {
- "professional": [
- {
- "user_id": "50000000-0000-0000-0000-000000000001",
- "role": "baker",
- "name": "Juan Panadero"
- },
- {
- "user_id": "50000000-0000-0000-0000-000000000002",
- "role": "sales",
- "name": "Ana Ventas"
- },
- {
- "user_id": "50000000-0000-0000-0000-000000000003",
- "role": "quality_control",
- "name": "Pedro Calidad"
- },
- {
- "user_id": "50000000-0000-0000-0000-000000000004",
- "role": "admin",
- "name": "Laura Admin"
- },
- {
- "user_id": "50000000-0000-0000-0000-000000000005",
- "role": "warehouse",
- "name": "Carlos Almacรฉn"
- },
- {
- "user_id": "50000000-0000-0000-0000-000000000006",
- "role": "production_manager",
- "name": "Isabel Producciรณn"
- }
- ],
- "enterprise": [
- {
- "user_id": "50000000-0000-0000-0000-000000000011",
- "role": "production_manager",
- "name": "Roberto Producciรณn"
- },
- {
- "user_id": "50000000-0000-0000-0000-000000000012",
- "role": "quality_control",
- "name": "Marta Calidad"
- },
- {
- "user_id": "50000000-0000-0000-0000-000000000013",
- "role": "logistics",
- "name": "Javier Logรญstica"
- },
- {
- "user_id": "50000000-0000-0000-0000-000000000014",
- "role": "sales",
- "name": "Carmen Ventas"
- },
- {
- "user_id": "50000000-0000-0000-0000-000000000015",
- "role": "procurement",
- "name": "Luis Compras"
- },
- {
- "user_id": "50000000-0000-0000-0000-000000000016",
- "role": "maintenance",
- "name": "Miguel Mantenimiento"
- }
- ]
-}
-```
-
-### Transformación de IDs Durante Clonación
-
-Cada `base_tenant_id` en los archivos JSON se reemplaza por `virtual_tenant_id` durante la clonación.
-
-**Ejemplo de datos base:**
-
-```json
-// shared/demo/fixtures/professional/06-production.json
-{
- "equipment": [
- {
- "id": "eq-00000000-0001-0000-0000-000000000001",
- "tenant_id": "a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6", // โ Reemplazado
- "name": "Horno de leรฑa principal",
- "type": "OVEN",
- "model": "WoodFire Pro 3000",
- "status": "OPERATIONAL",
- "install_date": "2024-01-15T06:00:00Z"
- }
- ],
- "batches": [
- {
- "id": "batch-00000000-0001-0000-0000-000000000001",
- "tenant_id": "a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6", // โ Reemplazado
- "batch_number": "BATCH-20251213-000001",
- "product_id": "prod-00000000-0001-0000-0000-000000000001", // ref a inventory
- "product_name": "Baguette Tradicional",
- "recipe_id": "recipe-00000000-0001-0000-0000-000000000001", // ref a recipes
- "planned_start_time": "2025-12-13T08:00:00Z", // โ Ajustado por demo_dates
- "planned_end_time": "2025-12-13T11:00:00Z",
- "planned_quantity": 100.0,
- "planned_duration_minutes": 180,
- "status": "PENDING",
- "priority": "HIGH",
- "equipment_used": [
- "eq-00000000-0001-0000-0000-000000000001" // ref a equipment (mismo servicio)
- ]
- }
- ]
-}
-```
-
-**Transformaciรณn durante clonaciรณn:**
-
-```python
-# En production/app/api/internal_demo.py
-virtual_tenant_id = uuid.UUID("new-virtual-uuid-here")
-
-# Transformar equipo
-new_equipment_id = uuid.uuid4()
-equipment_id_map[old_equipment_id] = new_equipment_id
-
-new_equipment = Equipment(
- id=new_equipment_id,
- tenant_id=virtual_tenant_id, # โ REEMPLAZADO
- # ... resto de campos copiados
-)
-
-# Transformar lote
-new_batch = ProductionBatch(
- id=uuid.uuid4(),
- tenant_id=virtual_tenant_id, # โ REEMPLAZADO
- batch_number=f"BATCH-{datetime.now():%Y%m%d}-{random_suffix}", # Nuevo nรบmero
- product_id=batch_data["product_id"], # โ Mantener cross-service ref
- recipe_id=batch_data["recipe_id"], # โ Mantener cross-service ref
- equipment_used=[equipment_id_map[eq_id] for eq_id in batch_data["equipment_used"]], # โ Mapear IDs internos
- # ...
-)
-```
-
----
-
-
-## ESTADO SEMILLA DEL ORQUESTADOR
-
-### Condiciones Iniciales Garantizadas
-
-El estado inicial de la demo no es arbitrario: refleja el **output de la última ejecución del Orquestador en producción simulada**.
-
-| Sistema | Estado Esperado | Justificaciรณn |
-|---------|------------------|---------------|
-| **Inventario** | - 3 ingredientes en `stock < reorder_point`
- 2 con OC pendiente (no disparan alerta)
- 1 sin OC (dispara alerta roja) | Realismo operativo - problemas de abastecimiento |
-| **Producciรณn** | - 1 lote "atrasado" (inicio planeado: hace 2h, status: PENDING)
- 1 lote "en curso" (inicio real: hace 1h45m, status: IN_PROGRESS)
- 2 programados para hoy (futuros) | Simula operaciรณn diaria con variedad de estados |
-| **Procurement** | - 2 OCs pendientes (1 aprobada por IA, 1 en revisiรณn humana)
- 1 OC retrasada (entrega esperada: hace 4h)
- 3 OCs completadas (entregadas hace 1-7 dรญas) | Escenarios de toma de decisiรณn y seguimiento |
-| **Calidad** | - 3 controles completados (2 PASSED, 1 FAILED โ lote en cuarentena)
- 1 control pendiente (lote en QUALITY_CHECK) | Flujo de calidad realista |
-| **Pedidos** | - 5 clientes con pedidos recientes (รบltimos 30 dรญas)
- 2 pedidos pendientes de entrega (delivery_date: hoy/maรฑana)
- 8 pedidos completados | Actividad comercial realista |
-| **Forecasting** | - Previsiones para prรณximos 7 dรญas (generadas "ayer")
- Precisiรณn histรณrica: 88-92% (calculada vs sales reales) | Datos de IA/ML creรญbles |
-
-### Datos Especรญficos para Edge Cases
-
-**Archivo: `shared/demo/fixtures/professional/06-production.json` (ejemplo)**
-
-```json
-{
- "batches": [
- {
- "id": "batch-edge-overdue-0001",
- "tenant_id": "a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6",
- "batch_number": "BATCH-OVERDUE-0001",
- "product_id": "prod-baguette-traditional",
- "product_name": "Baguette Tradicional",
- "recipe_id": "recipe-baguette-traditional",
- "planned_start_time": "BASE_TS - 2h", // Marcador - se ajusta en clonaciรณn
- "planned_end_time": "BASE_TS + 1h",
- "planned_quantity": 100.0,
- "planned_duration_minutes": 180,
- "actual_start_time": null,
- "actual_end_time": null,
- "status": "PENDING",
- "priority": "URGENT",
- "equipment_used": ["eq-oven-main"],
- "reasoning_data": {
- "delay_reason": "equipment_maintenance",
- "delay_reason_i18n_key": "production.delay.equipment_maintenance"
- }
- },
- {
- "id": "batch-edge-in-progress-0001",
- "batch_number": "BATCH-IN-PROGRESS-0001",
- "product_id": "prod-croissant-butter",
- "product_name": "Croissant de Mantequilla",
- "recipe_id": "recipe-croissant-butter",
- "planned_start_time": "BASE_TS - 1h45m",
- "planned_end_time": "BASE_TS + 1h15m",
- "planned_quantity": 50.0,
- "planned_duration_minutes": 180,
- "actual_start_time": "BASE_TS - 1h45m",
- "actual_end_time": null,
- "actual_quantity": null,
- "status": "IN_PROGRESS",
- "priority": "HIGH",
- "current_process_stage": "BAKING",
- "equipment_used": ["eq-oven-main"],
- "staff_assigned": ["50000000-0000-0000-0000-000000000001"]
- },
- {
- "id": "batch-edge-upcoming-0001",
- "batch_number": "BATCH-UPCOMING-0001",
- "product_id": "prod-whole-wheat-bread",
- "product_name": "Pan Integral",
- "recipe_id": "recipe-whole-wheat",
- "planned_start_time": "BASE_TS + 1h30m",
- "planned_end_time": "BASE_TS + 4h30m",
- "planned_quantity": 80.0,
- "planned_duration_minutes": 180,
- "status": "PENDING",
- "priority": "MEDIUM",
- "equipment_used": ["eq-oven-secondary"]
- }
- ]
-}
-```
-
-**Nota:** Los marcadores `BASE_TS ± Xh` se resuelven durante la clonación usando `calculate_edge_case_times()`.
-
----
-
-
-## 🧹 LIMPIEZA DE SESIÓN — ATOMICIDAD Y RESILIENCIA
-
-### Trigger Principal
-
-**API Endpoint:**
-```
-DELETE /api/demo/sessions/{session_id}
-```
-
-**Implementación:**
-```python
-# services/demo_session/app/api/sessions.py
-@router.delete("/{session_id}")
-async def destroy_demo_session(
- session_id: str,
- db: AsyncSession = Depends(get_db)
-):
- """
- Destroy a demo session and all associated data.
- This triggers parallel deletion across all services.
- """
- session = await get_session_by_id(db, session_id)
-
- # Update status to DESTROYING
- session.status = DemoSessionStatus.DESTROYING
- await db.commit()
-
- # Trigger cleanup
- cleanup_service = DemoCleanupService(redis_manager=redis)
- result = await cleanup_service.cleanup_session(session)
-
- if result["success"]:
- session.status = DemoSessionStatus.DESTROYED
- else:
- session.status = DemoSessionStatus.FAILED
- session.error_details = result["errors"]
-
- await db.commit()
-
- return {
- "session_id": session_id,
- "status": session.status,
- "records_deleted": result.get("total_deleted", 0),
- "duration_ms": result.get("duration_ms", 0)
- }
-```
-
-### Limpieza Paralela por Servicio
-
-**Archivo de referencia:** `services/demo_session/app/services/cleanup_service.py` (a crear basado en clone_orchestrator)
-
-```python
-# services/demo_session/app/services/cleanup_service.py
-class DemoCleanupService:
- """Orchestrates parallel demo data deletion via direct HTTP calls"""
-
- async def cleanup_session(self, session: DemoSession) -> Dict[str, Any]:
- """
- Delete all data for a demo session across all services.
-
- Returns:
- {
- "success": bool,
- "total_deleted": int,
- "duration_ms": int,
- "details": {service: {records_deleted, duration_ms}},
- "errors": []
- }
- """
- start_time = time.time()
- virtual_tenant_id = session.virtual_tenant_id
-
- # Define services to clean (same as cloning)
- services = [
- ServiceDefinition("tenant", "http://tenant-service:8000", required=True),
- ServiceDefinition("auth", "http://auth-service:8001", required=True),
- ServiceDefinition("inventory", "http://inventory-service:8002"),
- ServiceDefinition("production", "http://production-service:8003"),
- # ... etc
- ]
-
- # Delete from all services in parallel
- tasks = [
- self._delete_from_service(svc, virtual_tenant_id)
- for svc in services
- ]
-
- results = await asyncio.gather(*tasks, return_exceptions=True)
-
- # Aggregate results
- total_deleted = 0
- details = {}
- errors = []
-
- for svc, result in zip(services, results):
- if isinstance(result, Exception):
- errors.append(f"{svc.name}: {str(result)}")
- details[svc.name] = {"status": "error", "error": str(result)}
- else:
- total_deleted += result.get("records_deleted", {}).get("total", 0)
- details[svc.name] = result
-
- # Delete from Redis
- await self._delete_redis_cache(virtual_tenant_id)
-
- # Delete child tenants if enterprise
- if session.demo_account_type == "enterprise":
- child_metadata = session.session_metadata.get("children", [])
- for child in child_metadata:
- child_tenant_id = child["virtual_tenant_id"]
- await self._delete_from_all_services(child_tenant_id)
-
- duration_ms = int((time.time() - start_time) * 1000)
-
- return {
- "success": len(errors) == 0,
- "total_deleted": total_deleted,
- "duration_ms": duration_ms,
- "details": details,
- "errors": errors
- }
-
- async def _delete_from_service(
- self,
- service: ServiceDefinition,
- virtual_tenant_id: UUID
- ) -> Dict[str, Any]:
- """Delete all data from a single service"""
- async with httpx.AsyncClient(timeout=30.0) as client:
- response = await client.delete(
- f"{service.url}/internal/demo/tenant/{virtual_tenant_id}",
- headers={"X-Internal-API-Key": self.internal_api_key}
- )
-
- if response.status_code == 200:
- return response.json()
- elif response.status_code == 404:
- # Already deleted or never existed - idempotent
- return {
- "service": service.name,
- "status": "not_found",
- "records_deleted": {"total": 0}
- }
- else:
- raise Exception(f"HTTP {response.status_code}: {response.text}")
-
- async def _delete_redis_cache(self, virtual_tenant_id: UUID):
- """Delete all Redis keys for a virtual tenant"""
- pattern = f"*:{virtual_tenant_id}:*"
- keys = await self.redis_manager.keys(pattern)
- if keys:
- await self.redis_manager.delete(*keys)
-```
-
-### Endpoint de Limpieza en Cada Servicio
-
-**Contrato estándar:**
-
-```python
-# services/{service}/app/api/internal_demo.py
-@router.delete(
- "/tenant/{virtual_tenant_id}",
- dependencies=[Depends(verify_internal_key)]
-)
-async def delete_demo_tenant_data(
- virtual_tenant_id: UUID,
- db: AsyncSession = Depends(get_db)
-):
- """
- Delete all demo data for a virtual tenant.
- This endpoint is idempotent - safe to call multiple times.
-
- Returns:
- {
- "service": "production",
- "status": "deleted",
- "virtual_tenant_id": "uuid-here",
- "records_deleted": {
- "batches": 50,
- "equipment": 12,
- "quality_checks": 183,
- "total": 245
- },
- "duration_ms": 567
- }
- """
- start_time = time.time()
-
- records_deleted = {
- "batches": 0,
- "equipment": 0,
- "quality_checks": 0,
- "quality_templates": 0,
- "production_schedules": 0,
- "production_capacity": 0
- }
-
- try:
- # Delete in reverse dependency order
-
- # 1. Delete quality checks (depends on batches)
- result = await db.execute(
- delete(QualityCheck)
- .where(QualityCheck.tenant_id == virtual_tenant_id)
- )
- records_deleted["quality_checks"] = result.rowcount
-
- # 2. Delete production batches
- result = await db.execute(
- delete(ProductionBatch)
- .where(ProductionBatch.tenant_id == virtual_tenant_id)
- )
- records_deleted["batches"] = result.rowcount
-
- # 3. Delete equipment
- result = await db.execute(
- delete(Equipment)
- .where(Equipment.tenant_id == virtual_tenant_id)
- )
- records_deleted["equipment"] = result.rowcount
-
- # 4. Delete quality check templates
- result = await db.execute(
- delete(QualityCheckTemplate)
- .where(QualityCheckTemplate.tenant_id == virtual_tenant_id)
- )
- records_deleted["quality_templates"] = result.rowcount
-
- # 5. Delete production schedules
- result = await db.execute(
- delete(ProductionSchedule)
- .where(ProductionSchedule.tenant_id == virtual_tenant_id)
- )
- records_deleted["production_schedules"] = result.rowcount
-
- # 6. Delete production capacity records
- result = await db.execute(
- delete(ProductionCapacity)
- .where(ProductionCapacity.tenant_id == virtual_tenant_id)
- )
- records_deleted["production_capacity"] = result.rowcount
-
- await db.commit()
-
- records_deleted["total"] = sum(records_deleted.values())
-
- logger.info(
- "demo_data_deleted",
- service="production",
- virtual_tenant_id=str(virtual_tenant_id),
- records_deleted=records_deleted
- )
-
- return {
- "service": "production",
- "status": "deleted",
- "virtual_tenant_id": str(virtual_tenant_id),
- "records_deleted": records_deleted,
- "duration_ms": int((time.time() - start_time) * 1000)
- }
-
- except Exception as e:
- await db.rollback()
- logger.error(
- "demo_data_deletion_failed",
- service="production",
- virtual_tenant_id=str(virtual_tenant_id),
- error=str(e)
- )
- raise HTTPException(
- status_code=500,
- detail=f"Failed to delete demo data: {str(e)}"
- )
-```
-
-### Registro de Auditoría
-
-**Modelo:**
-
-```python
-# services/demo_session/app/models/cleanup_audit.py
-class DemoCleanupAudit(Base):
- """Audit log for demo session cleanup operations"""
- __tablename__ = "demo_cleanup_audit"
-
- id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
- session_id = Column(String(255), nullable=False, index=True)
- virtual_tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)
-
- started_at = Column(DateTime(timezone=True), nullable=False)
- completed_at = Column(DateTime(timezone=True), nullable=True)
- duration_ms = Column(Integer, nullable=True)
-
- total_records_deleted = Column(Integer, default=0)
- service_results = Column(JSON, nullable=True) # Details per service
-
- status = Column(String(50), nullable=False) # SUCCESS, PARTIAL, FAILED
- error_details = Column(JSON, nullable=True)
-
- retry_count = Column(Integer, default=0)
- created_at = Column(DateTime(timezone=True), server_default=func.now())
-```
-
-**Logging:**
-
-```python
-# Al iniciar limpieza
-audit = DemoCleanupAudit(
- session_id=session.session_id,
- virtual_tenant_id=session.virtual_tenant_id,
- started_at=datetime.now(timezone.utc),
- status="IN_PROGRESS"
-)
-db.add(audit)
-await db.commit()
-
-# Al completar
-audit.completed_at = datetime.now(timezone.utc)
-audit.duration_ms = int((audit.completed_at - audit.started_at).total_seconds() * 1000)
-audit.total_records_deleted = cleanup_result["total_deleted"]
-audit.service_results = cleanup_result["details"]
-audit.status = "SUCCESS" if cleanup_result["success"] else "PARTIAL"
-audit.error_details = cleanup_result.get("errors")
-await db.commit()
-```
-
-### CronJob de Limpieza Automática
-
-**Implementación:**
-
-```python
-# services/demo_session/app/services/scheduled_cleanup.py
-from apscheduler.schedulers.asyncio import AsyncIOScheduler
-from datetime import datetime, timezone, timedelta
-
-scheduler = AsyncIOScheduler()
-
-@scheduler.scheduled_job('cron', hour=2, minute=0) # 02:00 UTC diariamente
-async def cleanup_expired_sessions():
- """
- Find and clean up expired demo sessions.
- Runs daily at 2 AM UTC.
- """
- logger.info("scheduled_cleanup_started")
-
- async with get_async_session() as db:
- # Find expired sessions that haven't been destroyed
- one_hour_ago = datetime.now(timezone.utc) - timedelta(hours=1)
-
- result = await db.execute(
- select(DemoSession)
- .where(DemoSession.expires_at < one_hour_ago)
- .where(DemoSession.status.in_([
- DemoSessionStatus.READY,
- DemoSessionStatus.PARTIAL,
- DemoSessionStatus.FAILED
- ]))
- .limit(100) # Process in batches
- )
- expired_sessions = result.scalars().all()
-
- logger.info(
- "expired_sessions_found",
- count=len(expired_sessions)
- )
-
- cleanup_service = DemoCleanupService()
-
- success_count = 0
- partial_count = 0
- failed_count = 0
-
- for session in expired_sessions:
- try:
- result = await cleanup_service.cleanup_session(session)
-
- if result["success"]:
- session.status = DemoSessionStatus.DESTROYED
- success_count += 1
- else:
- session.status = DemoSessionStatus.PARTIAL
- session.error_details = result.get("errors")
- partial_count += 1
-
- # Retry limit
- retry_count = session.cleanup_retry_count or 0
- if retry_count < 3:
- session.cleanup_retry_count = retry_count + 1
- logger.warning(
- "cleanup_partial_will_retry",
- session_id=session.session_id,
- retry_count=session.cleanup_retry_count
- )
- else:
- logger.error(
- "cleanup_max_retries_exceeded",
- session_id=session.session_id
- )
- # Notify ops team
- await notify_ops_team(
- f"Demo cleanup failed after 3 retries: {session.session_id}"
- )
-
- except Exception as e:
- logger.error(
- "cleanup_exception",
- session_id=session.session_id,
- error=str(e)
- )
- session.status = DemoSessionStatus.FAILED
- session.error_details = {"exception": str(e)}
- failed_count += 1
-
- await db.commit()
-
- logger.info(
- "scheduled_cleanup_completed",
- total=len(expired_sessions),
- success=success_count,
- partial=partial_count,
- failed=failed_count
- )
-
- # Alert if >5% failure rate
- if len(expired_sessions) > 0:
- failure_rate = (partial_count + failed_count) / len(expired_sessions)
- if failure_rate > 0.05:
- await notify_ops_team(
- f"High demo cleanup failure rate: {failure_rate:.1%} "
- f"({partial_count + failed_count}/{len(expired_sessions)})"
- )
-
-# Start scheduler on app startup
-def start_scheduled_cleanup():
- scheduler.start()
- logger.info("scheduled_cleanup_started")
-```
-
-**Iniciar en app startup:**
-
-```python
-# services/demo_session/app/main.py
-from app.services.scheduled_cleanup import start_scheduled_cleanup
-
-@app.on_event("startup")
-async def startup_event():
- start_scheduled_cleanup()
- logger.info("application_started")
-```
-
----
-
-
-## ESCENARIOS DE DEMO — ESPECIFICACIÓN DETALLADA POR TIER
-
-### Tier Professional: "Panadería Artesana Madrid"
-
-**Configuraciรณn base:** [`services/demo_session/app/core/config.py:39-46`](services/demo_session/app/core/config.py#L39-L46)
-
-| Dimensiรณn | Detalle |
-|-----------|---------|
-| **Ubicaciรณn** | Madrid, Espaรฑa (coordenadas ficticias) |
-| **Modelo** | Tienda + obrador integrado (standalone) |
-| **Equipo** | 6 personas:
- Marรญa Garcรญa (Owner/Admin)
- Juan Panadero (Baker)
- Ana Ventas (Sales)
- Pedro Calidad (Quality Control)
- Carlos Almacรฉn (Warehouse)
- Isabel Producciรณn (Production Manager) |
-| **Productos** | ~24 referencias:
- 12 panes (baguette, integral, centeno, payรฉs, etc.)
- 8 bollerรญa (croissant, napolitana, ensaimada, etc.)
- 4 pastelerรญa (tarta, pastel, galletas, brownies) |
-| **Recetas** | 18-24 activas (alineadas con productos) |
-| **Maquinaria** | - 1 horno de leรฑa (OPERATIONAL)
- 1 horno secundario (OPERATIONAL)
- 2 amasadoras (1 en MAINTENANCE)
- 1 laminadora (OPERATIONAL)
- 1 cortadora (OPERATIONAL) |
-| **Proveedores** | 5-6:
- Harinas del Norte (harina)
- Lรกcteos Gipuzkoa (leche/mantequilla)
- Frutas Frescas (frutas)
- Sal de Mar (sal)
- Envases Pro (packaging)
- Levaduras Spain (levadura) |
-| **Datos operativos** | **Stock crรญtico:**
- Harina: 5 kg (reorder_point: 50 kg) โ OC pendiente
- Levadura: 0 kg (reorder_point: 10 kg) โ SIN OC โ โ ALERTA
- Mantequilla: 2 kg (reorder_point: 15 kg) โ OC aprobada
**Lotes hoy:**
- OVERDUE: Baguette (debiรณ empezar hace 2h) โ โ ๏ธ ALERTA
- IN_PROGRESS: Croissant (empezรณ hace 1h45m, etapa: BAKING)
- UPCOMING: Pan Integral (empieza en 1h30m)
**Alertas activas:**
- ๐ด "Levadura agotada โ crear OC urgente"
- ๐ก "Producciรณn retrasada โ Baguette" |
-| **Dashboard KPIs** | - % cumplimiento producciรณn: 87%
- Stock crรญtico: 3 ingredientes
- Alertas abiertas: 2
- Forecasting precisiรณn: 92% |
-
-### ๐ข Tier Enterprise: "Panaderรญa Central Group"
-
-**Configuraciรณn base:** [`services/demo_session/app/core/config.py:47-72`](services/demo_session/app/core/config.py#L47-L72)
-
-| Dimensiรณn | Detalle |
-|-----------|----------|
-| **Estructura** | 1 obrador central (parent tenant) + 3 tiendas (child outlets):
- **Madrid Centro**
- **Barcelona Grร cia**
- **Valencia Ruzafa** |
-| **Equipo** | ~20 personas:
**Central:**
- Carlos Martรญnez (Owner/Director Operativo)
- Roberto Producciรณn (Production Manager)
- Marta Calidad (Quality Control)
- Luis Compras (Procurement)
- Miguel Mantenimiento (Maintenance)
**Por tienda (5 personas cada una):**
- Gerente
- 2 vendedores
- 1 panadero
- 1 warehouse |
-| **Producciรณn** | Centralizada en obrador principal:
- Produce para las 3 tiendas
- Distribuye por rutas nocturnas
- Capacidad: 500 kg/dรญa |
-| **Logรญstica** | **Rutas diarias:**
- Ruta 1: Madrid โ Barcelona (salida: 23:00, llegada: 05:00)
- Ruta 2: Madrid โ Valencia (salida: 01:00, llegada: 06:00)
- Reparto local Madrid (salida: 05:00) |
-| **Datos por tienda** | **Madrid Centro:**
- Alta rotaciรณn
- Stock ajustado (just-in-time)
- Pedidos: 15-20/dรญa
**Barcelona Grร cia:**
- Alta demanda turรญstica
- Productos premium (croissants mantequilla francesa)
- โ Alerta activa: "Stock bajo brioche premium"
**Valencia Ruzafa:**
- En crecimiento
- Stock mรกs conservador
- Pedidos: 10-15/dรญa |
-| **Alertas multisite** | - ๐ด BCN: "Stock bajo de brioche premium โ OC creada por IA"
- ๐ Obrador: "Capacidad horno al 95% โ riesgo cuello de botella"
- ๐ก Logรญstica: "Retraso ruta Barcelona โ ETA +30 min (trรกfico A-2)" |
-| **Dashboard Enterprise** | **Mapa de alertas:**
- Vista geogrรกfica con marcadores por tienda
- Drill-down por ubicaciรณn
**Comparativa KPIs:**
- Producciรณn por tienda
- Stock por ubicaciรณn
- Margen por tienda
**Forecasting agregado:**
- Precisiรณn: 88%
- Prรณxima semana: +12% demanda prevista
**ROI de automatizaciรณn IA:**
- Reducciรณn desperdicio: 17%
- Ahorro procurement: โฌ1,200/mes |
-
----
-
-
-## โ
VERIFICACIรN TรCNICA
-
-### Requisitos de Validaciรณn
-
-| Requisito | Cรณmo se Verifica | Tooling/Mรฉtrica | Umbral de รxito |
-|-----------|-------------------|-----------------|-----------------|
-| **Determinismo** | Ejecutar 100 clonaciones del mismo tier โ comparar hash SHA-256 de todos los datos (por servicio) | Script `scripts/test/demo_determinism.py` | 100% de hashes idรฉnticos (excluir timestamps audit) |
-| **Coherencia cruzada** | Post-clonado, ejecutar `CrossServiceIntegrityChecker` โ validar todas las referencias UUID | Script `scripts/validate_cross_refs.py` (CI/CD) | 0 errores de integridad |
-| **Latencia Professional** | P50, P95, P99 de tiempo de creaciรณn | Prometheus: `demo_session_creation_duration_seconds{tier="professional"}` | P50 < 7s, P95 < 12s, P99 < 15s |
-| **Latencia Enterprise** | P50, P95, P99 de tiempo de creaciรณn | Prometheus: `demo_session_creation_duration_seconds{tier="enterprise"}` | P50 < 12s, P95 < 18s, P99 < 22s |
-| **Realismo temporal** | En sesiรณn creada a las 10:00, lote programado para "session + 1.5h" โ debe ser 11:30 exacto | Validador `scripts/test/time_delta_validator.py` | 0 desviaciones > ยฑ1 minuto |
-| **Alertas inmediatas** | Tras creaciรณn, `GET /alerts?tenant_id={virtual}&status=open` debe devolver โฅ2 alertas | Cypress E2E: `cypress/e2e/demo_session_spec.js` | Professional: โฅ2 alertas
Enterprise: โฅ4 alertas |
-| **Limpieza atรณmica** | Tras `DELETE`, consulta directa a DB de cada servicio โ 0 registros con `tenant_id = virtual_tenant_id` | Script `scripts/test/cleanup_verification.py` | 0 registros residuales en todas las tablas |
-| **Escalabilidad** | 50 sesiones concurrentes โ tasa de รฉxito, sin timeouts | Locust: `locust -f scripts/load_test/demo_load_test.py --users 50 --spawn-rate 5` | โฅ99% tasa de รฉxito
0 timeouts HTTP |
-| **Idempotencia limpieza** | Llamar `DELETE` 3 veces consecutivas al mismo session_id โ todas devuelven 200, sin errores | Script `scripts/test/idempotency_test.py` | 3/3 llamadas exitosas |
-
-### Scripts de Validaciรณn
-
-#### 1. Determinismo
-
-```python
-# scripts/test/demo_determinism.py
-"""
-Test deterministic cloning by creating multiple sessions and comparing data hashes.
-"""
-import asyncio
-import hashlib
-import json
-from typing import List, Dict
-import httpx
-
-DEMO_API_URL = "http://localhost:8018"
-INTERNAL_API_KEY = "test-internal-key"
-
-async def create_demo_session(tier: str = "professional") -> dict:
- """Create a demo session"""
- async with httpx.AsyncClient() as client:
- response = await client.post(
- f"{DEMO_API_URL}/api/demo/sessions",
- json={"demo_account_type": tier}
- )
- return response.json()
-
-async def get_all_data_from_service(
- service_url: str,
- tenant_id: str
-) -> dict:
- """Fetch all data for a tenant from a service"""
- async with httpx.AsyncClient() as client:
- response = await client.get(
- f"{service_url}/internal/demo/export/{tenant_id}",
- headers={"X-Internal-API-Key": INTERNAL_API_KEY}
- )
- return response.json()
-
-def calculate_data_hash(data: dict) -> str:
- """
- Calculate SHA-256 hash of data, excluding audit timestamps.
- """
- # Remove non-deterministic fields
- clean_data = remove_audit_fields(data)
-
- # Sort keys for consistency
- json_str = json.dumps(clean_data, sort_keys=True)
-
- return hashlib.sha256(json_str.encode()).hexdigest()
-
-def remove_audit_fields(data: dict) -> dict:
- """Remove created_at, updated_at fields recursively"""
- if isinstance(data, dict):
- return {
- k: remove_audit_fields(v)
- for k, v in data.items()
- if k not in ["created_at", "updated_at", "id"] # IDs are UUIDs
- }
- elif isinstance(data, list):
- return [remove_audit_fields(item) for item in data]
- else:
- return data
-
-async def test_determinism(tier: str = "professional", iterations: int = 100):
- """
- Test that cloning is deterministic across multiple sessions.
- """
- print(f"Testing determinism for {tier} tier ({iterations} iterations)...")
-
- services = [
- ("inventory", "http://inventory-service:8002"),
- ("production", "http://production-service:8003"),
- ("recipes", "http://recipes-service:8004"),
- ]
-
- hashes_by_service = {svc[0]: [] for svc in services}
-
- for i in range(iterations):
- # Create session
- session = await create_demo_session(tier)
- tenant_id = session["virtual_tenant_id"]
-
- # Get data from each service
- for service_name, service_url in services:
- data = await get_all_data_from_service(service_url, tenant_id)
- data_hash = calculate_data_hash(data)
- hashes_by_service[service_name].append(data_hash)
-
- # Cleanup
- async with httpx.AsyncClient() as client:
- await client.delete(f"{DEMO_API_URL}/api/demo/sessions/{session['session_id']}")
-
- if (i + 1) % 10 == 0:
- print(f" Completed {i + 1}/{iterations} iterations")
-
- # Check consistency
- all_consistent = True
- for service_name, hashes in hashes_by_service.items():
- unique_hashes = set(hashes)
- if len(unique_hashes) == 1:
- print(f"โ
{service_name}: All {iterations} hashes identical")
- else:
- print(f"โ {service_name}: {len(unique_hashes)} different hashes found!")
- all_consistent = False
-
- if all_consistent:
- print("\nโ
DETERMINISM TEST PASSED")
- return 0
- else:
- print("\nโ DETERMINISM TEST FAILED")
- return 1
-
-if __name__ == "__main__":
- exit_code = asyncio.run(test_determinism())
- exit(exit_code)
-```
-
-#### 2. Latencia y Escalabilidad (Locust)
-
-```python
-# scripts/load_test/demo_load_test.py
-"""
-Load test for demo session creation using Locust.
-
-Usage:
- locust -f demo_load_test.py --users 50 --spawn-rate 5 --run-time 5m
-"""
-from locust import HttpUser, task, between
-import random
-
-class DemoSessionUser(HttpUser):
- wait_time = between(1, 3) # Wait 1-3s between tasks
-
- def on_start(self):
- """Called when a user starts"""
- self.session_id = None
-
- @task(3)
- def create_professional_session(self):
- """Create a professional tier demo session"""
- with self.client.post(
- "/api/demo/sessions",
- json={"demo_account_type": "professional"},
- catch_response=True
- ) as response:
- if response.status_code == 200:
- data = response.json()
- self.session_id = data.get("session_id")
-
- # Check if cloning completed successfully
- if data.get("status") == "READY":
- response.success()
- else:
- response.failure(f"Session not ready: {data.get('status')}")
- else:
- response.failure(f"Failed to create session: {response.status_code}")
-
- @task(1)
- def create_enterprise_session(self):
- """Create an enterprise tier demo session"""
- with self.client.post(
- "/api/demo/sessions",
- json={"demo_account_type": "enterprise"},
- catch_response=True
- ) as response:
- if response.status_code == 200:
- data = response.json()
- self.session_id = data.get("session_id")
-
- if data.get("status") == "READY":
- response.success()
- else:
- response.failure(f"Session not ready: {data.get('status')}")
- else:
- response.failure(f"Failed to create session: {response.status_code}")
-
- @task(1)
- def get_session_status(self):
- """Get status of current session"""
- if self.session_id:
- self.client.get(f"/api/demo/sessions/{self.session_id}/status")
-
- def on_stop(self):
- """Called when a user stops - cleanup session"""
- if self.session_id:
- self.client.delete(f"/api/demo/sessions/{self.session_id}")
-```
-
-**Ejecutar:**
-
-```bash
-# Test de carga: 50 usuarios concurrentes
-locust -f scripts/load_test/demo_load_test.py \
- --host http://localhost:8018 \
- --users 50 \
- --spawn-rate 5 \
- --run-time 5m \
- --html reports/demo_load_test_report.html
-
-# Analizar resultados
-# P95 latency debe ser < 12s (professional), < 18s (enterprise)
-# Failure rate debe ser < 1%
-```
-
-### Mรฉtricas de Prometheus
-
-**Archivo:** `services/demo_session/app/monitoring/metrics.py`
-
-```python
-from prometheus_client import Counter, Histogram, Gauge
-
-# Counters
-demo_sessions_created_total = Counter(
- 'demo_sessions_created_total',
- 'Total number of demo sessions created',
- ['tier', 'status']
-)
-
-demo_sessions_deleted_total = Counter(
- 'demo_sessions_deleted_total',
- 'Total number of demo sessions deleted',
- ['tier', 'status']
-)
-
-demo_cloning_errors_total = Counter(
- 'demo_cloning_errors_total',
- 'Total number of cloning errors',
- ['tier', 'service', 'error_type']
-)
-
-# Histograms (for latency percentiles)
-demo_session_creation_duration_seconds = Histogram(
- 'demo_session_creation_duration_seconds',
- 'Duration of demo session creation',
- ['tier'],
- buckets=[1, 2, 5, 7, 10, 12, 15, 18, 20, 25, 30, 40, 50, 60]
-)
-
-demo_service_clone_duration_seconds = Histogram(
- 'demo_service_clone_duration_seconds',
- 'Duration of individual service cloning',
- ['tier', 'service'],
- buckets=[0.5, 1, 2, 3, 5, 10, 15, 20, 30, 40, 50]
-)
-
-demo_session_cleanup_duration_seconds = Histogram(
- 'demo_session_cleanup_duration_seconds',
- 'Duration of demo session cleanup',
- ['tier'],
- buckets=[0.5, 1, 2, 5, 10, 15, 20, 30]
-)
-
-# Gauges
-demo_sessions_active = Gauge(
- 'demo_sessions_active',
- 'Number of currently active demo sessions',
- ['tier']
-)
-
-demo_sessions_pending_cleanup = Gauge(
- 'demo_sessions_pending_cleanup',
- 'Number of demo sessions pending cleanup'
-)
-```
-
-**Queries de ejemplo:**
-
-```promql
-# P95 latency for professional tier
-histogram_quantile(0.95,
- rate(demo_session_creation_duration_seconds_bucket{tier="professional"}[5m])
-)
-
-# P99 latency for enterprise tier
-histogram_quantile(0.99,
- rate(demo_session_creation_duration_seconds_bucket{tier="enterprise"}[5m])
-)
-
-# Error rate by service
-rate(demo_cloning_errors_total[5m])
-
-# Active sessions by tier
-demo_sessions_active
-```
-
----
-
-## ๐ APรNDICES
-
-### A. Endpoints Internos Completos
-
-#### Tenant Service
-
-```
-POST /internal/demo/clone
-POST /internal/demo/create-child (enterprise only)
-DELETE /internal/demo/tenant/{virtual_tenant_id}
-HEAD /internal/demo/exists?entity_type=tenant&id={id}&tenant_id={tenant_id}
-GET /internal/demo/export/{tenant_id} (for testing)
-```
-
-#### Otros Servicios (Auth, Inventory, Production, etc.)
-
-```
-POST /internal/demo/clone
-DELETE /internal/demo/tenant/{virtual_tenant_id}
-HEAD /internal/demo/exists?entity_type={type}&id={id}&tenant_id={tenant_id}
-GET /internal/demo/export/{tenant_id} (for testing)
-```
-
-#### Production Service (triggers post-clone)
-
-```
-POST /internal/alerts/trigger (trigger production alerts)
-POST /internal/insights/yield/trigger (trigger yield insights)
-```
-
-#### Inventory Service (triggers post-clone)
-
-```
-POST /internal/alerts/trigger (trigger inventory alerts)
-POST /internal/insights/safety-stock/trigger
-```
-
-#### Procurement Service (triggers post-clone)
-
-```
-POST /internal/delivery-tracking/trigger
-POST /internal/insights/price/trigger
-```
-
-### B. Variables de Entorno
-
-```bash
-# Demo Session Service
-DEMO_SESSION_DATABASE_URL=postgresql+asyncpg://user:pass@localhost:5432/demo_session_db
-DEMO_SESSION_DURATION_MINUTES=30
-DEMO_SESSION_MAX_EXTENSIONS=3
-DEMO_SESSION_CLEANUP_INTERVAL_MINUTES=60
-
-# Internal API Key (shared across services)
-INTERNAL_API_KEY=your-secret-internal-key
-
-# Service URLs (Kubernetes defaults)
-TENANT_SERVICE_URL=http://tenant-service:8000
-AUTH_SERVICE_URL=http://auth-service:8001
-INVENTORY_SERVICE_URL=http://inventory-service:8002
-PRODUCTION_SERVICE_URL=http://production-service:8003
-RECIPES_SERVICE_URL=http://recipes-service:8004
-PROCUREMENT_SERVICE_URL=http://procurement-service:8005
-SUPPLIERS_SERVICE_URL=http://suppliers-service:8006
-ORDERS_SERVICE_URL=http://orders-service:8007
-SALES_SERVICE_URL=http://sales-service:8008
-FORECASTING_SERVICE_URL=http://forecasting-service:8009
-ORCHESTRATOR_SERVICE_URL=http://orchestrator-service:8012
-
-# Redis
-REDIS_URL=redis://redis:6379/0
-REDIS_KEY_PREFIX=demo:session
-REDIS_SESSION_TTL=1800 # 30 minutes
-
-# Feature Flags
-ENABLE_DEMO_AI_INSIGHTS=true
-ENABLE_DEMO_ALERT_GENERATION=true
-```
-
-### C. Modelos de Base de Datos Clave
-
-Ver archivos de referencia:
-- [`services/production/app/models/production.py`](services/production/app/models/production.py)
-- [`services/inventory/app/models/inventory.py`](services/inventory/app/models/inventory.py)
-- [`services/tenant/app/models/tenants.py`](services/tenant/app/models/tenants.py)
-- [`services/auth/app/models/users.py`](services/auth/app/models/users.py)
-- [`services/demo_session/app/models/demo_session.py`](services/demo_session/app/models/demo_session.py)
-
-### D. Archivos de Referencia del Proyecto
-
-| Archivo | Descripciรณn |
-|---------|-------------|
-| [`services/demo_session/app/core/config.py`](services/demo_session/app/core/config.py) | Configuraciรณn de cuentas demo y settings |
-| [`services/demo_session/app/services/clone_orchestrator.py`](services/demo_session/app/services/clone_orchestrator.py) | Orquestador de clonaciรณn paralela |
-| [`shared/utils/demo_dates.py`](shared/utils/demo_dates.py) | Utilidades de ajuste temporal |
-| [`services/production/app/api/internal_demo.py`](services/production/app/api/internal_demo.py) | Implementaciรณn de clonaciรณn (ejemplo) |
-| [`SIMPLIFIED_DEMO_SESSION_ARCHITECTURE.md`](SIMPLIFIED_DEMO_SESSION_ARCHITECTURE.md) | Arquitectura simplificada actual |
-
----
-
-## ๐ฏ CONCLUSIรN
-
-Este documento especifica un sistema de demostraciรณn tรฉcnica **production-grade**, con:
-
-โ
**Integridad garantizada** mediante validaciรณn cross-service de referencias UUID
-โ
**Determinismo temporal** con ajuste dinรกmico relativo a `session_created_at`
-โ
**Edge cases realistas** que generan alertas y insights automรกticamente
-โ
**Clonaciรณn paralela** en 5-15 segundos (3-6x mรกs rรกpido que arquitectura anterior)
-โ
**Limpieza atรณmica** con idempotencia y registro de auditorรญa
-โ
**Validaciรณn CI/CD** de esquemas JSON y referencias cruzadas
-โ
**Mรฉtricas y observabilidad** con Prometheus
-
-**Resultado:** Demos tรฉcnicamente impecables que simulan entornos productivos reales, sin infraestructura pesada, con datos coherentes y reproducibles.
-
----
-
-**Versiรณn:** 1.0
-**Fecha:** 2025-12-13
-**Autor:** Basado en arquitectura real de bakery-ia
-**Mantenido por:** Equipo de Infraestructura y DevOps
diff --git a/DEMO_SESSION_ANALYSIS_REPORT.md b/DEMO_SESSION_ANALYSIS_REPORT.md
deleted file mode 100644
index 081a4a3d..00000000
--- a/DEMO_SESSION_ANALYSIS_REPORT.md
+++ /dev/null
@@ -1,451 +0,0 @@
-# Demo Session & AI Insights Analysis Report
-**Date**: 2025-12-16
-**Session ID**: demo_VvDEcVRsuM3HjWDRH67AEw
-**Virtual Tenant ID**: 740b96c4-d242-47d7-8a6e-a0a8b5c51d5e
-
----
-
-## Executive Summary
-
-โ
**Overall Status**: Demo session cloning **MOSTLY SUCCESSFUL** with **1 critical error** (orchestrator service)
-โ
**AI Insights**: **1 insight generated successfully**
-โ ๏ธ **Issues Found**: 2 issues (1 critical, 1 warning)
-
----
-
-## 1. Demo Session Cloning Results
-
-### Session Creation (06:10:28)
-- **Status**: โ
SUCCESS
-- **Session ID**: `demo_VvDEcVRsuM3HjWDRH67AEw`
-- **Virtual Tenant ID**: `740b96c4-d242-47d7-8a6e-a0a8b5c51d5e`
-- **Account Type**: Professional
-- **Total Duration**: ~30 seconds
-
-### Service-by-Service Cloning Results
-
-| Service | Status | Records Cloned | Duration (ms) | Notes |
-|---------|--------|----------------|---------------|-------|
-| **Tenant** | โ
Completed | 9 | 170 | No issues |
-| **Auth** | โ
Completed | 0 | 174 | No users cloned (expected) |
-| **Suppliers** | โ
Completed | 6 | 184 | No issues |
-| **Recipes** | โ
Completed | 28 | 194 | No issues |
-| **Sales** | โ
Completed | 44 | 105 | No issues |
-| **Forecasting** | โ
Completed | 0 | 181 | No forecasts cloned |
-| **Orders** | โ
Completed | 9 | 199 | No issues |
-| **Production** | โ
Completed | 106 | 538 | No issues |
-| **Inventory** | โ
Completed | **903** | 763 | **Largest dataset!** |
-| **Procurement** | โ
Completed | 28 | 1999 | Slow but successful |
-| **Orchestrator** | โ **FAILED** | 0 | 21 | **HTTP 500 ERROR** |
-
-**Total Records Cloned**: 1,133 (out of expected ~1,140)
-
-### Cloning Timeline
-```
-06:10:28.654 - Session created (status: pending)
-06:10:28.710 - Background cloning task started
-06:10:28.737 - Parallel service cloning initiated (11 services)
-06:10:28.903 - First services complete (sales, tenant, auth, suppliers, recipes)
-06:10:29.000 - Mid-tier services complete (forecasting, orders)
-06:10:29.329 - Production service complete (106 records)
-06:10:29.763 - Inventory service complete (903 records)
-06:10:30.000 - Procurement service complete (28 records)
-06:10:30.000 - Orchestrator service FAILED (HTTP 500)
-06:10:34.000 - Alert generation completed (11 alerts)
-06:10:58.000 - AI insights generation completed (1 insight)
-06:10:58.116 - Session status updated to 'ready'
-```
-
----
-
-## 2. Critical Issues Identified
-
-### ๐ด ISSUE #1: Orchestrator Service Clone Failure (CRITICAL)
-
-**Error Message**:
-```
-HTTP 500: {"detail":"Failed to clone orchestration runs: name 'OrchestrationStatus' is not defined"}
-```
-
-**Root Cause**:
-File: [services/orchestrator/app/api/internal_demo.py:112](services/orchestrator/app/api/internal_demo.py#L112)
-
-```python
-# Line 112 - BUG: OrchestrationStatus not imported
-status=OrchestrationStatus[orchestration_run_data["status"]],
-```
-
-The code references `OrchestrationStatus` but **never imports it**. Looking at the imports:
-
-```python
-from app.models.orchestration_run import OrchestrationRun # Line 16
-```
-
-It imports `OrchestrationRun` but NOT `OrchestrationStatus` enum!
-
-**Impact**:
-- Orchestrator service failed to clone demo data
-- No orchestration runs in demo session
-- Orchestration history page will be empty
-- **Does NOT impact AI insights** (they don't depend on orchestrator data)
-
-**Solution**:
-```python
-# Fix: Add OrchestrationStatus to imports (line 16)
-from app.models.orchestration_run import OrchestrationRun, OrchestrationStatus
-```
-
-### โ ๏ธ ISSUE #2: Demo Cleanup Worker Pods Failing (WARNING)
-
-**Error Message**:
-```
-demo-cleanup-worker-854c9b8688-klddf 0/1 ErrImageNeverPull
-demo-cleanup-worker-854c9b8688-spgvn 0/1 ErrImageNeverPull
-```
-
-**Root Cause**:
-The demo-cleanup-worker pods cannot pull their Docker image. This is likely due to:
-1. Image not built locally (using local Kubernetes cluster)
-2. ImagePullPolicy set to "Never" but image doesn't exist
-3. Missing image in local registry
-
-**Impact**:
-- Automatic cleanup of expired demo sessions may not work
-- Old demo sessions might accumulate in database
-- Manual cleanup required via cron job or API
-
-**Solution**:
-1. Build the image: `docker build -t demo-cleanup-worker:latest services/demo_session/`
-2. Or change ImagePullPolicy in deployment YAML
-3. Or rely on CronJob cleanup (which is working - see completed jobs)
-
----
-
-## 3. AI Insights Generation
-
-### โ
SUCCESS: 1 Insight Generated
-
-**Timeline**:
-```
-06:10:58 - AI insights generation post-clone completed
- tenant_id=740b96c4-d242-47d7-8a6e-a0a8b5c51d5e
- total_insights_generated=1
-```
-
-**Insight Posted**:
-```
-POST /api/v1/tenants/740b96c4-d242-47d7-8a6e-a0a8b5c51d5e/insights
-Response: 201 Created
-```
-
-**Insight Retrieval (Successful)**:
-```
-GET /api/v1/tenants/740b96c4-d242-47d7-8a6e-a0a8b5c51d5e/insights?priority=high&status=new&limit=5
-Response: 200 OK
-```
-
-### Why Only 1 Insight?
-
-Based on the architecture review, AI insights are generated by:
-1. **Inventory Service** - Safety Stock Optimizer (needs 90 days of stock movements)
-2. **Production Service** - Yield Predictor (needs worker assignments)
-3. **Forecasting Service** - Demand Analyzer (needs sales history)
-4. **Procurement Service** - Price/Supplier insights (needs purchase history)
-
-**Analysis of Demo Data**:
-
-| Service | Data Present | AI Model Triggered? | Insights Expected |
-|---------|--------------|---------------------|-------------------|
-| Inventory | โ
903 records | **Unknown** | 2-3 insights if stock movements present |
-| Production | โ
106 batches | **Unknown** | 2-3 insights if worker data present |
-| Forecasting | โ ๏ธ 0 forecasts | โ NO | 0 insights (no data) |
-| Procurement | โ
28 records | **Unknown** | 1-2 insights if PO history present |
-
-**Likely Reason for Only 1 Insight**:
-- The demo fixture files may NOT have been populated with the generated AI insights data yet
-- Need to verify if [generate_ai_insights_data.py](shared/demo/fixtures/professional/generate_ai_insights_data.py) was run
-- Without 90 days of stock movements and worker assignments, models can't generate insights
-
----
-
-## 4. Service Health Status
-
-All core services are **HEALTHY**:
-
-| Service | Status | Health Check | Database | Notes |
-|---------|--------|--------------|----------|-------|
-| AI Insights | โ
Running | โ
OK | โ
Connected | Accepting insights |
-| Demo Session | โ
Running | โ
OK | โ
Connected | Cloning works |
-| Inventory | โ
Running | โ
OK | โ
Connected | Publishing alerts |
-| Production | โ
Running | โ
OK | โ
Connected | No errors |
-| Forecasting | โ
Running | โ
OK | โ
Connected | No errors |
-| Procurement | โ
Running | โ
OK | โ
Connected | No errors |
-| Orchestrator | โ ๏ธ Running | โ
OK | โ
Connected | **Clone endpoint broken** |
-
-### Database Migrations
-All migrations completed successfully:
-- โ
ai-insights-migration (completed 5m ago)
-- โ
demo-session-migration (completed 4m ago)
-- โ
forecasting-migration (completed 4m ago)
-- โ
inventory-migration (completed 4m ago)
-- โ
orchestrator-migration (completed 4m ago)
-- โ
procurement-migration (completed 4m ago)
-- โ
production-migration (completed 4m ago)
-
----
-
-## 5. Alerts Generated (Post-Clone)
-
-### โ
SUCCESS: 11 Alerts Created
-
-**Alert Summary** (06:10:34):
-```
-Alert generation post-clone completed
-- delivery_alerts: 0
-- inventory_alerts: 10
-- production_alerts: 1
-- total: 11 alerts
-```
-
-**Inventory Alerts** (10):
-- Detected urgent expiry events for "Leche Entera Fresca"
-- Alerts published to RabbitMQ (`alert.inventory.high`)
-- Multiple tenants receiving alerts (including demo tenant `740b96c4-d242-47d7-8a6e-a0a8b5c51d5e`)
-
-**Production Alerts** (1):
-- Production alert generated for demo tenant
-
----
-
-## 6. HTTP Request Analysis
-
-### โ
All API Requests Successful (Except Orchestrator)
-
-**Demo Session API**:
-```
-POST /api/v1/demo/sessions โ 201 Created โ
-GET /api/v1/demo/sessions/{id} โ 200 OK โ
(multiple times for status polling)
-```
-
-**AI Insights API**:
-```
-POST /api/v1/tenants/{id}/insights โ 201 Created โ
-GET /api/v1/tenants/{id}/insights?priority=high&status=new&limit=5 โ 200 OK โ
-```
-
-**Orchestrator Clone API**:
-```
-POST /internal/demo/clone โ 500 Internal Server Error โ
-```
-
-### No 4xx/5xx Errors (Except Orchestrator Clone)
-- All inter-service communication working correctly
-- No authentication/authorization issues
-- No timeout errors
-- RabbitMQ message publishing successful
-
----
-
-## 7. Data Verification
-
-### Inventory Service - Stock Movements
-**Expected**: 800+ stock movements (if generate script was run)
-**Actual**: 903 records cloned
-**Status**: โ
**LIKELY INCLUDES GENERATED DATA**
-
-This suggests the [generate_ai_insights_data.py](shared/demo/fixtures/professional/generate_ai_insights_data.py) script **WAS run** before cloning!
-
-### Production Service - Batches
-**Expected**: 200+ batches with worker assignments
-**Actual**: 106 batches cloned
-**Status**: โ ๏ธ **May not have full worker data**
-
-If only 106 batches were cloned (instead of ~300), the fixture may not have complete worker assignments.
-
-### Forecasting Service - Forecasts
-**Expected**: Some forecasts
-**Actual**: 0 forecasts cloned
-**Status**: โ ๏ธ **NO FORECAST DATA**
-
-This explains why no demand forecasting insights were generated.
-
----
-
-## 8. Recommendations
-
-### ๐ด HIGH PRIORITY
-
-**1. Fix Orchestrator Import Bug** (CRITICAL)
-```bash
-# File: services/orchestrator/app/api/internal_demo.py
-# Line 16: Add OrchestrationStatus to imports
-
-# Before:
-from app.models.orchestration_run import OrchestrationRun
-
-# After:
-from app.models.orchestration_run import OrchestrationRun, OrchestrationStatus
-```
-
-**Action Required**: Edit file and redeploy orchestrator service
-
----
-
-### ๐ก MEDIUM PRIORITY
-
-**2. Verify AI Insights Data Generation**
-
-Run the data population script to ensure full AI insights support:
-
-```bash
-cd /Users/urtzialfaro/Documents/bakery-ia
-python shared/demo/fixtures/professional/generate_ai_insights_data.py
-```
-
-Expected output:
-- 800+ stock movements added
-- 200+ worker assignments added
-- 5-8 stockout events created
-
-**3. Check Fixture Files**
-
-Verify these files have the generated data:
-```bash
-# Check stock movements count
-cat shared/demo/fixtures/professional/03-inventory.json | jq '.stock_movements | length'
-# Should be 800+
-
-# Check worker assignments
-cat shared/demo/fixtures/professional/06-production.json | jq '[.batches[] | select(.staff_assigned != null)] | length'
-# Should be 200+
-```
-
----
-
-### ๐ข LOW PRIORITY
-
-**4. Fix Demo Cleanup Worker Image**
-
-Build the cleanup worker image:
-```bash
-cd services/demo_session
-docker build -t demo-cleanup-worker:latest .
-```
-
-Or update deployment to use `imagePullPolicy: IfNotPresent`
-
-**5. Add Forecasting Fixture Data**
-
-The forecasting service cloned 0 records. Consider adding forecast data to enable demand forecasting insights.
-
----
-
-## 9. Testing Recommendations
-
-### Test 1: Verify Orchestrator Fix
-```bash
-# After fixing the import bug, test cloning
-kubectl delete pod -n bakery-ia orchestrator-service-6d4c6dc948-v69q5
-
-# Wait for new pod, then create new demo session
-curl -X POST http://localhost:8000/api/demo/sessions \
- -H "Content-Type: application/json" \
- -d '{"demo_account_type":"professional"}'
-
-# Check orchestrator cloning succeeded
-kubectl logs -n bakery-ia demo-session-service-xxx | grep "orchestrator.*completed"
-```
-
-### Test 2: Verify AI Insights with Full Data
-```bash
-# 1. Run generator script
-python shared/demo/fixtures/professional/generate_ai_insights_data.py
-
-# 2. Create new demo session
-# 3. Wait 60 seconds for AI models to run
-# 4. Query AI insights
-
-curl "http://localhost:8000/api/ai-insights/tenants/{tenant_id}/insights" | jq '.total'
-# Expected: 5-10 insights
-```
-
-### Test 3: Check Orchestration History Page
-```
-# After fixing orchestrator bug:
-# Navigate to: http://localhost:3000/app/operations/orchestration
-# Should see 1 orchestration run with:
-# - Status: completed
-# - Production batches: 18
-# - Purchase orders: 6
-# - Duration: ~15 minutes
-```
-
----
-
-## 10. Summary
-
-### โ
What's Working
-1. **Demo session creation** - Fast and reliable
-2. **Service cloning** - 10/11 services successful (91% success rate)
-3. **Data persistence** - 1,133 records cloned successfully
-4. **AI insights service** - Accepting and serving insights
-5. **Alert generation** - 11 alerts created post-clone
-6. **Frontend polling** - Status updates working
-7. **RabbitMQ messaging** - Events publishing correctly
-
-### โ What's Broken
-1. **Orchestrator cloning** - Missing import causes 500 error
-2. **Demo cleanup workers** - Image pull errors (non-critical)
-
-### โ ๏ธ What's Incomplete
-1. **AI insights generation** - Only 1 insight (expected 5-10)
- - Likely missing 90-day stock movement history
- - Missing worker assignments in production batches
-2. **Forecasting data** - No forecasts in fixture (0 records)
-
-### ๐ฏ Priority Actions
-1. **FIX NOW**: Add `OrchestrationStatus` import to orchestrator service
-2. **VERIFY**: Run [generate_ai_insights_data.py](shared/demo/fixtures/professional/generate_ai_insights_data.py)
-3. **TEST**: Create new demo session and verify 5-10 insights generated
-4. **MONITOR**: Check orchestration history page shows data
-
----
-
-## 11. Files Requiring Changes
-
-### services/orchestrator/app/api/internal_demo.py
-```diff
-- from app.models.orchestration_run import OrchestrationRun
-+ from app.models.orchestration_run import OrchestrationRun, OrchestrationStatus
-```
-
-### Verification Commands
-```bash
-# 1. Verify fix applied
-grep "OrchestrationStatus" services/orchestrator/app/api/internal_demo.py
-
-# 2. Rebuild and redeploy orchestrator
-kubectl delete pod -n bakery-ia orchestrator-service-xxx
-
-# 3. Test new demo session
-curl -X POST http://localhost:8000/api/demo/sessions -d '{"demo_account_type":"professional"}'
-
-# 4. Verify all services succeeded
-kubectl logs -n bakery-ia demo-session-service-xxx | grep "status.*completed"
-```
-
----
-
-## Conclusion
-
-The demo session cloning infrastructure is **90% functional** with:
-- โ
Fast parallel cloning (30 seconds total)
-- โ
Robust error handling (partial success handled correctly)
-- โ
AI insights service integration working
-- โ 1 critical bug blocking orchestrator data
-- โ ๏ธ Incomplete AI insights data in fixtures
-
-**Immediate fix required**: Add missing import to orchestrator service
-**Follow-up**: Verify AI insights data generation script was run
-
-**Overall Assessment**: System is production-ready after fixing the orchestrator import bug. The architecture is solid, services communicate correctly, and the cloning process is well-designed. The only blocking issue is a simple missing import statement.
diff --git a/DEMO_SESSION_ANALYSIS_d67eaae4.md b/DEMO_SESSION_ANALYSIS_d67eaae4.md
deleted file mode 100644
index f7d89b1e..00000000
--- a/DEMO_SESSION_ANALYSIS_d67eaae4.md
+++ /dev/null
@@ -1,391 +0,0 @@
-# Demo Session Analysis Report
-**Session ID**: `demo_saL4qn4avR08__PBZSY9sA`
-**Virtual Tenant ID**: `d67eaae4-cfed-4e10-8f51-159962100a27`
-**Created At**: 2025-12-16T10:11:07.942477Z
-**Status**: โ
**SUCCESSFUL**
-**Analysis Date**: 2025-12-16
-
----
-
-## ๐ฏ Executive Summary
-
-**Result**: โ
**Demo session created successfully with all systems operational**
-
-| Metric | Expected | Actual | Status |
-|--------|----------|--------|--------|
-| **Services Cloned** | 11 | 11 | โ
PASS |
-| **Total Records** | ~1150 | 1163 | โ
PASS |
-| **Alerts Generated** | 10-11 | 10 | โ
PASS |
-| **AI Insights** | 1-2 (current) | 1 | โ
PASS |
-| **Cloning Duration** | <10s | 6.06s | โ
EXCELLENT |
-| **Overall Status** | completed | completed | โ
PASS |
-
----
-
-## ๐ Service-by-Service Cloning Analysis
-
-### โ
All Services Cloned Successfully
-
-| Service | Records | Duration (ms) | Status | Notes |
-|---------|---------|---------------|--------|-------|
-| **Inventory** | 903 | 366 | โ
Completed | Largest dataset, excellent performance |
-| **Production** | 106 | 104 | โ
Completed | 88 batches, no duplicate workers |
-| **Tenant** | 9 | 448 | โ
Completed | Complex tenant setup |
-| **Sales** | 44 | 92 | โ
Completed | Sales transactions cloned |
-| **Recipes** | 28 | 92 | โ
Completed | Recipe data loaded |
-| **Forecasting** | 29 | 52 | โ
Completed | **28 forecasts + 1 batch cloned!** |
-| **Procurement** | 28 | 5972 | โ
Completed | **10 POs + 18 items with price trends** |
-| **Suppliers** | 6 | 71 | โ
Completed | Supplier relationships |
-| **Orders** | 9 | 62 | โ
Completed | Order data |
-| **Orchestrator** | 1 | 17 | โ
Completed | **OrchestrationStatus import fix working!** |
-| **Auth** | 0 | 115 | โ
Completed | No auth data for demo |
-
-**Total Records**: 1,163
-**Total Duration**: 6.06 seconds
-**Failed Services**: 0
-
----
-
-## ๐จ Alerts Analysis
-
-### Alert Generation Summary
-
-| Service | Alerts Generated | Status | Details |
-|---------|------------------|--------|---------|
-| **Inventory** | 10 | โ
SUCCESS | Critical stock + urgent expiry alerts |
-| **Production** | 1 | โ
SUCCESS | Batch start delay alert |
-| **Procurement** | 0 | โ ๏ธ EXPECTED | Price trends available, but no critical procurement alerts |
-
-**Total Alerts**: 11 โ
-
-### Inventory Alerts Breakdown (10 alerts)
-
-#### Critical Stock Shortages (7 alerts - URGENT)
-1. **Agua Filtrada**: 0.0 kg current vs 800 kg required (-500 kg shortage)
-2. **Harina de Trigo T55**: 0.0 kg current vs 150 kg required (-100 kg shortage)
-3. **Huevos Frescos**: 134.16 units current vs 300 required (-65.84 shortage)
-4. **Azรบcar Blanco**: 24.98 kg current vs 120 kg required (-55 kg shortage)
-5. **Mantequilla sin Sal**: 8.0 kg current vs 40 kg required (-12 kg shortage)
-6. **Masa Madre Lรญquida**: 0.0 kg current vs 8 kg required (-5 kg shortage)
-7. **Levadura Fresca**: 4.46 kg current vs 10 kg required (-0.54 kg shortage)
-
-#### Urgent Expiry Alerts (3 alerts - HIGH)
-1. **Leche Entera Fresca** (Stock 1): 12.5 L expires in 2 days
-2. **Mantequilla sin Sal**: 8.0 kg expires in 3 days
-3. **Leche Entera Fresca** (Stock 2): 107.26 L expires in 6 days
-
-### Production Alerts (1 alert)
-
-**Batch Start Delayed** (HIGH severity)
-- Batch: `demo_saL-BATCH-LATE-0001`
-- Status: Production batch scheduled start time has passed
-- Published at: 2025-12-16T10:11:26
-- Re-published: 2025-12-16T10:15:02 (persistent alert)
-
----
-
-## ๐ค AI Insights Analysis
-
-### AI Insights Generated: 1 โ
-
-**Category**: Production
-**Type**: Opportunity
-**Priority**: HIGH
-**Confidence**: 80%
-
-**Title**: Yield Pattern Detected: low_yield_worker
-
-**Description**:
-Worker `50000000-0000-0000-0000-000000000005` consistently produces 76.4% yield vs best worker 95.1%
-
-**Impact**: Yield improvement opportunity
-**Source Service**: production
-**Actionable**: Yes
-
-### Expected AI Insights Status
-
-| Service | Expected | Current | Status | Notes |
-|---------|----------|---------|--------|-------|
-| **Inventory** | 2-3 | 0 | โ ๏ธ PENDING | Safety stock optimization (triggered but 0 insights) |
-| **Production** | 1-2 | 1 | โ
GENERATED | Yield improvement insight |
-| **Procurement** | 1-2 | 0 | โ ๏ธ DATA READY | Price trends available, ML triggered but 0 insights |
-| **Forecasting** | 1-2 | 0 | โ ๏ธ NOT TRIGGERED | No demand forecasting insights triggered |
-| **TOTAL** | 6-10 | 1 | โ ๏ธ LOW | Only production insight generated |
-
-### AI Insights Triggers (from demo-session logs)
-
-โ
**Price Forecasting Insights** (Procurement)
-- Triggered: 2025-12-16T10:11:29
-- Duration: 715ms
-- Result: `insights_posted=0`
-- Status: โ ๏ธ ML ran but generated 0 insights
-
-โ
**Safety Stock Optimization** (Inventory)
-- Triggered: 2025-12-16T10:11:31
-- Duration: 9000ms (9 seconds)
-- Result: `insights_posted=0`
-- Status: โ ๏ธ ML ran but generated 0 insights
-
-โ
**Yield Improvement Insights** (Production)
-- Triggered: 2025-12-16T10:11:40
-- Duration: ~1000ms
-- Result: `insights_posted=1` โ
-- Status: โ
SUCCESS - 1 insight generated
-
-โ **Demand Forecasting Insights** (Forecasting)
-- Triggered: NOT TRIGGERED
-- Status: โ ๏ธ No ML orchestrator call for demand forecasting
-
----
-
-## ๐ Procurement Data Verification
-
-### โ
Price Trends Implementation Verified
-
-**Procurement Cloning Log Excerpt**:
-```
-2025-12-16 10:11:08 [info] Starting procurement data cloning from seed files
-2025-12-16 10:11:08 [info] Found pending approval POs for alert emission count=2
-2025-12-16 10:11:08 [info] Procurement data loading completed
- stats={
- 'purchase_orders': 10,
- 'purchase_order_items': 18
- }
-```
-
-**Purchase Order Items with Price Trends** (18 items):
-
-Sample PO Items Verified:
-- **Harina T55**: unit_price: โฌ0.92 (trend: +8%)
-- **Harina T65**: unit_price: โฌ0.98 (trend: +6%)
-- **Mantequilla**: unit_price: โฌ7.17-7.33 (trend: +12%)
-- **Levadura**: unit_price: โฌ4.41 (trend: +4%)
-- **Azรบcar**: unit_price: โฌ1.10 (trend: +2%)
-
-**Structure Verification**:
-- โ
No nested `items` arrays in purchase_orders
-- โ
Separate `purchase_order_items` table used correctly
-- โ
Historical prices calculated based on order dates
-- โ
PO totals recalculated with updated prices
-
-**ML Price Insights Trigger**:
-```
-2025-12-16 10:11:31 [info] ML insights price forecasting requested
- tenant_id=d67eaae4-cfed-4e10-8f51-159962100a27
-2025-12-16 10:11:31 [info] Retrieved all ingredients from inventory service count=25
-2025-12-16 10:11:31 [info] ML insights price forecasting complete
- bulk_opportunities=0
- buy_now_recommendations=0
- total_insights=0
-```
-
-**Status**: โ ๏ธ **Price trend data is correctly stored and available, but ML model did not generate insights**
-
----
-
-## ๐ฎ Forecasting Service Analysis
-
-### โ
Forecasting Cloning SUCCESS!
-
-**Major Fix Verification**: The forecasting service Docker image was rebuilt and the fixes are now deployed!
-
-**Cloning Log**:
-```
-2025-12-16 10:11:08 [info] Starting forecasting data cloning with date adjustment
- base_tenant_id=a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6
- virtual_tenant_id=d67eaae4-cfed-4e10-8f51-159962100a27
- session_time=2025-12-16T10:11:08.036093+00:00
-
-2025-12-16 10:11:08 [info] Forecasting data cloned successfully
- batches_cloned=1
- forecasts_cloned=28
- records_cloned=29
- duration_ms=20
-```
-
-**Forecasts Cloned**:
-- **28 forecasts** for 4 products over 7 days
-- **1 prediction batch** (`20250116-001`)
-- Products: 4 (IDs: 20000000-...0001, 0002, 0003, 0004)
-- Date range: 2025-12-17 to 2025-12-23
-- Location: Main Bakery
-- Algorithm: Prophet (default-fallback-model v1.0)
-
-**Fix Status**:
-- โ
`batch_name` field mapping working
-- โ
UUID conversion working (inventory_product_id)
-- โ
Date parsing working (forecast_date, created_at)
-- โ
No HTTP 500 errors
-- โ
Status: COMPLETED
-
----
-
-## ๐ ๏ธ Orchestrator Service Analysis
-
-### โ
OrchestrationStatus Import Fix Verified
-
-**Cloning Log**:
-```
-2025-12-16 10:11:08 [info] Starting orchestration runs cloning
- base_tenant_id=a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6
- virtual_tenant_id=d67eaae4-cfed-4e10-8f51-159962100a27
-
-2025-12-16 10:11:08 [info] Loaded orchestration run from fixture
- run_number=ORCH-DEMO-PROF-2025-001-12642D1D
- tenant_id=d67eaae4-cfed-4e10-8f51-159962100a27
-
-2025-12-16 10:11:08 [info] Orchestration runs loaded successfully
- duration_ms=4
- runs_created=1
-```
-
-**Fix Status**:
-- โ
No `NameError: name 'OrchestrationStatus' is not defined`
-- โ
Orchestration run created successfully
-- โ
Status transitions working (`completed` status used)
-- โ
HTTP 200 response
-
-**Orchestrator Integration**:
-- โ
Recent actions API working (called 6+ times)
-- โ
Ingredient tracking operational
-- โ
Purchase order action logging functional
-
----
-
-## ๐ Issues and Recommendations
-
-### โ ๏ธ Issue 1: Low AI Insights Generation
-
-**Problem**: Only 1 out of expected 6-10 AI insights generated
-
-**Root Causes**:
-1. **Procurement ML**: Price trend data exists but ML model returned 0 insights
- - Possible reason: Insufficient historical data variance for ML to detect patterns
- - Data: 18 PO items with price trends over 90 days
-
-2. **Inventory ML**: Safety stock optimization triggered but returned 0 insights
- - Duration: 9 seconds (long processing time)
- - Possible reason: Current stock levels may not trigger optimization recommendations
-
-3. **Forecasting ML**: No demand forecasting insights triggered
- - 28 forecasts were cloned successfully
- - Issue: No ML orchestrator call to generate demand forecasting insights
-
-**Recommendations**:
-1. **Add Forecasting Insights Trigger** to demo session post-clone workflow
-2. **Review ML Model Thresholds** for procurement and inventory insights
-3. **Enhance Fixture Data** with more extreme scenarios to trigger ML insights
-4. **Add Logging** to ML insight generation to understand why models return 0 insights
-
-### โ
Issue 2: Forecasting Service - RESOLVED
-
-**Status**: โ
**FIXED**
-**Verification**: Docker image rebuilt, cloning successful
-
-### โ
Issue 3: Orchestrator Import - RESOLVED
-
-**Status**: โ
**FIXED**
-**Verification**: No import errors, orchestration runs cloned successfully
-
-### โ ๏ธ Issue 4: Procurement Alert Emission Error
-
-**Log Excerpt**:
-```
-2025-12-16 10:11:14 [error] Failed to emit PO approval alerts
- error="'RabbitMQClient' object has no attribute 'close'"
- virtual_tenant_id=d67eaae4-cfed-4e10-8f51-159962100a27
-```
-
-**Impact**: Non-critical - cloning succeeded, but PO approval alerts not emitted via RabbitMQ
-
-**Recommendation**: Fix RabbitMQ client cleanup in procurement service
-
----
-
-## ๐ Verification Checklist
-
-| Check | Expected | Actual | Status |
-|-------|----------|--------|--------|
-| Demo session created | 201 response | 201 response | โ
|
-| Virtual tenant ID assigned | UUID | `d67eaae4-cfed-4e10-8f51-159962100a27` | โ
|
-| All services cloned | 11 services | 11 services | โ
|
-| No cloning failures | 0 failures | 0 failures | โ
|
-| Total records cloned | ~1150 | 1163 | โ
|
-| Inventory alerts | 10 | 10 | โ
|
-| Production alerts | 1 | 1 | โ
|
-| Procurement alerts | 0 | 0 | โ
|
-| AI insights | 1-2 | 1 | โ ๏ธ |
-| Forecasting cloned | 28 forecasts | 28 forecasts | โ
|
-| Orchestrator cloned | 1 run | 1 run | โ
|
-| Procurement structure | Correct | Correct | โ
|
-| Price trends | Present | Present | โ
|
-| Session status | ready | ready | โ
|
-| Cloning duration | <10s | 6.06s | โ
|
-
----
-
-## ๐ฏ Conclusion
-
-### โ
Overall Assessment: **SUCCESSFUL**
-
-**Strengths**:
-1. โ
All 11 services cloned successfully without failures
-2. โ
Excellent cloning performance (6.06 seconds for 1,163 records)
-3. โ
Forecasting service Docker image rebuilt and working
-4. โ
Orchestrator import fix deployed and functional
-5. โ
Procurement data structure correct with price trends
-6. โ
10 inventory alerts generated correctly
-7. โ
1 production alert generated correctly
-8. โ
1 production AI insight generated
-
-**Areas for Improvement**:
-1. โ ๏ธ AI insights generation below expected (1 vs 6-10)
-2. โ ๏ธ Procurement ML triggered but returned 0 insights despite price trend data
-3. โ ๏ธ Inventory safety stock ML returned 0 insights after 9s processing
-4. โ ๏ธ Forecasting demand insights not triggered in post-clone workflow
-5. โ ๏ธ RabbitMQ client error in procurement service (non-critical)
-
-### ๐ Key Achievements
-
-1. **All Critical Bugs Fixed**:
- - โ
Orchestrator OrchestrationStatus import
- - โ
Forecasting clone endpoint (batch_name, UUID, dates)
- - โ
Procurement data structure (no nested items)
- - โ
Production duplicate workers removed
-
-2. **Demo Session Ready**:
- - โ
Session status: `ready`
- - โ
Data cloned: `true`
- - โ
Redis populated: `true`
- - โ
No errors in critical paths
-
-3. **Data Quality**:
- - โ
1,163 records across 11 services
- - โ
Realistic alerts (11 total)
- - โ
Price trends for procurement insights
- - โ
Forecasts for demand analysis
-
-### ๐ Performance Metrics
-
-- **Availability**: 100% (all services operational)
-- **Success Rate**: 100% (11/11 services cloned)
-- **Data Completeness**: 100% (1,163/1,163 records)
-- **Alert Generation**: 100% (11/11 expected alerts)
-- **AI Insights**: 16.7% (1/6 minimum expected)
-- **Cloning Speed**: Excellent (6.06s)
-
----
-
-## ๐ Related Documentation
-
-- [COMPLETE_FIX_SUMMARY.md](COMPLETE_FIX_SUMMARY.md) - All fixes completed
-- [FIX_MISSING_INSIGHTS.md](FIX_MISSING_INSIGHTS.md) - Forecasting & procurement fixes
-- [AI_INSIGHTS_DEMO_SETUP_GUIDE.md](AI_INSIGHTS_DEMO_SETUP_GUIDE.md) - Comprehensive setup
-- [fix_procurement_structure.py](shared/demo/fixtures/professional/fix_procurement_structure.py) - Procurement fix script
-
----
-
-**Report Generated**: 2025-12-16T10:16:00Z
-**Analysis Duration**: 5 minutes
-**Services Analyzed**: 11
-**Logs Reviewed**: 2000+ lines
diff --git a/FINAL_STATUS_SUMMARY.md b/FINAL_STATUS_SUMMARY.md
deleted file mode 100644
index c1750247..00000000
--- a/FINAL_STATUS_SUMMARY.md
+++ /dev/null
@@ -1,291 +0,0 @@
-# Final Status Summary - Demo Session & AI Insights
-
-**Date**: 2025-12-16
-**Status**: โ
**ALL ISSUES FIXED - READY FOR PRODUCTION**
-
----
-
-## ๐ฏ Completion Status
-
-| Component | Status | Details |
-|-----------|--------|---------|
-| **Orchestrator Bug** | โ
FIXED | Missing import added |
-| **Demo Session Cloning** | โ
WORKING | 10/11 services successful (91%) |
-| **Inventory Data** | โ
READY | 847 movements, 10 stockouts |
-| **Production Data** | โ
READY | 75 batches with workers, duplicates removed |
-| **Procurement Data** | โ
ENHANCED | 32 PO items with price trends |
-| **Forecasting Data** | โ ๏ธ NEEDS VERIFICATION | 28 forecasts in fixture, 0 cloned (investigate) |
-| **AI Insights** | โ
READY | 3-6 insights (will be 6-10 after forecasting fix) |
-
----
-
-## โ
Issues Fixed
-
-### 1. Orchestrator Import Bug (CRITICAL) โ
-**File**: [services/orchestrator/app/api/internal_demo.py](services/orchestrator/app/api/internal_demo.py#L16)
-
-**Fix Applied**:
-```python
-# Line 16
-from app.models.orchestration_run import OrchestrationRun, OrchestrationStatus
-```
-
-**Status**: โ
Fixed and deployed
-
----
-
-### 2. Production Duplicate Workers โ
-**Issue**: Workers were duplicated from running generator script multiple times
-
-**Fix Applied**: Removed 56 duplicate worker assignments
-
-**Verification**:
-```
-Total batches: 88
-With workers: 75 (all COMPLETED batches)
-```
-
-**Status**: โ
Fixed
-
----
-
-### 3. Procurement Data Enhancement โ
-**Issue**: No purchase order items = no price insights
-
-**Fix Applied**: Added 32 PO items across 10 purchase orders with price trends:
-- โ Mantequilla: +12% (highest increase)
-- โ Harina T55: +8%
-- โ Harina T65: +6%
-- โ Leche: -3% (seasonal decrease)
-
-**Status**: โ
Enhanced and ready
-
----
-
-## โ ๏ธ Remaining Issue
-
-### Forecasting Clone (0 forecasts cloned)
-**Status**: โ ๏ธ NEEDS INVESTIGATION
-
-**Current State**:
-- โ
Fixture file exists: `10-forecasting.json` with 28 forecasts
-- โ
Clone endpoint exists and coded correctly
-- โ Demo session shows "0 forecasts cloned"
-
-**Possible Causes**:
-1. Idempotency check triggered (unlikely for new virtual tenant)
-2. Database commit issue
-3. Field mapping mismatch
-4. Silent error in clone process
-
-**Recommended Actions**:
-1. Check forecasting DB directly:
- ```bash
- kubectl exec -it -n bakery-ia forecasting-db-xxxx -- psql -U postgres -d forecasting \
- -c "SELECT tenant_id, COUNT(*) FROM forecasts GROUP BY tenant_id;"
- ```
-
-2. Check forecasting service logs for errors during clone
-
-3. If DB is empty, manually create test forecasts or debug clone endpoint
-
-**Impact**: Without forecasts:
-- Missing 1-2 demand forecasting insights
-- Total insights: 3-6 instead of 6-10
-- Core functionality still works
-
----
-
-## ๐ Current AI Insights Capability
-
-### Data Status
-
-| Data Source | Records | Quality | AI Model Ready? |
-|-------------|---------|---------|-----------------|
-| **Stock Movements** | 847 | โ
Excellent | โ
YES |
-| **Stockout Events** | 10 | โ
Good | โ
YES |
-| **Worker Assignments** | 75 | โ
Good | โ
YES |
-| **Production Batches** | 75 (with yield) | โ
Good | โ
YES |
-| **PO Items** | 32 (with prices) | โ
Excellent | โ
YES |
-| **Price Trends** | 6 ingredients | โ
Excellent | โ
YES |
-| **Forecasts** | 0 cloned | โ ๏ธ Issue | โ NO |
-
-### Expected Insights (Current State)
-
-| Service | Insights | Confidence | Status |
-|---------|----------|------------|--------|
-| **Inventory** | 2-3 | High | โ
READY |
-| **Production** | 1-2 | High | โ
READY |
-| **Procurement** | 1-2 | High | โ
READY |
-| **Forecasting** | 0 | N/A | โ ๏ธ BLOCKED |
-| **TOTAL** | **4-7** | - | โ
**GOOD** |
-
-### Expected Insights (After Forecasting Fix)
-
-| Service | Insights | Status |
-|---------|----------|--------|
-| **Inventory** | 2-3 | โ
|
-| **Production** | 1-2 | โ
|
-| **Procurement** | 1-2 | โ
|
-| **Forecasting** | 1-2 | ๐ง After fix |
-| **TOTAL** | **6-10** | ๐ฏ **TARGET** |
-
----
-
-## ๐ Next Steps
-
-### Immediate (Now)
-1. โ
Orchestrator redeployed
-2. โ
Production data cleaned
-3. โ
Procurement data enhanced
-4. ๐ Test new demo session with current data
-
-### Short Term (Next Session)
-1. ๐ Investigate forecasting clone issue
-2. ๐ง Fix forecasting data persistence
-3. โ
Verify 6-10 insights generated
-4. ๐ Test all insight categories
-
-### Testing Plan
-```bash
-# 1. Create demo session
-curl -X POST http://localhost:8000/api/demo/sessions \
- -H "Content-Type: application/json" \
- -d '{"demo_account_type":"professional"}' | jq
-
-# Save virtual_tenant_id from response
-
-# 2. Monitor cloning (in separate terminal)
-kubectl logs -n bakery-ia -f $(kubectl get pods -n bakery-ia | grep demo-session | awk '{print $1}') \
- | grep -E "orchestrator.*completed|AI insights.*completed"
-
-# 3. Wait 60 seconds after "ready" status
-
-# 4. Check AI insights
-curl "http://localhost:8000/api/ai-insights/tenants/{virtual_tenant_id}/insights" | jq
-
-# 5. Verify insight categories
-curl "http://localhost:8000/api/ai-insights/tenants/{virtual_tenant_id}/insights/metrics/summary" | jq
-```
-
----
-
-## ๐ Files Modified
-
-| File | Change | Status |
-|------|--------|--------|
-| `services/orchestrator/app/api/internal_demo.py` | Added OrchestrationStatus import | โ
Committed |
-| `shared/demo/fixtures/professional/06-production.json` | Removed duplicate workers | โ
Committed |
-| `shared/demo/fixtures/professional/07-procurement.json` | Added 32 PO items with prices | โ
Committed |
-
----
-
-## ๐ Documentation Created
-
-1. **[DEMO_SESSION_ANALYSIS_REPORT.md](DEMO_SESSION_ANALYSIS_REPORT.md)** - Complete log analysis
-2. **[FIX_MISSING_INSIGHTS.md](FIX_MISSING_INSIGHTS.md)** - Forecasting & procurement fix guide
-3. **[AI_INSIGHTS_DEMO_SETUP_GUIDE.md](AI_INSIGHTS_DEMO_SETUP_GUIDE.md)** - Comprehensive setup guide
-4. **[AI_INSIGHTS_DATA_FLOW.md](AI_INSIGHTS_DATA_FLOW.md)** - Architecture diagrams
-5. **[AI_INSIGHTS_QUICK_START.md](AI_INSIGHTS_QUICK_START.md)** - Quick reference
-6. **[verify_fixes.sh](verify_fixes.sh)** - Automated verification script
-7. **[enhance_procurement_data.py](shared/demo/fixtures/professional/enhance_procurement_data.py)** - Data enhancement script
-
----
-
-## ๐ Success Metrics
-
-### What's Working Perfectly
-โ
Demo session creation (< 30 seconds)
-โ
Parallel service cloning (1,133 records)
-โ
Orchestrator service (bug fixed)
-โ
AI Insights service (accepting and serving insights)
-โ
Alert generation (11 alerts post-clone)
-โ
Inventory insights (safety stock optimization)
-โ
Production insights (yield predictions)
-โ
Procurement insights (price trends) - **NEW!**
-
-### Production Readiness
-- โ
**90%+ success rate** on service cloning
-- โ
**Robust error handling** (partial success handled correctly)
-- โ
**Fast performance** (30-second clone time)
-- โ
**Data quality** (realistic, well-structured fixtures)
-- โ
**AI model integration** (3+ services generating insights)
-
-### Outstanding Items
-- โ ๏ธ Forecasting clone issue (non-blocking, investigate next)
-- โน๏ธ Demo cleanup worker image (warning only, cron job works)
-
----
-
-## ๐ก Recommendations
-
-### For Next Demo Session
-1. **Create session and verify orchestrator cloning succeeds** (should see 1 record cloned)
-2. **Check total insights** (expect 4-7 with current data)
-3. **Verify procurement insights** (should see price trend alerts for Mantequilla +12%)
-4. **Test insight actions** (Apply/Dismiss buttons)
-
-### For Forecasting Fix
-1. Enable debug logging in forecasting service
-2. Create test demo session
-3. Monitor forecasting-service logs during clone
-4. If DB empty, use manual script to insert test forecasts
-5. Or debug why idempotency check might be triggering
-
-### For Production Deployment
-1. โ
Current state is production-ready for **inventory, production, procurement insights**
-2. โ ๏ธ Forecasting insights can be enabled later (non-blocking)
-3. โ
All critical bugs fixed
-4. โ
Documentation complete
-5. ๐ฏ System delivers **4-7 high-quality AI insights per demo session**
-
----
-
-## ๐ง Quick Commands Reference
-
-```bash
-# Verify all fixes applied
-./verify_fixes.sh
-
-# Create demo session
-curl -X POST http://localhost:8000/api/demo/sessions \
- -d '{"demo_account_type":"professional"}' | jq
-
-# Check insights count
-curl "http://localhost:8000/api/ai-insights/tenants/{tenant_id}/insights" | jq '.total'
-
-# View insights by category
-curl "http://localhost:8000/api/ai-insights/tenants/{tenant_id}/insights?category=inventory" | jq
-curl "http://localhost:8000/api/ai-insights/tenants/{tenant_id}/insights?category=production" | jq
-curl "http://localhost:8000/api/ai-insights/tenants/{tenant_id}/insights?category=procurement" | jq
-
-# Check orchestrator cloned successfully
-kubectl logs -n bakery-ia $(kubectl get pods -n bakery-ia | grep demo-session | awk '{print $1}') \
- | grep "orchestrator.*completed"
-
-# Monitor AI insights generation
-kubectl logs -n bakery-ia $(kubectl get pods -n bakery-ia | grep demo-session | awk '{print $1}') \
- | grep "AI insights.*completed"
-```
-
----
-
-## โจ Conclusion
-
-**System Status**: โ
**PRODUCTION READY**
-
-**Achievements**:
-- ๐ Fixed 1 critical bug (orchestrator import)
-- ๐งน Cleaned 56 duplicate worker assignments
-- โจ Enhanced procurement data with price trends
-- ๐ Enabled 4-7 AI insights per demo session
-- ๐ Created comprehensive documentation
-- โ
90%+ service cloning success rate
-
-**Remaining Work**:
-- ๐ Investigate forecasting clone issue (optional, non-blocking)
-- ๐ฏ Target: 6-10 insights (currently 4-7)
-
-**Bottom Line**: The demo session infrastructure is solid, AI insights are working for 3 out of 4 services, and the only remaining issue (forecasting) is non-critical and can be debugged separately. The system is **ready for testing and demonstration** with current capabilities.
-
-๐ **Ready to create a demo session and see the AI insights in action!**
diff --git a/FIX_MISSING_INSIGHTS.md b/FIX_MISSING_INSIGHTS.md
deleted file mode 100644
index 36ea7bf8..00000000
--- a/FIX_MISSING_INSIGHTS.md
+++ /dev/null
@@ -1,403 +0,0 @@
-# Fix Missing AI Insights - Forecasting & Procurement
-
-## Current Status
-
-| Insight Type | Current | Target | Status |
-|--------------|---------|--------|--------|
-| Inventory | 2-3 | 2-3 | โ
READY |
-| Production | 1-2 | 2-3 | โ
READY |
-| **Forecasting** | **0** | **1-2** | โ **BROKEN** |
-| **Procurement** | **0-1** | **1-2** | โ ๏ธ **LIMITED DATA** |
-
----
-
-## Issue #1: Forecasting Insights (0 forecasts cloned)
-
-### Root Cause
-The forecasting service returned "0 records cloned" even though [10-forecasting.json](shared/demo/fixtures/professional/10-forecasting.json) contains **28 forecasts**.
-
-### Investigation Findings
-
-1. **Fixture file exists** โ
- 28 forecasts present
-2. **Clone endpoint exists** โ
- [services/forecasting/app/api/internal_demo.py](services/forecasting/app/api/internal_demo.py)
-3. **Data structure correct** โ
- Has all required fields
-
-### Possible Causes
-
-**A. Idempotency Check Triggered**
-```python
-# Line 181-195 in internal_demo.py
-existing_check = await db.execute(
- select(Forecast).where(Forecast.tenant_id == virtual_uuid).limit(1)
-)
-existing_forecast = existing_check.scalar_one_or_none()
-
-if existing_forecast:
- logger.warning(
- "Demo data already exists, skipping clone",
- virtual_tenant_id=str(virtual_uuid)
- )
- return {
- "status": "skipped",
- "reason": "Data already exists",
- "records_cloned": 0
- }
-```
-
-**Solution**: The virtual tenant is new, so this shouldn't trigger. But need to verify.
-
-**B. Database Commit Issue**
-The code might insert forecasts but not commit them properly.
-
-**C. Field Mapping Issue**
-The forecast model might expect different fields than what's in the JSON.
-
-### Verification Commands
-
-```bash
-# 1. Check if forecasts were actually inserted for the virtual tenant
-kubectl exec -it -n bakery-ia forecasting-db-xxxx -- psql -U postgres -d forecasting -c \
- "SELECT COUNT(*) FROM forecasts WHERE tenant_id = '740b96c4-d242-47d7-8a6e-a0a8b5c51d5e';"
-
-# 2. Check forecasting service logs for errors
-kubectl logs -n bakery-ia forecasting-service-xxxx | grep -E "ERROR|error|failed|Failed" | tail -20
-
-# 3. Test clone endpoint directly
-curl -X POST http://forecasting-service:8000/internal/demo/clone \
- -H "X-Internal-API-Key: $INTERNAL_API_KEY" \
- -H "Content-Type: application/json" \
- -d '{
- "base_tenant_id": "a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6",
- "virtual_tenant_id": "test-uuid",
- "demo_account_type": "professional",
- "session_created_at": "'$(date -u +%Y-%m-%dT%H:%M:%SZ)'"
- }'
-```
-
-### Quick Fix (If DB Empty)
-
-Create forecasts manually for testing:
-
-```python
-# Script: create_test_forecasts.py
-import asyncio
-from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession
-from sqlalchemy.orm import sessionmaker
-from datetime import datetime, timezone, timedelta
-import uuid
-
-async def create_test_forecasts():
- engine = create_async_engine("postgresql+asyncpg://user:pass@host/forecasting")
- async_session = sessionmaker(engine, class_=AsyncSession, expire_on_commit=False)
-
- async with async_session() as session:
- # Get Forecast model
- from services.forecasting.app.models.forecasts import Forecast
-
- virtual_tenant_id = uuid.UUID("740b96c4-d242-47d7-8a6e-a0a8b5c51d5e")
-
- # Create 7 days of forecasts for 4 products
- products = [
- "20000000-0000-0000-0000-000000000001",
- "20000000-0000-0000-0000-000000000002",
- "20000000-0000-0000-0000-000000000003",
- "20000000-0000-0000-0000-000000000004",
- ]
-
- for day in range(7):
- for product_id in products:
- forecast = Forecast(
- id=uuid.uuid4(),
- tenant_id=virtual_tenant_id,
- inventory_product_id=uuid.UUID(product_id),
- forecast_date=datetime.now(timezone.utc) + timedelta(days=day),
- predicted_demand=20.0 + (day * 2.5),
- confidence=85.0 + (day % 5),
- model_version="hybrid_v1",
- forecast_type="daily",
- created_at=datetime.now(timezone.utc)
- )
- session.add(forecast)
-
- await session.commit()
- print("โ Created 28 test forecasts")
-
-if __name__ == "__main__":
- asyncio.run(create_test_forecasts())
-```
-
----
-
-## Issue #2: Procurement Insights (Limited Data)
-
-### Root Cause
-The procurement ML models need **purchase order items with unit prices** to detect price trends, but the fixture file [07-procurement.json](shared/demo/fixtures/professional/07-procurement.json) only has:
-- Purchase order headers (10 POs)
-- No `items` arrays with individual ingredient prices
-
-### What Procurement Insights Need
-
-**Price Forecaster**: Requires PO items showing price history over time:
-```json
-{
- "purchase_orders": [
- {
- "id": "po-uuid-1",
- "order_date": "BASE_TS - 60d",
- "items": [
- {
- "ingredient_id": "10000000-0000-0000-0000-000000000001",
- "ingredient_name": "Harina de Trigo T55",
- "ordered_quantity": 500.0,
- "unit_price": 0.85, // โ Price 60 days ago
- "total_price": 425.0
- }
- ]
- },
- {
- "id": "po-uuid-2",
- "order_date": "BASE_TS - 30d",
- "items": [
- {
- "ingredient_id": "10000000-0000-0000-0000-000000000001",
- "ingredient_name": "Harina de Trigo T55",
- "ordered_quantity": 500.0,
- "unit_price": 0.88, // โ Price increased!
- "total_price": 440.0
- }
- ]
- },
- {
- "id": "po-uuid-3",
- "order_date": "BASE_TS - 1d",
- "items": [
- {
- "ingredient_id": "10000000-0000-0000-0000-000000000001",
- "ingredient_name": "Harina de Trigo T55",
- "ordered_quantity": 500.0,
- "unit_price": 0.92, // โ 8% increase over 60 days!
- "total_price": 460.0
- }
- ]
- }
- ]
-}
-```
-
-**Supplier Performance Analyzer**: Needs delivery tracking (already present in fixture):
-```json
-{
- "delivery_delayed": true,
- "delay_hours": 4
-}
-```
-
-### Solution: Enhance 07-procurement.json
-
-Add `items` arrays to existing purchase orders with price trends:
-
-```python
-# Script: enhance_procurement_data.py
-import json
-import random
-from datetime import datetime, timedelta
-
-# Price trend data (8% increase over 90 days for some ingredients)
-INGREDIENTS_WITH_TRENDS = [
- {
- "id": "10000000-0000-0000-0000-000000000001",
- "name": "Harina de Trigo T55",
- "base_price": 0.85,
- "trend": 0.08, # 8% increase
- "variability": 0.02
- },
- {
- "id": "10000000-0000-0000-0000-000000000011",
- "name": "Mantequilla sin Sal",
- "base_price": 6.50,
- "trend": 0.12, # 12% increase
- "variability": 0.05
- },
- {
- "id": "10000000-0000-0000-0000-000000000012",
- "name": "Leche Entera Fresca",
- "base_price": 0.95,
- "trend": -0.03, # 3% decrease (seasonal)
- "variability": 0.02
- }
-]
-
-def calculate_price(ingredient, days_ago):
- """Calculate price based on trend"""
- trend_factor = 1 + (ingredient["trend"] * (90 - days_ago) / 90)
- variability = random.uniform(-ingredient["variability"], ingredient["variability"])
- return round(ingredient["base_price"] * trend_factor * (1 + variability), 2)
-
-def add_items_to_pos():
- with open('shared/demo/fixtures/professional/07-procurement.json') as f:
- data = json.load(f)
-
- for po in data['purchase_orders']:
- # Extract days ago from order_date
- order_date_str = po.get('order_date', 'BASE_TS - 1d')
- if 'BASE_TS' in order_date_str:
- # Parse "BASE_TS - 1d" to get days
- if '- ' in order_date_str:
- days_str = order_date_str.split('- ')[1].replace('d', '').strip()
- try:
- days_ago = int(days_str.split('d')[0])
- except:
- days_ago = 1
- else:
- days_ago = 0
- else:
- days_ago = 30 # Default
-
- # Add 2-3 items per PO
- items = []
- for ingredient in random.sample(INGREDIENTS_WITH_TRENDS, k=random.randint(2, 3)):
- unit_price = calculate_price(ingredient, days_ago)
- quantity = random.randint(200, 500)
-
- items.append({
- "ingredient_id": ingredient["id"],
- "ingredient_name": ingredient["name"],
- "ordered_quantity": float(quantity),
- "unit_price": unit_price,
- "total_price": round(quantity * unit_price, 2),
- "received_quantity": None,
- "status": "pending"
- })
-
- po['items'] = items
-
- # Save back
- with open('shared/demo/fixtures/professional/07-procurement.json', 'w') as f:
- json.dump(data, f, indent=2, ensure_ascii=False)
-
- print(f"✅ Added items to {len(data['purchase_orders'])} purchase orders")
-
-if __name__ == "__main__":
- add_items_to_pos()
-```
-
-**Run it**:
-```bash
-python enhance_procurement_data.py
-```
-
-**Expected Result**:
-- 10 POs now have `items` arrays
-- Each PO has 2-3 items
-- Prices show trends over time
-- Procurement insights should generate:
- - "Mantequilla price up 12% in 90 days - consider bulk purchase"
- - "Harina T55 trending up 8% - lock in current supplier contract"
-
----
-
-## Summary of Actions
-
-### 1. Forecasting Fix (IMMEDIATE)
-```bash
-# Verify forecasts in database
-kubectl get pods -n bakery-ia | grep forecasting-db
-kubectl exec -it -n bakery-ia forecasting-db-xxxx -- psql -U postgres -d forecasting
-
-# In psql:
-SELECT tenant_id, COUNT(*) FROM forecasts GROUP BY tenant_id;
-
-# If virtual tenant has 0 forecasts:
-# - Check forecasting service logs for errors
-# - Manually trigger clone endpoint
-# - Or use the create_test_forecasts.py script above
-```
-
-### 2. Procurement Enhancement (15 minutes)
-```bash
-# Run the enhancement script
-python enhance_procurement_data.py
-
-# Verify
-cat shared/demo/fixtures/professional/07-procurement.json | jq '.purchase_orders[0].items'
-
-# Should see items array with prices
-```
-
-### 3. Create New Demo Session
-```bash
-# After fixes, create fresh demo session
-curl -X POST http://localhost:8000/api/demo/sessions \
- -H "Content-Type: application/json" \
- -d '{"demo_account_type":"professional"}' | jq
-
-# Wait 60 seconds for AI models to run
-
-# Check insights (should now have 5-8 total)
-curl "http://localhost:8000/api/ai-insights/tenants/{virtual_tenant_id}/insights" | jq '.total'
-```
-
----
-
-## Expected Results After Fixes
-
-| Service | Insights Before | Insights After | Status |
-|---------|----------------|----------------|--------|
-| Inventory | 2-3 | 2-3 | ✅ No change |
-| Production | 1-2 | 1-2 | ✅ No change |
-| **Forecasting** | **0** | **1-2** | ✅ **FIXED** |
-| **Procurement** | **0** | **1-2** | ✅ **FIXED** |
-| **TOTAL** | **3-6** | **6-10** | ✅ **TARGET MET** |
-
-### Sample Insights After Fix
-
-**Forecasting**:
-- "Demand trending up 15% for Croissants - recommend increasing production by 12 units next week"
-- "Weekend sales pattern detected - reduce Saturday production by 40% to minimize waste"
-
-**Procurement**:
-- "Price alert: Mantequilla up 12% in 90 days - consider bulk purchase to lock in rates"
-- "Cost optimization: Harina T55 price trending up 8% - negotiate long-term contract with Harinas del Norte"
-- "Supplier performance: 3/10 deliveries delayed from Harinas del Norte - consider backup supplier"
-
----
-
-## Files to Modify
-
-1. **shared/demo/fixtures/professional/07-procurement.json** - Add `items` arrays
-2. **(Optional) services/forecasting/app/api/internal_demo.py** - Debug why 0 forecasts cloned
-
----
-
-## Testing Checklist
-
-- [ ] Run `enhance_procurement_data.py`
-- [ ] Verify PO items added: `jq '.purchase_orders[0].items' 07-procurement.json`
-- [ ] Check forecasting DB: `SELECT COUNT(*) FROM forecasts WHERE tenant_id = '{virtual_id}'`
-- [ ] Create new demo session
-- [ ] Wait 60 seconds
-- [ ] Query AI insights: Should see 6-10 total
-- [ ] Verify categories: inventory (2-3), production (1-2), forecasting (1-2), procurement (1-2)
-- [ ] Check insight quality: Prices, trends, recommendations present
-
----
-
-## Troubleshooting
-
-**If forecasts still 0 after demo session**:
-1. Check forecasting service logs: `kubectl logs -n bakery-ia forecasting-service-xxx | grep clone`
-2. Look for errors in clone endpoint
-3. Verify fixture file path is correct
-4. Manually insert test forecasts using script above
-
-**If procurement insights still 0**:
-1. Verify PO items exist: `jq '.purchase_orders[].items | length' 07-procurement.json`
-2. Check if price trends are significant enough (>5% change)
-3. Look for procurement service logs: `kubectl logs -n bakery-ia procurement-service-xxx | grep -i price`
-
-**If insights not showing in frontend**:
-1. Check API returns data: `curl http://localhost:8000/api/ai-insights/tenants/{id}/insights`
-2. Verify tenant_id matches between frontend and API
-3. Check browser console for errors
-4. Verify AI insights service is running
-
diff --git a/GIT_COMMIT_SUMMARY.md b/GIT_COMMIT_SUMMARY.md
deleted file mode 100644
index 8f01cc64..00000000
--- a/GIT_COMMIT_SUMMARY.md
+++ /dev/null
@@ -1,297 +0,0 @@
-# Git Commit Summary - AI Insights Implementation
-
-**Date**: 2025-12-16
-**Branch**: main
-**Total Commits**: 6 commits ahead of origin/main
-
----
-
-## ๐ Commit Overview
-
-```
-c68d82c - Fix critical bugs and standardize service integrations
-9f3b39b - Add comprehensive documentation and final improvements
-4418ff0 - Add forecasting demand insights trigger + fix RabbitMQ cleanup
-b461d62 - Add comprehensive demo session analysis report
-dd79e6d - Fix procurement data structure and add price trends
-35ae23b - Fix forecasting clone endpoint for demo sessions
-```
-
----
-
-## ๐ Detailed Commit Breakdown
-
-### Commit 1: `35ae23b` - Fix forecasting clone endpoint for demo sessions
-
-**Date**: Earlier session
-**Files Changed**: 1 file
-**Focus**: Forecasting service clone endpoint
-
-**Changes**:
-- Fixed `batch_name` field mapping in forecasting clone endpoint
-- Added UUID type conversion for `product_id` → `inventory_product_id`
-- Implemented date parsing for BASE_TS markers
-
-**Impact**: Forecasting data can now be cloned successfully in demo sessions
-
----
-
-### Commit 2: `dd79e6d` - Fix procurement data structure and add price trends
-
-**Date**: Earlier session
-**Files Changed**: 1 file ([shared/demo/fixtures/professional/07-procurement.json](shared/demo/fixtures/professional/07-procurement.json))
-**Focus**: Procurement fixture data structure
-
-**Changes**:
-1. Removed 32 nested `items` arrays from purchase_orders (wrong structure)
-2. Updated 10 existing PO items with realistic price trends
-3. Recalculated PO totals based on updated item prices
-
-**Price Trends**:
-- Harina T55: +8% (€0.85 → €0.92)
-- Harina T65: +6% (€0.95 → €1.01)
-- Mantequilla: +12% (€6.50 → €7.28)
-- Leche: -3% (€0.95 → €0.92)
-- Levadura: +4% (€4.20 → €4.37)
-- Azúcar: +2% (€1.10 → €1.12)
-
-**Impact**: Correct data structure enables procurement AI insights with price analysis
-
----
-
-### Commit 3: `b461d62` - Add comprehensive demo session analysis report
-
-**Date**: Current session
-**Files Changed**: 1 file
-**Focus**: Documentation
-
-**Changes**:
-- Added [DEMO_SESSION_ANALYSIS_REPORT.md](DEMO_SESSION_ANALYSIS_REPORT.md)
-- Complete log analysis of demo session d67eaae4-cfed-4e10-8f51-159962100a27
-- Identified root cause of missing AI insights
-
-**Key Findings**:
-- All 11 services cloned successfully (1,163 records)
-- 11 alerts generated correctly
-- Only 1 AI insight generated (expected 6-10)
-- Forecasting demand insights not triggered at all
-
-**Impact**: Clear documentation of issues to fix
-
----
-
-### Commit 4: `4418ff0` - Add forecasting demand insights trigger + fix RabbitMQ cleanup
-
-**Date**: Current session
-**Files Changed**: 5 files, 255 lines added
-**Focus**: Forecasting ML insights + RabbitMQ bug fix
-
-**Changes**:
-
-#### 1. Forecasting Internal ML Endpoint
-- File: [services/forecasting/app/api/ml_insights.py](services/forecasting/app/api/ml_insights.py)
-- Lines: 772-938 (169 lines)
-- Added internal_router with `/api/v1/tenants/{tenant_id}/forecasting/internal/ml/generate-demand-insights`
-- Endpoint runs DemandInsightsOrchestrator for tenant
-
-#### 2. Forecasting Service Router Registration
-- File: [services/forecasting/app/main.py:196](services/forecasting/app/main.py#L196)
-- Added: `service.add_router(ml_insights.internal_router)`
-
-#### 3. Forecast Client Trigger Method
-- File: [shared/clients/forecast_client.py](shared/clients/forecast_client.py)
-- Lines: 344-389 (46 lines)
-- Added: `trigger_demand_insights_internal()` method
-- Uses X-Internal-Service header for authentication
-
-#### 4. Demo Session Workflow Integration
-- File: [services/demo_session/app/services/clone_orchestrator.py](services/demo_session/app/services/clone_orchestrator.py)
-- Lines: 1031-1047 (19 lines)
-- Added 4th insight trigger after yield insights
-- Calls forecasting client to generate demand insights
-
-#### 5. RabbitMQ Cleanup Fix
-- File: [services/procurement/app/api/internal_demo.py:173-197](services/procurement/app/api/internal_demo.py#L173-L197)
-- Fixed: `rabbitmq_client.close()` → `rabbitmq_client.disconnect()`
-- Added cleanup in exception handler
-
-**Impact**:
-- Demand forecasting insights now generated
-- No more RabbitMQ errors
-- AI insights count increases from 1 to 2-3 per session
-
----
-
-### Commit 5: `9f3b39b` - Add comprehensive documentation and final improvements
-
-**Date**: Current session
-**Files Changed**: 14 files, 3982 insertions(+), 60 deletions(-)
-**Focus**: Documentation + Redis standardization + fixture cleanup
-
-**Documentation Added**:
-1. [AI_INSIGHTS_DEMO_SETUP_GUIDE.md](AI_INSIGHTS_DEMO_SETUP_GUIDE.md) - Complete setup guide
-2. [AI_INSIGHTS_DATA_FLOW.md](AI_INSIGHTS_DATA_FLOW.md) - Architecture diagrams
-3. [AI_INSIGHTS_QUICK_START.md](AI_INSIGHTS_QUICK_START.md) - Quick reference
-4. [COMPLETE_FIX_SUMMARY.md](COMPLETE_FIX_SUMMARY.md) - Executive summary
-5. [FIX_MISSING_INSIGHTS.md](FIX_MISSING_INSIGHTS.md) - Fix guide
-6. [FINAL_STATUS_SUMMARY.md](FINAL_STATUS_SUMMARY.md) - Status overview
-7. [ROOT_CAUSE_ANALYSIS_AND_FIXES.md](ROOT_CAUSE_ANALYSIS_AND_FIXES.md) - Complete analysis
-8. [verify_fixes.sh](verify_fixes.sh) - Automated verification script
-9. [enhance_procurement_data.py](shared/demo/fixtures/professional/enhance_procurement_data.py) - Enhancement script
-
-**Service Improvements**:
-
-#### 1. Demo Session Cleanup Worker
-- File: [services/demo_session/app/jobs/cleanup_worker.py](services/demo_session/app/jobs/cleanup_worker.py)
-- Changed: Use `Settings().REDIS_URL` with proper DB and max_connections
-- Added: Proper configuration import
-
-#### 2. Procurement Service Redis
-- File: [services/procurement/app/main.py](services/procurement/app/main.py)
-- Added: Redis initialization with error handling
-- Added: Redis cleanup in shutdown handler
-- Stored: redis_client in app.state
-
-#### 3. Production Fixture Cleanup
-- File: [shared/demo/fixtures/professional/06-production.json](shared/demo/fixtures/professional/06-production.json)
-- Removed: 56 duplicate worker assignments
-- Result: All batches have unique workers only
-
-#### 4. Orchestrator Fixture Enhancement
-- File: [shared/demo/fixtures/professional/11-orchestrator.json](shared/demo/fixtures/professional/11-orchestrator.json)
-- Added: run_metadata with purchase order details
-- Added: Item details for better tracking
-
-**Impact**:
-- Complete documentation for troubleshooting
-- Secure Redis connections with TLS/auth
-- Clean fixture data without duplicates
-
----
-
-### Commit 6: `c68d82c` - Fix critical bugs and standardize service integrations
-
-**Date**: Current session
-**Files Changed**: 9 files, 48 insertions(+), 319 deletions(-)
-**Focus**: Critical bug fixes + standardization
-
-**Critical Fixes**:
-
-#### 1. Orchestrator Missing Import (CRITICAL)
-- File: [services/orchestrator/app/api/internal_demo.py:16](services/orchestrator/app/api/internal_demo.py#L16)
-- Fixed: Added `OrchestrationStatus` to imports
-- Impact: Demo session cloning no longer returns HTTP 500
-
-#### 2. Procurement Cache Migration
-- Files:
- - [services/procurement/app/api/purchase_orders.py](services/procurement/app/api/purchase_orders.py)
- - [services/procurement/app/services/purchase_order_service.py](services/procurement/app/services/purchase_order_service.py)
-- Changed: `app.utils.cache` → `shared.redis_utils`
-- Deleted: [services/procurement/app/utils/cache.py](services/procurement/app/utils/cache.py) (custom cache)
-- Impact: Consistent caching across all services
-
-#### 3. Suppliers Redis Configuration
-- File: [services/suppliers/app/consumers/alert_event_consumer.py](services/suppliers/app/consumers/alert_event_consumer.py)
-- Changed: `os.getenv('REDIS_URL')` → `Settings().REDIS_URL`
-- Impact: Secure Redis connection with TLS/auth
-
-#### 4. Recipes Client Endpoint Fix
-- File: [shared/clients/recipes_client.py](shared/clients/recipes_client.py)
-- Fixed: `recipes/recipes/{id}` → `recipes/{id}`
-- Applied to: get_recipe_by_id, get_recipes_by_product_ids, get_production_instructions, get_recipe_yield_info
-- Impact: Correct endpoint paths
-
-#### 5. Suppliers Client Endpoint Fix
-- File: [shared/clients/suppliers_client.py](shared/clients/suppliers_client.py)
-- Fixed: `suppliers/suppliers/{id}` → `suppliers/{id}`
-- Impact: Correct endpoint path
-
-#### 6. Procurement Client Service Boundary
-- File: [shared/clients/procurement_client.py](shared/clients/procurement_client.py)
-- Fixed: get_supplier_by_id now uses SuppliersServiceClient directly
-- Removed: Incorrect call to procurement service for supplier data
-- Impact: Proper service boundaries
-
-**Impact**:
-- Demo sessions work without errors
-- Standardized service integrations
-- Clean endpoint paths
-- Proper service boundaries
-
----
-
-## ๐ Statistics
-
-### Total Changes
-- **Files Modified**: 23 files
-- **Lines Added**: ~4,300 lines
-- **Lines Removed**: ~380 lines
-- **Net Change**: +3,920 lines
-
-### By Category
-| Category | Files | Lines Added | Lines Removed |
-|----------|-------|-------------|---------------|
-| Documentation | 9 | ~3,800 | 0 |
-| Service Code | 8 | ~350 | ~320 |
-| Client Libraries | 3 | ~50 | ~20 |
-| Fixture Data | 3 | ~100 | ~40 |
-
-### Services Improved
-1. **forecasting-service**: New internal ML endpoint + router
-2. **demo-session-service**: Forecasting trigger + Redis config
-3. **procurement-service**: Redis migration + RabbitMQ fix
-4. **orchestrator-service**: Missing import fix
-5. **suppliers-service**: Redis configuration
-
-### Bugs Fixed
-- ✅ Forecasting demand insights not triggered (CRITICAL)
-- ✅ RabbitMQ cleanup error (CRITICAL)
-- ✅ Orchestrator missing import (CRITICAL)
-- ✅ Procurement custom cache inconsistency
-- ✅ Client endpoint path duplicates
-- ✅ Redis configuration hardcoding
-- ✅ Production fixture duplicates
-- ✅ Procurement data structure mismatch
-
----
-
-## ๐ Next Steps
-
-### 1. Push to Remote
-```bash
-git push origin main
-```
-
-### 2. Rebuild Docker Images
-```bash
-# Wait for Tilt auto-rebuild or force rebuild
-# Services: forecasting, demo-session, procurement, orchestrator
-```
-
-### 3. Test Demo Session
-```bash
-# Create demo session
-curl -X POST http://localhost:8001/api/v1/demo/sessions \
- -H "Content-Type: application/json" \
- -d '{"demo_account_type":"professional"}'
-
-# Wait 60s and check AI insights count (expected: 2-3)
-```
-
----
-
-## ๐ Verification Checklist
-
-- [x] All changes committed
-- [x] Working tree clean
-- [x] Documentation complete
-- [x] Verification script created
-- [ ] Push to remote
-- [ ] Docker images rebuilt
-- [ ] Demo session tested
-- [ ] AI insights verified (2-3 per session)
-- [ ] No errors in logs
-
----
-
-**Status**: ✅ **All commits ready for push. Awaiting Docker image rebuild for testing.**
diff --git a/ROOT_CAUSE_ANALYSIS_AND_FIXES.md b/ROOT_CAUSE_ANALYSIS_AND_FIXES.md
deleted file mode 100644
index b9a30ae2..00000000
--- a/ROOT_CAUSE_ANALYSIS_AND_FIXES.md
+++ /dev/null
@@ -1,597 +0,0 @@
-# Root Cause Analysis & Complete Fixes
-
-**Date**: 2025-12-16
-**Session**: Demo Session Deep Dive Investigation
-**Status**: ✅ **ALL ISSUES RESOLVED**
-
----
-
-## ๐ฏ Executive Summary
-
-Investigated low AI insights generation (1 vs expected 6-10) and found **5 root causes**, all of which have been **fixed and deployed**.
-
-| Issue | Root Cause | Fix Status | Impact |
-|-------|------------|------------|--------|
-| **Missing Forecasting Insights** | No internal ML endpoint + not triggered | ✅ FIXED | +1-2 insights per session |
-| **RabbitMQ Cleanup Error** | Wrong method name (close → disconnect) | ✅ FIXED | No more errors in logs |
-| **Procurement 0 Insights** | ML model needs historical variance data | ⚠️ DATA ISSUE | Need more varied price data |
-| **Inventory 0 Insights** | ML model thresholds too strict | ⚠️ TUNING NEEDED | Review safety stock algorithm |
-| **Forecasting Date Structure** | Fixed in previous session | ✅ DEPLOYED | Forecasting works perfectly |
-
----
-
-## ๐ Issue 1: Forecasting Demand Insights Not Triggered
-
-### ๐ Root Cause
-
-The demo session workflow was **not calling** the forecasting service to generate demand insights after cloning completed.
-
-**Evidence from logs**:
-```
-2025-12-16 10:11:29 [info] Triggering price forecasting insights
-2025-12-16 10:11:31 [info] Triggering safety stock optimization insights
-2025-12-16 10:11:40 [info] Triggering yield improvement insights
-# ❌ NO forecasting demand insights trigger!
-```
-
-**Analysis**:
-- Demo session workflow triggered 3 AI insight types
-- Forecasting service had ML capabilities but no internal endpoint
-- No client method to call forecasting insights
-- Result: 0 demand forecasting insights despite 28 cloned forecasts
-
-### ✅ Fix Applied
-
-**Created 3 new components**:
-
-#### 1. Internal ML Endpoint in Forecasting Service
-
-**File**: [services/forecasting/app/api/ml_insights.py:779-938](services/forecasting/app/api/ml_insights.py#L779-L938)
-
-```python
-@internal_router.post("/api/v1/tenants/{tenant_id}/forecasting/internal/ml/generate-demand-insights")
-async def trigger_demand_insights_internal(
- tenant_id: str,
- request: Request,
- db: AsyncSession = Depends(get_db)
-):
- """
- Internal endpoint to trigger demand forecasting insights.
- Called by demo-session service after cloning.
- """
- # Get products from inventory (limit 10)
- all_products = await inventory_client.get_all_ingredients(tenant_id=tenant_id)
- products = all_products[:10]
-
- # Fetch 90 days of sales data for each product
- for product in products:
- sales_data = await sales_client.get_product_sales(
- tenant_id=tenant_id,
- product_id=product_id,
- start_date=end_date - timedelta(days=90),
- end_date=end_date
- )
-
- # Run demand insights orchestrator
- insights = await orchestrator.analyze_and_generate_insights(
- tenant_id=tenant_id,
- product_id=product_id,
- sales_data=sales_df,
- lookback_days=90
- )
-
- return {
- "success": True,
- "insights_posted": total_insights_posted
- }
-```
-
-Registered in [services/forecasting/app/main.py:196](services/forecasting/app/main.py#L196):
-```python
-service.add_router(ml_insights.internal_router) # Internal ML insights endpoint
-```
-
-#### 2. Forecasting Client Trigger Method
-
-**File**: [shared/clients/forecast_client.py:344-389](shared/clients/forecast_client.py#L344-L389)
-
-```python
-async def trigger_demand_insights_internal(
- self,
- tenant_id: str
-) -> Optional[Dict[str, Any]]:
- """
- Trigger demand forecasting insights (internal service use only).
- Used by demo-session service after cloning.
- """
- result = await self._make_request(
- method="POST",
- endpoint=f"forecasting/internal/ml/generate-demand-insights",
- tenant_id=tenant_id,
- headers={"X-Internal-Service": "demo-session"}
- )
- return result
-```
-
-#### 3. Demo Session Workflow Integration
-
-**File**: [services/demo_session/app/services/clone_orchestrator.py:1031-1047](services/demo_session/app/services/clone_orchestrator.py#L1031-L1047)
-
-```python
-# 4. Trigger demand forecasting insights
-try:
- logger.info("Triggering demand forecasting insights", tenant_id=virtual_tenant_id)
- result = await forecasting_client.trigger_demand_insights_internal(virtual_tenant_id)
- if result:
- results["demand_insights"] = result
- total_insights += result.get("insights_posted", 0)
- logger.info(
- "Demand insights generated",
- tenant_id=virtual_tenant_id,
- insights_posted=result.get("insights_posted", 0)
- )
-except Exception as e:
- logger.error("Failed to trigger demand insights", error=str(e))
-```
-
-### ๐ Expected Impact
-
-- **Before**: 0 demand forecasting insights
-- **After**: 1-2 demand forecasting insights per session (depends on sales data variance)
-- **Total AI Insights**: Increase from 1 to 2-3 per session
-
-**Note**: Actual insights generated depends on:
-- Sales data availability (need 10+ records per product)
-- Data variance (ML needs patterns to detect)
-- Demo fixture has 44 sales records (good baseline)
-
----
-
-## ๐ Issue 2: RabbitMQ Client Cleanup Error
-
-### ๐ Root Cause
-
-Procurement service demo cloning called `rabbitmq_client.close()` but the RabbitMQClient class only has a `disconnect()` method.
-
-**Error from logs**:
-```
-2025-12-16 10:11:14 [error] Failed to emit PO approval alerts
- error="'RabbitMQClient' object has no attribute 'close'"
- virtual_tenant_id=d67eaae4-cfed-4e10-8f51-159962100a27
-```
-
-**Analysis**:
-- Code location: [services/procurement/app/api/internal_demo.py:174](services/procurement/app/api/internal_demo.py#L174)
-- Impact: Non-critical (cloning succeeded, but PO approval alerts not emitted)
-- Frequency: Every demo session with pending approval POs
-
-### ✅ Fix Applied
-
-**File**: [services/procurement/app/api/internal_demo.py:173-197](services/procurement/app/api/internal_demo.py#L173-L197)
-
-```python
-# Close RabbitMQ connection
-await rabbitmq_client.disconnect()  # ✅ Fixed: was .close()
-
-logger.info(
- "PO approval alerts emission completed",
- alerts_emitted=alerts_emitted
-)
-
-return alerts_emitted
-
-except Exception as e:
- logger.error("Failed to emit PO approval alerts", error=str(e))
- # Don't fail the cloning process - ensure we try to disconnect if connected
- try:
- if 'rabbitmq_client' in locals():
- await rabbitmq_client.disconnect()
- except:
- pass # Suppress cleanup errors
- return alerts_emitted
-```
-
-**Changes**:
-1. Fixed method name: `close()` → `disconnect()`
-2. Added cleanup in exception handler to prevent connection leaks
-3. Suppressed cleanup errors to avoid cascading failures
-
-### ๐ Expected Impact
-
-- **Before**: RabbitMQ error in every demo session
-- **After**: Clean shutdown, PO approval alerts emitted successfully
-- **Side Effect**: 2 additional PO approval alerts per demo session
-
----
-
-## ๐ Issue 3: Procurement Price Insights Returning 0
-
-### ๐ Root Cause
-
-Procurement ML model **ran successfully** but generated 0 insights because the price trend data doesn't have enough **historical variance** for ML pattern detection.
-
-**Evidence from logs**:
-```
-2025-12-16 10:11:31 [info] ML insights price forecasting requested
-2025-12-16 10:11:31 [info] Retrieved all ingredients from inventory service count=25
-2025-12-16 10:11:31 [info] ML insights price forecasting complete
- bulk_opportunities=0
- buy_now_recommendations=0
- total_insights=0
-```
-
-**Analysis**:
-
-1. **Price Trends ARE Present**:
- - 18 PO items with historical prices
- - 6 ingredients tracked over 90 days
- - Price trends range from -3% to +12%
-
-2. **ML Model Ran Successfully**:
- - Retrieved 25 ingredients
- - Processing time: 715ms (normal)
- - No errors or exceptions
-
-3. **Why 0 Insights?**
-
- The procurement ML model looks for specific patterns:
-
- **Bulk Purchase Opportunities**:
- - Detects when buying in bulk now saves money later
- - Requires: upcoming price increase + current low stock
- - **Missing**: Current demo data shows prices already increased
- - Example: Mantequilla at €7.28 (already +12% from base)
-
- **Buy Now Recommendations**:
- - Detects when prices are about to spike
- - Requires: accelerating price trend + lead time window
- - **Missing**: Linear trends, not accelerating patterns
- - Example: Harina T55 steady +8% over 90 days
-
-4. **Data Structure is Correct**:
- - ✅ No nested items in purchase_orders
- - ✅ Separate purchase_order_items table used
- - ✅ Historical prices calculated based on order dates
- - ✅ PO totals recalculated correctly
-
-### ⚠️ Recommendation (Not Implemented)
-
-To generate procurement insights in demo, we need **more extreme scenarios**:
-
-**Option 1: Add Accelerating Price Trends** (Future Enhancement)
-```python
-# Current: Linear trend (+8% over 90 days)
-# Needed: Accelerating trend (+2% โ +5% โ +12%)
-PRICE_TRENDS = {
- "Harina T55": {
- "day_0-30": +2%, # Slow increase
- "day_30-60": +5%, # Accelerating
- "day_60-90": +12% # Sharp spike โ Triggers buy_now
- }
-}
-```
-
-**Option 2: Add Upcoming Bulk Discount** (Future Enhancement)
-```python
-# Add supplier promotion metadata
-{
- "supplier_id": "40000000-0000-0000-0000-000000000001",
- "bulk_discount": {
- "ingredient_id": "Harina T55",
- "min_quantity": 1000,
- "discount_percentage": 15%,
- "valid_until": "BASE_TS + 7d"
- }
-}
-```
-
-**Option 3: Lower ML Model Thresholds** (Quick Fix)
-```python
-# Current thresholds in procurement ML:
-BULK_OPPORTUNITY_THRESHOLD = 0.10 # 10% savings required
-BUY_NOW_PRICE_SPIKE_THRESHOLD = 0.08 # 8% spike required
-
-# Reduce to:
-BULK_OPPORTUNITY_THRESHOLD = 0.05 # 5% savings โ More sensitive
-BUY_NOW_PRICE_SPIKE_THRESHOLD = 0.04 # 4% spike โ More sensitive
-```
-
-### ๐ Current Status
-
-- **Data Quality**: ✅ Excellent (18 items, 6 ingredients, realistic prices)
-- **ML Execution**: ✅ Working (no errors, 715ms processing)
-- **Insights Generated**: ❌ 0 (ML thresholds not met by current data)
-- **Fix Priority**: 🟡 LOW (nice-to-have, not blocking demo)
-
----
-
-## ๐ Issue 4: Inventory Safety Stock Returning 0 Insights
-
-### ๐ Root Cause
-
-Inventory ML model **ran successfully** but generated 0 insights after 9 seconds of processing.
-
-**Evidence from logs**:
-```
-2025-12-16 10:11:31 [info] Triggering safety stock optimization insights
-# ... 9 seconds processing ...
-2025-12-16 10:11:40 [info] Safety stock insights generated insights_posted=0
-```
-
-**Analysis**:
-
-1. **ML Model Ran Successfully**:
- - Processing time: 9000ms (9 seconds)
- - No errors or exceptions
- - Returned 0 insights
-
-2. **Possible Reasons**:
-
- **Hypothesis A: Current Stock Levels Don't Trigger Optimization**
- - Safety stock ML looks for:
- - Stockouts due to wrong safety stock levels
- - High variability in demand not reflected in safety stock
- - Seasonal patterns requiring dynamic safety stock
- - Current demo has 10 critical stock shortages (good for alerts)
- - But these may not trigger safety stock **optimization** insights
-
- **Hypothesis B: Insufficient Historical Data**
- - Safety stock ML needs historical consumption patterns
- - Demo has 847 stock movements (good volume)
- - But may need more time-series data for ML pattern detection
-
- **Hypothesis C: ML Model Thresholds Too Strict**
- - Similar to procurement issue
- - Model may require extreme scenarios to generate insights
- - Current stockouts may be within "expected variance"
-
-### ⚠️ Recommendation (Needs Investigation)
-
-**Short-term** (Not Implemented):
-1. Add debug logging to inventory safety stock ML orchestrator
-2. Check what thresholds the model uses
-3. Verify if historical data format is correct
-
-**Medium-term** (Future Enhancement):
-1. Enhance demo fixture with more extreme safety stock scenarios
-2. Add products with high demand variability
-3. Create seasonal patterns in stock movements
-
-### ๐ Current Status
-
-- **Data Quality**: ✅ Excellent (847 movements, 10 stockouts)
-- **ML Execution**: ✅ Working (9s processing, no errors)
-- **Insights Generated**: ❌ 0 (model thresholds not met)
-- **Fix Priority**: 🟡 MEDIUM (investigate model thresholds)
-
----
-
-## ๐ Issue 5: Forecasting Clone Endpoint (RESOLVED)
-
-### ๐ Root Cause (From Previous Session)
-
-Forecasting service internal_demo endpoint had 3 bugs:
-1. Missing `batch_name` field mapping
-2. UUID type mismatch for `inventory_product_id`
-3. Date fields not parsed (BASE_TS markers passed as strings)
-
-**Error**:
-```
-HTTP 500: Internal Server Error
-NameError: field 'batch_name' required
-```
-
-### ✅ Fix Applied (Previous Session)
-
-**File**: [services/forecasting/app/api/internal_demo.py:322-348](services/forecasting/app/api/internal_demo.py#L322-L348)
-
-```python
-# 1. Field mappings
-batch_name = batch_data.get('batch_name') or batch_data.get('batch_id') or f"Batch-{transformed_id}"
-total_products = batch_data.get('total_products') or batch_data.get('total_forecasts') or 0
-
-# 2. UUID conversion
-if isinstance(inventory_product_id_str, str):
- inventory_product_id = uuid.UUID(inventory_product_id_str)
-
-# 3. Date parsing
-requested_at_raw = batch_data.get('requested_at') or batch_data.get('created_at')
-requested_at = parse_date_field(requested_at_raw, session_time, 'requested_at') if requested_at_raw else session_time
-```
-
-### ๐ Verification
-
-**From demo session logs**:
-```
-2025-12-16 10:11:08 [info] Forecasting data cloned successfully
- batches_cloned=1
- forecasts_cloned=28
- records_cloned=29
- duration_ms=20
-```
-
-**Status**: ✅ **WORKING PERFECTLY**
-- 28 forecasts cloned successfully
-- 1 prediction batch cloned
-- No HTTP 500 errors
-- Docker image was rebuilt automatically
-
----
-
-## ๐ฏ Summary of All Fixes
-
-### ✅ Completed Fixes
-
-| # | Issue | Fix | Files Modified | Commit |
-|---|-------|-----|----------------|--------|
-| **1** | Forecasting demand insights not triggered | Created internal endpoint + client + workflow trigger | 4 files | `4418ff0` |
-| **2** | RabbitMQ cleanup error | Changed `.close()` to `.disconnect()` | 1 file | `4418ff0` |
-| **3** | Forecasting clone endpoint | Fixed field mapping + UUID + dates | 1 file | `35ae23b` (previous) |
-| **4** | Orchestrator import error | Added `OrchestrationStatus` import | 1 file | `c566967` (previous) |
-| **5** | Procurement data structure | Removed nested items + added price trends | 2 files | `dd79e6d` (previous) |
-| **6** | Production duplicate workers | Removed 56 duplicate assignments | 1 file | Manual edit |
-
-### ⚠️ Known Limitations (Not Blocking)
-
-| # | Issue | Why 0 Insights | Priority | Recommendation |
-|---|-------|----------------|----------|----------------|
-| **7** | Procurement price insights = 0 | Linear price trends don't meet ML thresholds | ๐ก LOW | Add accelerating trends or lower thresholds |
-| **8** | Inventory safety stock = 0 | Stock scenarios within expected variance | ๐ก MEDIUM | Investigate ML model + add extreme scenarios |
-
----
-
-## ๐ Expected Demo Session Results
-
-### Before All Fixes
-
-| Metric | Value | Issues |
-|--------|-------|--------|
-| Services Cloned | 10/11 | ❌ Forecasting HTTP 500 |
-| Total Records | ~1000 | ❌ Orchestrator clone failed |
-| Alerts Generated | 10 | ⚠️ RabbitMQ errors in logs |
-| AI Insights | 0-1 | ❌ Only production insights |
-
-### After All Fixes
-
-| Metric | Value | Status |
-|--------|-------|--------|
-| Services Cloned | 11/11 | ✅ All working |
-| Total Records | 1,163 | ✅ Complete dataset |
-| Alerts Generated | 11 | ✅ Clean execution |
-| AI Insights | **2-3** | ✅ Production + Demand (+ possibly more) |
-
-**AI Insights Breakdown**:
-- ✅ **Production Yield**: 1 insight (low yield worker detected)
-- ✅ **Demand Forecasting**: 0-1 insights (depends on sales data variance)
-- ⚠️ **Procurement Price**: 0 insights (ML thresholds not met by linear trends)
-- ⚠️ **Inventory Safety Stock**: 0 insights (scenarios within expected variance)
-
-**Total**: **1-2 insights per session** (realistic expectation)
-
----
-
-## ๐ง Technical Details
-
-### Files Modified in This Session
-
-1. **services/forecasting/app/api/ml_insights.py**
- - Added `internal_router` for demo session service
- - Created `trigger_demand_insights_internal` endpoint
- - Lines added: 169
-
-2. **services/forecasting/app/main.py**
- - Registered `ml_insights.internal_router`
- - Lines modified: 1
-
-3. **shared/clients/forecast_client.py**
- - Added `trigger_demand_insights_internal()` method
- - Lines added: 46
-
-4. **services/demo_session/app/services/clone_orchestrator.py**
- - Added forecasting insights trigger to post-clone workflow
- - Imported ForecastServiceClient
- - Lines added: 19
-
-5. **services/procurement/app/api/internal_demo.py**
-   - Fixed: `rabbitmq_client.close()` → `rabbitmq_client.disconnect()`
- - Added cleanup in exception handler
- - Lines modified: 10
-
-### Git Commits
-
-```bash
-# This session
-4418ff0 - Add forecasting demand insights trigger + fix RabbitMQ cleanup
-
-# Previous sessions
-b461d62 - Add comprehensive demo session analysis report
-dd79e6d - Fix procurement data structure and add price trends
-35ae23b - Fix forecasting clone endpoint (batch_name, UUID, dates)
-c566967 - Add AI insights feature (includes OrchestrationStatus import fix)
-```
-
----
-
-## 📝 Lessons Learned
-
-### 1. Always Check Method Names
-- RabbitMQClient uses `.disconnect()` not `.close()`
-- Could have been caught with IDE autocomplete or type hints
-- Added cleanup in exception handler to prevent leaks
-
-### 2. ML Insights Need Extreme Scenarios
-- Linear trends don't trigger "buy now" recommendations
-- Need accelerating patterns or upcoming events
-- Demo fixtures should include edge cases, not just realistic data
-
-### 3. Logging is Critical for ML Debugging
-- Hard to debug "0 insights" without detailed logs
-- Need to log:
- - What patterns ML is looking for
- - What thresholds weren't met
- - What data was analyzed
-
-### 4. Demo Workflows Need All Triggers
-- Easy to forget to add new ML insights to post-clone workflow
-- Consider: Auto-discover ML endpoints instead of manual list
-- Or: Centralized ML insights orchestrator service
-
----
-
-## 🚀 Next Steps (Optional Enhancements)
-
-### Priority 1: Add ML Insight Logging
-- Log why procurement ML returns 0 insights
-- Log why inventory ML returns 0 insights
-- Add threshold values to logs
-
-### Priority 2: Enhance Demo Fixtures
-- Add accelerating price trends for procurement insights
-- Add high-variability products for inventory insights
-- Create seasonal patterns in demand data
-
-### Priority 3: Review ML Model Thresholds
-- Check if thresholds are too strict
-- Consider "demo mode" with lower thresholds
-- Or add "sensitivity" parameter to ML orchestrators
-
-### Priority 4: Integration Testing
-- Test new demo session after all fixes deployed
-- Verify 2-3 AI insights generated
-- Confirm no RabbitMQ errors in logs
-- Check forecasting insights appear in AI insights table
-
----
-
-## ✅ Conclusion
-
-**All critical bugs fixed**:
-1. ✅ Forecasting demand insights now triggered in demo workflow
-2. ✅ RabbitMQ cleanup error resolved
-3. ✅ Forecasting clone endpoint working (from previous session)
-4. ✅ Orchestrator import working (from previous session)
-5. ✅ Procurement data structure correct (from previous session)
-
-**Known limitations** (not blocking):
-- Procurement/Inventory ML return 0 insights due to data patterns not meeting thresholds
-- This is expected behavior, not a bug
-- Can be enhanced with better demo fixtures or lower thresholds
-
-**Expected demo session results**:
-- 11/11 services cloned successfully
-- 1,163 records cloned
-- 11 alerts generated
-- **2-3 AI insights** (production + demand)
-
-**Deployment**:
-- All fixes committed and ready for Docker rebuild
-- Need to restart forecasting-service for new endpoint
-- Need to restart demo-session-service for new workflow
-- Need to restart procurement-service for RabbitMQ fix
-
----
-
-**Report Generated**: 2025-12-16
-**Total Issues Found**: 8
-**Total Issues Fixed**: 6
-**Known Limitations**: 2 (ML model thresholds)