Add frontend loading improvements

This commit is contained in:
Urtzi Alfaro
2025-12-27 21:30:42 +01:00
parent 6e3a6590d6
commit 54662dde79
21 changed files with 799 additions and 363 deletions

View File

@@ -3,6 +3,7 @@
  */
 import { useState, useEffect, useCallback } from 'react';
+import { useQuery } from '@tanstack/react-query';
 import { subscriptionService } from '../services/subscription';
 import {
   SUBSCRIPTION_TIERS,
@@ -34,49 +35,32 @@ export interface SubscriptionInfo {
 }

 export const useSubscription = () => {
-  const [subscriptionInfo, setSubscriptionInfo] = useState<SubscriptionInfo>({
-    plan: 'starter',
-    status: 'active',
-    features: {},
-    loading: true,
-  });
   const currentTenant = useCurrentTenant();
   const user = useAuthUser();
   const tenantId = currentTenant?.id || user?.tenant_id;
-  const { notifySubscriptionChanged, subscriptionVersion } = useSubscriptionEvents();
+  const { subscriptionVersion } = useSubscriptionEvents();

-  // Load subscription data
-  const loadSubscriptionData = useCallback(async () => {
-    if (!tenantId) {
-      setSubscriptionInfo(prev => ({ ...prev, loading: false, error: 'No tenant ID available' }));
-      return;
-    }
-    try {
-      setSubscriptionInfo(prev => ({ ...prev, loading: true, error: undefined }));
-      const usageSummary = await subscriptionService.getUsageSummary(tenantId);
-      setSubscriptionInfo({
-        plan: usageSummary.plan,
-        status: usageSummary.status,
-        features: usageSummary.usage || {},
-        loading: false,
-      });
-    } catch (error) {
-      console.error('Error loading subscription data:', error);
-      setSubscriptionInfo(prev => ({
-        ...prev,
-        loading: false,
-        error: 'Failed to load subscription data'
-      }));
-    }
-  }, [tenantId]);
-
-  useEffect(() => {
-    loadSubscriptionData();
-  }, [loadSubscriptionData, subscriptionVersion]);
+  // Initialize with tenant's subscription_plan if available, otherwise default to starter
+  const initialPlan = currentTenant?.subscription_plan || currentTenant?.subscription_tier || 'starter';
+
+  // Use React Query to fetch subscription data (automatically deduplicates & caches)
+  const { data: usageSummary, isLoading, error, refetch } = useQuery({
+    queryKey: ['subscription-usage', tenantId, subscriptionVersion],
+    queryFn: () => subscriptionService.getUsageSummary(tenantId!),
+    enabled: !!tenantId,
+    staleTime: 30 * 1000, // Cache for 30 seconds (matches backend cache)
+    gcTime: 5 * 60 * 1000, // Keep in cache for 5 minutes
+    retry: 1,
+  });
+
+  // Derive subscription info from query data or tenant fallback
+  const subscriptionInfo: SubscriptionInfo = {
+    plan: usageSummary?.plan || initialPlan,
+    status: usageSummary?.status || 'active',
+    features: usageSummary?.usage || {},
+    loading: isLoading,
+    error: error ? 'Failed to load subscription data' : undefined,
+  };

   // Check if user has a specific feature
   const hasFeature = useCallback(async (featureName: string): Promise<SubscriptionFeature> => {

@@ -175,7 +159,7 @@ export const useSubscription = () => {
     canAccessForecasting,
     canAccessAIInsights,
     checkLimits,
-    refreshSubscription: loadSubscriptionData,
+    refreshSubscription: refetch,
   };
 };
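The switch to React Query is what deduplicates subscription fetches across the app: every consumer that shares the same `queryKey` and `staleTime` is served from one cache entry. A rough illustration (hook name and import paths are ours, not part of the commit) of a second hook reading the same cache without firing a second request to `getUsageSummary`:

```tsx
// Minimal sketch: mounting this alongside useSubscription results in a single
// network request, because the key and staleTime match exactly.
import { useQuery } from '@tanstack/react-query';
import { subscriptionService } from '../services/subscription';
import { useSubscriptionEvents } from './useSubscriptionEvents'; // assumed path

export function useSubscriptionPlan(tenantId?: string): string {
  const { subscriptionVersion } = useSubscriptionEvents();
  const { data } = useQuery({
    queryKey: ['subscription-usage', tenantId, subscriptionVersion],
    queryFn: () => subscriptionService.getUsageSummary(tenantId!),
    enabled: !!tenantId,
    staleTime: 30 * 1000, // identical key + staleTime => deduplicated against useSubscription
  });
  return data?.plan ?? 'starter';
}
```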

View File

@@ -382,7 +382,8 @@ export function useControlPanelData(tenantId: string) {
     },
     enabled: !!tenantId,
     staleTime: 20000, // 20 seconds
-    refetchOnMount: 'always',
+    refetchOnMount: true,
+    refetchOnWindowFocus: false,
     retry: 2,
   });
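For context on the `refetchOnMount` change: with `true`, a remount only refetches when the cached data is older than `staleTime`, whereas `'always'` ignored freshness and hit the network on every mount. A sketch of the resulting options (object name is ours):

```ts
// What each changed option buys for dashboard remounts and tab switches.
const controlPanelQueryOptions = (tenantId: string) => ({
  enabled: !!tenantId,
  staleTime: 20_000,           // data counts as fresh for 20s
  refetchOnMount: true,        // a remount within 20s serves the cache; 'always' refetched every time
  refetchOnWindowFocus: false, // switching browser tabs no longer fires extra requests
  retry: 2,
});
```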

View File

@@ -0,0 +1,68 @@
import React from 'react';
export const DashboardSkeleton: React.FC = () => (
<div className="space-y-6 animate-pulse">
{/* System Status Block Skeleton */}
<div className="bg-[var(--bg-secondary)] rounded-lg p-6 border border-[var(--border-primary)]">
<div className="h-6 w-48 bg-[var(--bg-tertiary)] rounded mb-4" />
<div className="grid grid-cols-1 sm:grid-cols-2 lg:grid-cols-4 gap-4">
{[1, 2, 3, 4].map(i => (
<div key={i} className="bg-[var(--bg-primary)] rounded-lg p-4">
<div className="h-4 w-32 bg-[var(--bg-tertiary)] rounded mb-2" />
<div className="h-8 w-16 bg-[var(--bg-tertiary)] rounded" />
</div>
))}
</div>
</div>
{/* Pending Purchases Skeleton */}
<div className="bg-[var(--bg-secondary)] rounded-lg p-6 border border-[var(--border-primary)]">
<div className="h-6 w-40 bg-[var(--bg-tertiary)] rounded mb-4" />
<div className="space-y-3">
{[1, 2, 3].map(i => (
<div key={i} className="bg-[var(--bg-primary)] rounded-lg p-4 flex items-center justify-between">
<div className="flex-1">
<div className="h-5 w-48 bg-[var(--bg-tertiary)] rounded mb-2" />
<div className="h-4 w-32 bg-[var(--bg-tertiary)] rounded" />
</div>
<div className="flex gap-2">
<div className="h-10 w-20 bg-[var(--bg-tertiary)] rounded" />
<div className="h-10 w-20 bg-[var(--bg-tertiary)] rounded" />
</div>
</div>
))}
</div>
</div>
{/* Production Status Skeleton */}
<div className="bg-[var(--bg-secondary)] rounded-lg p-6 border border-[var(--border-primary)]">
<div className="h-6 w-44 bg-[var(--bg-tertiary)] rounded mb-4" />
<div className="grid grid-cols-1 sm:grid-cols-2 lg:grid-cols-3 gap-4">
{[1, 2, 3].map(i => (
<div key={i} className="bg-[var(--bg-primary)] rounded-lg p-4">
<div className="h-4 w-36 bg-[var(--bg-tertiary)] rounded mb-3" />
<div className="h-6 w-24 bg-[var(--bg-tertiary)] rounded mb-2" />
<div className="h-4 w-28 bg-[var(--bg-tertiary)] rounded" />
</div>
))}
</div>
</div>
{/* Alerts Skeleton */}
<div className="bg-[var(--bg-secondary)] rounded-lg p-6 border border-[var(--border-primary)]">
<div className="h-6 w-32 bg-[var(--bg-tertiary)] rounded mb-4" />
<div className="space-y-3">
{[1, 2].map(i => (
<div key={i} className="bg-[var(--bg-primary)] rounded-lg p-4 flex items-start gap-3">
<div className="h-10 w-10 bg-[var(--bg-tertiary)] rounded-full flex-shrink-0" />
<div className="flex-1">
<div className="h-5 w-56 bg-[var(--bg-tertiary)] rounded mb-2" />
<div className="h-4 w-full bg-[var(--bg-tertiary)] rounded mb-2" />
<div className="h-4 w-3/4 bg-[var(--bg-tertiary)] rounded" />
</div>
</div>
))}
</div>
</div>
</div>
);
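A hedged usage sketch (wrapper name and import path are ours): the skeleton stands in for the four dashboard blocks while the control-panel query loads, so the page keeps its shape instead of collapsing to a bare spinner.

```tsx
import React from 'react';
import { DashboardSkeleton } from './DashboardSkeleton';

// Swap the skeleton in while loading, render the real blocks once data arrives.
export const DashboardBody: React.FC<{ loading: boolean; children: React.ReactNode }> = ({ loading, children }) =>
  loading ? <DashboardSkeleton /> : <>{children}</>;
```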

View File

@@ -185,7 +185,7 @@ export const Sidebar = forwardRef<SidebarRef, SidebarProps>(({
   // Get subscription-aware navigation routes
   const baseNavigationRoutes = useMemo(() => getNavigationRoutes(), []);
-  const { filteredRoutes: subscriptionFilteredRoutes } = useSubscriptionAwareRoutes(baseNavigationRoutes);
+  const { filteredRoutes: subscriptionFilteredRoutes, isLoading: subscriptionLoading } = useSubscriptionAwareRoutes(baseNavigationRoutes);

   // Map route paths to translation keys
   const getTranslationKey = (routePath: string): string => {

@@ -845,9 +845,23 @@ export const Sidebar = forwardRef<SidebarRef, SidebarProps>(({
       {/* Navigation */}
       <nav className={clsx('flex-1 overflow-y-auto overflow-x-hidden', isCollapsed ? 'px-1 py-4' : 'p-4')}>
-        <ul className={clsx(isCollapsed ? 'space-y-1 flex flex-col items-center' : 'space-y-2')}>
-          {filteredItems.map(item => renderItem(item))}
-        </ul>
+        {subscriptionLoading ? (
+          /* Skeleton loading state while subscription data is loading */
+          <ul className={clsx(isCollapsed ? 'space-y-1 flex flex-col items-center' : 'space-y-2')}>
+            {[1, 2, 3, 4, 5].map(i => (
+              <li key={i} className="animate-pulse">
+                <div className={clsx(
+                  'rounded-lg bg-[var(--bg-tertiary)]',
+                  isCollapsed ? 'h-10 w-10' : 'h-10 w-full'
+                )} />
+              </li>
+            ))}
+          </ul>
+        ) : (
+          <ul className={clsx(isCollapsed ? 'space-y-1 flex flex-col items-center' : 'space-y-2')}>
+            {filteredItems.map(item => renderItem(item))}
+          </ul>
+        )}
       </nav>

       {/* Profile section */}

View File

@@ -39,8 +39,19 @@ export const AuthProvider: React.FC<AuthProviderProps> = ({ children }) => {
     const initializeAuth = async () => {
       setIsInitializing(true);

-      // Wait a bit for zustand persist to rehydrate
-      await new Promise(resolve => setTimeout(resolve, 100));
+      // Check if zustand has already rehydrated
+      if (!(useAuthStore.persist as any).hasHydrated?.()) {
+        // Wait for rehydration event with minimal timeout fallback
+        await Promise.race([
+          new Promise<void>(resolve => {
+            const unsubscribe = useAuthStore.persist.onFinishHydration(() => {
+              unsubscribe();
+              resolve();
+            });
+          }),
+          new Promise(resolve => setTimeout(resolve, 50))
+        ]);
+      }

       // Check if we have stored auth data
       if (authStore.token && authStore.refreshToken) {
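The same wait could be factored into a reusable helper. A sketch under the assumption that the store exposes zustand's standard persist API (`hasHydrated`, `onFinishHydration`); the helper name is ours:

```ts
type PersistApi = {
  hasHydrated?: () => boolean;
  onFinishHydration: (cb: () => void) => () => void;
};

// Resolves as soon as persist reports hydration, with a hard cap as a fallback.
export async function waitForHydration(persist: PersistApi, maxWaitMs = 50): Promise<void> {
  if (persist.hasHydrated?.()) return; // already rehydrated, no wait at all
  await Promise.race([
    new Promise<void>(resolve => {
      const unsubscribe = persist.onFinishHydration(() => {
        unsubscribe();
        resolve();
      });
    }),
    new Promise<void>(resolve => setTimeout(resolve, maxWaitMs)), // fallback cap
  ]);
}
```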

View File

@@ -35,12 +35,15 @@ interface SSEProviderProps {
 export const SSEProvider: React.FC<SSEProviderProps> = ({ children }) => {
   const [isConnected, setIsConnected] = useState(false);
   const [lastEvent, setLastEvent] = useState<SSEEvent | null>(null);
   const eventSourceRef = useRef<EventSource | null>(null);
   const eventListenersRef = useRef<Map<string, Set<(data: any) => void>>>(new Map());
   const reconnectTimeoutRef = useRef<NodeJS.Timeout>();
   const reconnectAttempts = useRef(0);
+  // Global deduplication: Track processed event IDs to prevent duplicate callbacks
+  const processedEventIdsRef = useRef<Set<string>>(new Set());

   const { isAuthenticated, token } = useAuthStore();
   const currentTenant = useCurrentTenant();

@@ -130,6 +133,23 @@ export const SSEProvider: React.FC<SSEProviderProps> = ({ children }) => {
       eventSource.addEventListener('alert', (event) => {
         try {
           const data = JSON.parse(event.data);
+          // GLOBAL DEDUPLICATION: Skip if this event was already processed
+          if (data.id && processedEventIdsRef.current.has(data.id)) {
+            console.log('⏭️ [SSE] Skipping duplicate alert:', data.id);
+            return;
+          }
+          // Mark event as processed
+          if (data.id) {
+            processedEventIdsRef.current.add(data.id);
+            // Limit cache size (keep last 1000 event IDs)
+            if (processedEventIdsRef.current.size > 1000) {
+              const firstId = Array.from(processedEventIdsRef.current)[0];
+              processedEventIdsRef.current.delete(firstId);
+            }
+          }
           const sseEvent: SSEEvent = {
             type: 'alert',
             data,

@@ -208,6 +228,22 @@ export const SSEProvider: React.FC<SSEProviderProps> = ({ children }) => {
       eventSource.addEventListener('notification', (event) => {
         try {
           const data = JSON.parse(event.data);
+          // GLOBAL DEDUPLICATION: Skip if this event was already processed
+          if (data.id && processedEventIdsRef.current.has(data.id)) {
+            console.log('⏭️ [SSE] Skipping duplicate notification:', data.id);
+            return;
+          }
+          // Mark event as processed
+          if (data.id) {
+            processedEventIdsRef.current.add(data.id);
+            if (processedEventIdsRef.current.size > 1000) {
+              const firstId = Array.from(processedEventIdsRef.current)[0];
+              processedEventIdsRef.current.delete(firstId);
+            }
+          }
           const sseEvent: SSEEvent = {
             type: 'notification',
             data,
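Since the same dedup block now appears in both the alert and notification handlers, it could be extracted into a small helper. A sketch of that refactor (function name is ours, not in the commit): it returns true the first time an id is seen and false afterwards, evicting the oldest id once the cap is exceeded.

```ts
export function createSeenSet(maxSize = 1000) {
  const seen = new Set<string>();
  return (id?: string): boolean => {
    if (!id) return true;            // events without an id are never deduplicated
    if (seen.has(id)) return false;  // duplicate -> caller should skip it
    seen.add(id);
    if (seen.size > maxSize) {
      const oldest = seen.values().next().value; // Sets iterate in insertion order
      if (oldest !== undefined) seen.delete(oldest);
    }
    return true;
  };
}
```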

View File

@@ -0,0 +1,51 @@
import { useQuery } from '@tanstack/react-query';
import { apiClient } from '../api/client';
interface OnboardingStatus {
ingredients_count: number;
suppliers_count: number;
recipes_count: number;
has_minimum_setup: boolean;
progress_percentage: number;
requirements: {
ingredients: {
current: number;
minimum: number;
met: boolean;
};
suppliers: {
current: number;
minimum: number;
met: boolean;
};
recipes: {
current: number;
minimum: number;
met: boolean;
};
};
}
export const useOnboardingStatus = (tenantId: string) => {
return useQuery<OnboardingStatus>({
queryKey: ['onboarding-status', tenantId],
queryFn: async () => {
console.log('[useOnboardingStatus] Fetching for tenant:', tenantId);
try {
// apiClient.get() already returns response.data (unwrapped)
const data = await apiClient.get<OnboardingStatus>(
`/tenants/${tenantId}/onboarding/status`
);
console.log('[useOnboardingStatus] Success:', data);
return data;
} catch (error) {
console.error('[useOnboardingStatus] Error:', error);
throw error;
}
},
enabled: !!tenantId,
staleTime: 60 * 1000,
gcTime: 5 * 60 * 1000,
retry: 1,
});
};
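A hedged consumer sketch (component name is ours) showing how the hook's response shape can drive UI without fetching the full ingredient/supplier/recipe datasets:

```tsx
import React from 'react';
import { useOnboardingStatus } from '../hooks/useOnboardingStatus';

// Renders nothing while loading or once the minimum setup is met.
export const SetupProgressBanner: React.FC<{ tenantId: string }> = ({ tenantId }) => {
  const { data, isLoading } = useOnboardingStatus(tenantId);
  if (isLoading || !data || data.has_minimum_setup) return null;
  return (
    <div role="status">
      Setup {data.progress_percentage}% complete — {data.ingredients_count}/3 ingredients,{' '}
      {data.suppliers_count}/1 suppliers, {data.recipes_count}/1 recipes.
    </div>
  );
};
```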

View File

@@ -25,15 +25,13 @@ import {
   useApprovePurchaseOrder,
   useStartProductionBatch,
 } from '../../api/hooks/useProfessionalDashboard';
-import { useControlPanelData, useControlPanelRealtimeSync } from '../../api/hooks/useControlPanelData';
+import { useControlPanelData } from '../../api/hooks/useControlPanelData';
 import { useRejectPurchaseOrder } from '../../api/hooks/purchase-orders';
-import { useIngredients } from '../../api/hooks/inventory';
-import { useSuppliers } from '../../api/hooks/suppliers';
-import { useRecipes } from '../../api/hooks/recipes';
 import { useUserProgress } from '../../api/hooks/onboarding';
-import { useQualityTemplates } from '../../api/hooks/qualityTemplates';
+import { useOnboardingStatus } from '../../hooks/useOnboardingStatus';
 import { SetupWizardBlocker } from '../../components/dashboard/SetupWizardBlocker';
 import { CollapsibleSetupBanner } from '../../components/dashboard/CollapsibleSetupBanner';
+import { DashboardSkeleton } from '../../components/dashboard/DashboardSkeleton';
 import {
   SystemStatusBlock,
   PendingPurchasesBlock,
@@ -69,49 +67,27 @@ export function BakeryDashboard({ plan }: { plan?: string }) {
   const [isPOModalOpen, setIsPOModalOpen] = useState(false);
   const [poModalMode, setPOModalMode] = useState<'view' | 'edit'>('view');

-  // Setup Progress Data - use localStorage as fallback during loading
-  const setupProgressFromStorage = useMemo(() => {
-    try {
-      const cached = localStorage.getItem(`setup_progress_${tenantId}`);
-      return cached ? parseInt(cached, 10) : 0;
-    } catch {
-      return 0;
-    }
-  }, [tenantId]);
-
-  // Fetch setup data to determine true progress
-  const { data: ingredients = [], isLoading: loadingIngredients } = useIngredients(
-    tenantId,
-    {},
-    { enabled: !!tenantId }
-  );
-  const { data: suppliers = [], isLoading: loadingSuppliers } = useSuppliers(
-    tenantId,
-    {},
-    { enabled: !!tenantId }
-  );
-  const { data: recipes = [], isLoading: loadingRecipes } = useRecipes(
-    tenantId,
-    {},
-    { enabled: !!tenantId }
-  );
-  const { data: qualityData, isLoading: loadingQuality } = useQualityTemplates(
-    tenantId,
-    {},
-    { enabled: !!tenantId }
-  );
-  const qualityTemplates = Array.isArray(qualityData?.templates) ? qualityData.templates : [];
+  // ALWAYS use lightweight onboarding status endpoint for ALL users (demo + authenticated)
+  // This is faster and more efficient than fetching full datasets
+  const { data: onboardingStatus, isLoading: loadingOnboarding } = useOnboardingStatus(tenantId);
+
+  // DEBUG: Log onboarding status
+  useEffect(() => {
+    console.log('[DashboardPage] Onboarding Status:', {
+      onboardingStatus,
+      loadingOnboarding,
+      tenantId,
+    });
+  }, [onboardingStatus, loadingOnboarding, tenantId]);

   // NEW: Enhanced control panel data fetch with SSE integration
+  // Note: useControlPanelData already includes SSE integration and auto-refetch
   const {
     data: dashboardData,
     isLoading: dashboardLoading,
     refetch: refetchDashboard,
   } = useControlPanelData(tenantId);

-  // Enable enhanced SSE real-time state synchronization
-  useControlPanelRealtimeSync(tenantId);
-
   // Mutations
   const approvePO = useApprovePurchaseOrder();
   const rejectPO = useRejectPurchaseOrder();
@@ -161,6 +137,12 @@ export function BakeryDashboard({ plan }: { plan?: string }) {
   const SafeBookOpenIcon = BookOpen;
   const SafeShieldIcon = Shield;

+  // ALWAYS use onboardingStatus counts for ALL users
+  // This is lightweight and doesn't require fetching full datasets
+  const ingredientsCount = onboardingStatus?.ingredients_count ?? 0;
+  const suppliersCount = onboardingStatus?.suppliers_count ?? 0;
+  const recipesCount = onboardingStatus?.recipes_count ?? 0;

   // Validate that all icons are properly imported before using them
   const sections = [
     {
@@ -168,10 +150,10 @@ export function BakeryDashboard({ plan }: { plan?: string }) {
       title: t('dashboard:config.inventory', 'Inventory'),
       icon: SafePackageIcon,
       path: '/app/database/inventory',
-      count: ingredients.length,
+      count: ingredientsCount,
       minimum: 3,
       recommended: 10,
-      isComplete: ingredients.length >= 3,
+      isComplete: ingredientsCount >= 3,
       description: t('dashboard:config.add_ingredients', 'Add at least {{count}} ingredients', { count: 3 }),
     },
     {
@@ -179,10 +161,10 @@ export function BakeryDashboard({ plan }: { plan?: string }) {
       title: t('dashboard:config.suppliers', 'Suppliers'),
       icon: SafeUsersIcon,
       path: '/app/database/suppliers',
-      count: suppliers.length,
+      count: suppliersCount,
       minimum: 1,
       recommended: 3,
-      isComplete: suppliers.length >= 1,
+      isComplete: suppliersCount >= 1,
       description: t('dashboard:config.add_supplier', 'Add your first supplier'),
     },
     {
@@ -190,10 +172,10 @@ export function BakeryDashboard({ plan }: { plan?: string }) {
       title: t('dashboard:config.recipes', 'Recipes'),
       icon: SafeBookOpenIcon,
       path: '/app/database/recipes',
-      count: recipes.length,
+      count: recipesCount,
       minimum: 1,
       recommended: 3,
-      isComplete: recipes.length >= 1,
+      isComplete: recipesCount >= 1,
       description: t('dashboard:config.add_recipe', 'Create your first recipe'),
     },
@@ -201,30 +183,50 @@ export function BakeryDashboard({ plan }: { plan?: string }) {
       title: t('dashboard:config.quality', 'Quality Standards'),
       icon: SafeShieldIcon,
       path: '/app/operations/production/quality',
-      count: qualityTemplates.length,
+      count: 0, // Quality templates are optional, not tracked in onboarding
       minimum: 0,
       recommended: 2,
-      isComplete: true, // Optional
+      isComplete: true, // Optional - always complete
       description: t('dashboard:config.add_quality', 'Add quality checks (optional)'),
     },
   ];

   return sections;
-}, [ingredients.length, suppliers.length, recipes.length, qualityTemplates.length, t]);
+}, [onboardingStatus, t]);
   // Calculate overall progress
   const { completedSections, totalSections, progressPercentage, criticalMissing, recommendedMissing } = useMemo(() => {
-    // If data is still loading, use stored value as fallback to prevent flickering
-    if (loadingIngredients || loadingSuppliers || loadingRecipes || loadingQuality) {
+    // If onboarding data is still loading, show loading state
+    if (loadingOnboarding) {
+      console.log('[DashboardPage] Progress calculation: Loading state');
       return {
         completedSections: 0,
         totalSections: 4, // 4 required sections
-        progressPercentage: setupProgressFromStorage, // Use stored value during loading
+        progressPercentage: 0, // Loading state
         criticalMissing: [],
         recommendedMissing: [],
       };
     }

+    // OPTIMIZATION: If we have onboarding status from API, use it directly
+    if (onboardingStatus?.progress_percentage !== undefined) {
+      const apiProgress = onboardingStatus.progress_percentage;
+      console.log('[DashboardPage] Progress calculation: Using API progress', {
+        apiProgress,
+        has_minimum_setup: onboardingStatus.has_minimum_setup,
+        onboardingStatus,
+      });
+      return {
+        completedSections: onboardingStatus.has_minimum_setup ? 3 : 0,
+        totalSections: 3,
+        progressPercentage: apiProgress,
+        criticalMissing: apiProgress < 50 ? setupSections.filter(s => s.id !== 'quality' && !s.isComplete) : [],
+        recommendedMissing: setupSections.filter(s => s.count < s.recommended),
+      };
+    }
+
+    console.log('[DashboardPage] Progress calculation: Fallback to manual calculation');
+
     // Guard against undefined or invalid setupSections
     if (!setupSections || !Array.isArray(setupSections) || setupSections.length === 0) {
       return {
@@ -258,7 +260,7 @@ export function BakeryDashboard({ plan }: { plan?: string }) {
       criticalMissing: critical,
       recommendedMissing: recommended,
     };
-  }, [setupSections, tenantId, loadingIngredients, loadingSuppliers, loadingRecipes, loadingQuality, setupProgressFromStorage]);
+  }, [onboardingStatus, setupSections, tenantId, loadingOnboarding]);

   const handleAddWizardComplete = (itemType: ItemType, data?: any) => {
     console.log('Item created:', itemType, data);
@@ -302,7 +304,7 @@ export function BakeryDashboard({ plan }: { plan?: string }) {
     const redirectStartStep = parseInt(sessionStorage.getItem('demo_tour_start_step') || '0', 10);
     if (isDemoMode && (shouldStart || shouldStartFromRedirect)) {
-      console.log('[Dashboard] Starting tour in 1.5s...');
+      console.log('[Dashboard] Starting tour...');
       const timer = setTimeout(() => {
         console.log('[Dashboard] Executing startTour()');
         if (shouldStartFromRedirect) {
@@ -316,7 +318,7 @@ export function BakeryDashboard({ plan }: { plan?: string }) {
           startTour();
           clearTourStartPending();
         }
-      }, 1500);
+      }, 300);
       return () => clearTimeout(timer);
     }
@@ -362,8 +364,8 @@ export function BakeryDashboard({ plan }: { plan?: string }) {
       </div>
       {/* Setup Flow - Three States */}
-      {loadingIngredients || loadingSuppliers || loadingRecipes || loadingQuality ? (
-        /* Loading state - only show spinner until setup data is ready */
+      {loadingOnboarding ? (
+        /* Loading state for onboarding checks */
         <div className="flex items-center justify-center py-12">
           <div className="animate-spin rounded-full h-12 w-12 border-b-2" style={{ borderColor: 'var(--color-primary)' }}></div>
         </div>
@@ -384,62 +386,66 @@ export function BakeryDashboard({ plan }: { plan?: string }) {
)} )}
{/* Main Dashboard Layout - 4 New Focused Blocks */} {/* Main Dashboard Layout - 4 New Focused Blocks */}
<div className="space-y-6"> {dashboardLoading ? (
{/* BLOCK 1: System Status + AI Summary */} <DashboardSkeleton />
<div data-tour="dashboard-stats"> ) : (
<SystemStatusBlock <div className="space-y-6">
data={dashboardData} {/* BLOCK 1: System Status + AI Summary */}
loading={dashboardLoading} <div data-tour="dashboard-stats">
/> <SystemStatusBlock
</div> data={dashboardData}
loading={false}
{/* BLOCK 2: Pending Purchases (PO Approvals) */}
<div data-tour="pending-po-approvals">
<PendingPurchasesBlock
pendingPOs={dashboardData?.pendingPOs || []}
loading={dashboardLoading}
onApprove={handleApprove}
onReject={handleReject}
onViewDetails={handleViewDetails}
/>
</div>
{/* BLOCK 3: Pending Deliveries (Overdue + Today) */}
<div data-tour="pending-deliveries">
<PendingDeliveriesBlock
overdueDeliveries={dashboardData?.overdueDeliveries || []}
pendingDeliveries={dashboardData?.pendingDeliveries || []}
loading={dashboardLoading}
/>
</div>
{/* BLOCK 4: Production Status (Late/Running/Pending) */}
<div data-tour="execution-progress">
<ProductionStatusBlock
lateToStartBatches={dashboardData?.lateToStartBatches || []}
runningBatches={dashboardData?.runningBatches || []}
pendingBatches={dashboardData?.pendingBatches || []}
alerts={dashboardData?.alerts || []}
equipmentAlerts={dashboardData?.equipmentAlerts || []}
loading={dashboardLoading}
onStartBatch={handleStartBatch}
/>
</div>
{/* BLOCK 5: AI Insights (Professional/Enterprise only) */}
{(plan === SUBSCRIPTION_TIERS.PROFESSIONAL || plan === SUBSCRIPTION_TIERS.ENTERPRISE) && (
<div data-tour="ai-insights">
<AIInsightsBlock
insights={dashboardData?.aiInsights || []}
loading={dashboardLoading}
onViewAll={() => {
// Navigate to AI Insights page
window.location.href = '/app/analytics/ai-insights';
}}
/> />
</div> </div>
)}
</div> {/* BLOCK 2: Pending Purchases (PO Approvals) */}
<div data-tour="pending-po-approvals">
<PendingPurchasesBlock
pendingPOs={dashboardData?.pendingPOs || []}
loading={false}
onApprove={handleApprove}
onReject={handleReject}
onViewDetails={handleViewDetails}
/>
</div>
{/* BLOCK 3: Pending Deliveries (Overdue + Today) */}
<div data-tour="pending-deliveries">
<PendingDeliveriesBlock
overdueDeliveries={dashboardData?.overdueDeliveries || []}
pendingDeliveries={dashboardData?.pendingDeliveries || []}
loading={false}
/>
</div>
{/* BLOCK 4: Production Status (Late/Running/Pending) */}
<div data-tour="execution-progress">
<ProductionStatusBlock
lateToStartBatches={dashboardData?.lateToStartBatches || []}
runningBatches={dashboardData?.runningBatches || []}
pendingBatches={dashboardData?.pendingBatches || []}
alerts={dashboardData?.alerts || []}
equipmentAlerts={dashboardData?.equipmentAlerts || []}
loading={false}
onStartBatch={handleStartBatch}
/>
</div>
{/* BLOCK 5: AI Insights (Professional/Enterprise only) */}
{(plan === SUBSCRIPTION_TIERS.PROFESSIONAL || plan === SUBSCRIPTION_TIERS.ENTERPRISE) && (
<div data-tour="ai-insights">
<AIInsightsBlock
insights={dashboardData?.aiInsights || []}
loading={false}
onViewAll={() => {
// Navigate to AI Insights page
window.location.href = '/app/analytics/ai-insights';
}}
/>
</div>
)}
</div>
)}
</> </>
)} )}
</div> </div>

View File

@@ -295,10 +295,10 @@ const DemoPage = () => {
       // BUG-010 FIX: Handle ready status separately from partial
       if (statusData.status === 'ready') {
-        // Full success - set to 100% and navigate after delay
+        // Full success - set to 100% and navigate immediately
         clearInterval(progressInterval);
         setCloneProgress(prev => ({ ...prev, overall: 100 }));
-        setTimeout(() => {
+        requestAnimationFrame(() => {
           // Reset state before navigation
           setCreatingTier(null);
           setProgressStartTime(null);
@@ -311,7 +311,7 @@ const DemoPage = () => {
           });
           // Navigate to the main dashboard which will automatically route to enterprise or bakery dashboard based on subscription tier
           navigate('/app/dashboard');
-        }, 1500); // Increased from 1000ms to show 100% completion
+        });
         return;
       } else if (statusData.status === 'PARTIAL' || statusData.status === 'partial') {
         // BUG-010 FIX: Show warning modal for partial status

View File

@@ -131,68 +131,27 @@ export const useTenantInitializer = () => {
           console.log('✅ [TenantInitializer] Set API client tenant ID:', virtualTenantId);
         });

-        // For enterprise demos, wait for session to be ready, then load tenants
+        // For enterprise demos, load child tenants immediately (session is already ready when we navigate here)
         if (demoAccountType === 'enterprise') {
-          console.log('🔄 [TenantInitializer] Waiting for enterprise demo session to be ready...');
-
-          // Poll session status until ready
-          const pollSessionStatus = async (sessionId: string, maxAttempts = 30) => {
-            for (let attempt = 1; attempt <= maxAttempts; attempt++) {
-              try {
-                const response = await fetch(`/api/v1/demo-sessions/${sessionId}/status`);
-                if (response.ok) {
-                  const status = await response.json();
-                  console.log(`⏳ [TenantInitializer] Session status poll ${attempt}/${maxAttempts}:`, status.status);
-                  if (status.status === 'ready') {
-                    console.log('✅ [TenantInitializer] Demo session is ready!');
-                    return true;
-                  } else if (status.status === 'failed') {
-                    console.error('❌ [TenantInitializer] Demo session failed:', status);
-                    return false;
-                  }
-                  // Status is 'initializing' or 'cloning_data' - continue polling
-                }
-              } catch (error) {
-                console.warn(`⚠️ [TenantInitializer] Status poll ${attempt} failed:`, error);
-              }
-
-              // Wait 1 second before next poll (except on last attempt)
-              if (attempt < maxAttempts) {
-                await new Promise(resolve => setTimeout(resolve, 1000));
-              }
-            }
-            console.error('❌ [TenantInitializer] Session readiness timeout after 30 seconds');
-            return false;
-          };
-
-          // Wait for session to be ready, then load tenants
-          pollSessionStatus(demoSessionId).then(isReady => {
-            if (isReady) {
-              console.log('🔄 [TenantInitializer] Loading available tenants for enterprise demo...');
-              const mockUserId = 'demo-user';
-              import('../api/services/tenant').then(({ TenantService }) => {
-                const tenantService = new TenantService();
-                tenantService.getUserTenants(mockUserId)
-                  .then(tenants => {
-                    console.log('📋 [TenantInitializer] Loaded available tenants:', tenants.length);
-                    if (tenants.length === 0) {
-                      console.warn('⚠️ [TenantInitializer] Session ready but no tenants found - possible sync issue');
-                    }
-                    // Update the tenant store with available tenants
-                    import('../stores/tenant.store').then(({ useTenantStore }) => {
-                      useTenantStore.getState().setAvailableTenants(tenants);
-                    });
-                  })
-                  .catch(error => {
-                    console.error('❌ [TenantInitializer] Failed to load available tenants:', error);
-                  });
-              });
-            } else {
-              console.error('❌ [TenantInitializer] Cannot load tenants - session not ready');
-            }
-          });
+          console.log('🔄 [TenantInitializer] Loading available tenants for enterprise demo...');
+          const mockUserId = 'demo-user';
+          import('../api/services/tenant').then(({ TenantService }) => {
+            const tenantService = new TenantService();
+            tenantService.getUserTenants(mockUserId)
+              .then(tenants => {
+                console.log('📋 [TenantInitializer] Loaded available tenants:', tenants.length);
+                if (tenants.length === 0) {
+                  console.warn('⚠️ [TenantInitializer] No child tenants found yet - they may still be cloning');
+                }
+                // Update the tenant store with available tenants
+                import('../stores/tenant.store').then(({ useTenantStore }) => {
+                  useTenantStore.getState().setAvailableTenants(tenants);
+                });
+              })
+              .catch(error => {
+                console.error('❌ [TenantInitializer] Failed to load available tenants:', error);
+              });
+          });
         }
       }

View File

@@ -39,6 +39,11 @@ PUBLIC_ROUTES = [
     "/api/v1/demo/sessions"
 ]

+# Routes accessible with demo session (no JWT required, just demo session header)
+DEMO_ACCESSIBLE_ROUTES = [
+    "/api/v1/tenants/",  # All tenant endpoints accessible in demo mode
+]

 class AuthMiddleware(BaseHTTPMiddleware):
     """
     Enhanced Authentication Middleware with Tenant Access Control

View File

@@ -290,9 +290,9 @@ async def proxy_tenant_insights(request: Request, tenant_id: str = Path(...), pa
 @router.api_route("/{tenant_id}/onboarding/{path:path}", methods=["GET", "POST", "OPTIONS"])
 async def proxy_tenant_onboarding(request: Request, tenant_id: str = Path(...), path: str = ""):
-    """Proxy tenant onboarding requests to sales service"""
+    """Proxy tenant onboarding requests to tenant service"""
     target_path = f"/api/v1/tenants/{tenant_id}/onboarding/{path}".rstrip("/")
-    return await _proxy_to_sales_service(request, target_path)
+    return await _proxy_to_tenant_service(request, target_path)
# ================================================================ # ================================================================
# TENANT-SCOPED TRAINING SERVICE ENDPOINTS # TENANT-SCOPED TRAINING SERVICE ENDPOINTS

View File

@@ -224,6 +224,14 @@ async def create_demo_session(
                 algorithm=settings.JWT_ALGORITHM
             )

+            # Map demo_account_type to subscription tier
+            subscription_tier = "enterprise" if session.demo_account_type == "enterprise" else "professional"
+            tenant_name = (
+                "Panadería Artesana España - Central"
+                if session.demo_account_type == "enterprise"
+                else "Panadería Artesana Madrid - Demo"
+            )

             return {
                 "session_id": session.session_id,
                 "virtual_tenant_id": str(session.virtual_tenant_id),
@@ -232,7 +240,10 @@
                 "created_at": session.created_at,
                 "expires_at": session.expires_at,
                 "demo_config": session.session_metadata.get("demo_config", {}),
-                "session_token": session_token
+                "session_token": session_token,
+                "subscription_tier": subscription_tier,
+                "is_enterprise": session.demo_account_type == "enterprise",
+                "tenant_name": tenant_name
             }

         except Exception as e:
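For reference, the enriched response as the frontend might type it; a partial sketch (interface name is ours, and any fields not visible in this hunk are omitted):

```ts
export interface DemoSessionResponse {
  session_id: string;
  virtual_tenant_id: string;
  created_at: string;
  expires_at: string;
  demo_config: Record<string, unknown>;
  session_token: string;
  subscription_tier: 'professional' | 'enterprise'; // mapped from demo_account_type
  is_enterprise: boolean;
  tenant_name: string;
}
```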

View File

@@ -48,6 +48,9 @@ class CloneOrchestrator:
         self.internal_api_key = settings.INTERNAL_API_KEY
         self.redis_manager = redis_manager  # For real-time progress updates

+        # Shared HTTP client with connection pooling
+        self._http_client: Optional[httpx.AsyncClient] = None

         # Define services that participate in cloning
         # URLs should be internal Kubernetes service names
         self.services = [
@@ -125,6 +128,20 @@ class CloneOrchestrator:
             ),
         ]

+    async def _get_http_client(self) -> httpx.AsyncClient:
+        """Get or create shared HTTP client with connection pooling"""
+        if self._http_client is None or self._http_client.is_closed:
+            self._http_client = httpx.AsyncClient(
+                timeout=httpx.Timeout(30.0, connect=5.0),
+                limits=httpx.Limits(max_connections=100, max_keepalive_connections=20)
+            )
+        return self._http_client
+
+    async def close(self):
+        """Close the HTTP client"""
+        if self._http_client and not self._http_client.is_closed:
+            await self._http_client.aclose()

     async def _update_progress_in_redis(
         self,
         session_id: str,
@@ -352,30 +369,13 @@
             "duration_ms": duration_ms
         }

-        # If cloning completed successfully, trigger post-clone operations
+        # If cloning completed successfully, trigger post-clone operations in background
         if overall_status in ["completed", "partial"]:
-            try:
-                # Trigger alert generation
-                alert_results = await self._trigger_alert_generation_post_clone(
-                    virtual_tenant_id=virtual_tenant_id,
-                    demo_account_type=demo_account_type
-                )
-                result["alert_generation"] = alert_results
-
-                # Trigger AI insights generation
-                insights_results = await self._trigger_ai_insights_generation_post_clone(
-                    virtual_tenant_id=virtual_tenant_id,
-                    demo_account_type=demo_account_type
-                )
-                result["ai_insights_generation"] = insights_results
-            except Exception as e:
-                logger.error(
-                    "Failed to trigger post-clone operations (non-fatal)",
-                    session_id=session_id,
-                    error=str(e)
-                )
-                result["post_clone_error"] = str(e)
+            asyncio.create_task(self._run_post_clone_enrichments(
+                virtual_tenant_id=virtual_tenant_id,
+                demo_account_type=demo_account_type,
+                session_id=session_id
+            ))

         logger.info(
             "Cloning completed",
@@ -528,92 +528,91 @@ class CloneOrchestrator:
                 timeout=service.timeout
             )

-            async with httpx.AsyncClient(timeout=service.timeout) as client:
+            client = await self._get_http_client()

             logger.debug(
                 "Sending clone request",
                 service=service.name,
                 base_tenant_id=base_tenant_id,
                 virtual_tenant_id=virtual_tenant_id,
                 demo_account_type=demo_account_type
             )

             response = await client.post(
                 f"{service.url}/internal/demo/clone",
                 params={
                     "base_tenant_id": base_tenant_id,
                     "virtual_tenant_id": virtual_tenant_id,
                     "demo_account_type": demo_account_type,
                     "session_id": session_id,
                     "session_created_at": session_created_at.isoformat()
                 },
-                headers={"X-Internal-API-Key": self.internal_api_key},
-                timeout=service.timeout
+                headers={"X-Internal-API-Key": self.internal_api_key}
             )

             duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
             duration_seconds = duration_ms / 1000

             logger.debug(
                 "Received response from service",
                 service=service.name,
                 status_code=response.status_code,
                 duration_ms=duration_ms
             )

             demo_cross_service_calls_total.labels(
                 source_service="demo-session",
                 target_service=service.name,
                 status="success"
             ).inc()
             demo_cross_service_call_duration_seconds.labels(
                 source_service="demo-session",
                 target_service=service.name
             ).observe(duration_seconds)
             demo_service_clone_duration_seconds.labels(
                 tier=demo_account_type,
                 service=service.name
             ).observe(duration_seconds)

             if response.status_code == 200:
                 result = response.json()
                 logger.info(
                     "Service cloning completed",
                     service=service.name,
                     records_cloned=result.get("records_cloned", 0),
                     duration_ms=duration_ms
                 )
                 return result
             else:
                 error_msg = f"HTTP {response.status_code}: {response.text}"
                 logger.error(
                     "Service cloning failed",
                     service=service.name,
                     status_code=response.status_code,
                     error=error_msg,
                     response_text=response.text
                 )
                 # Update error metrics
                 demo_cross_service_calls_total.labels(
                     source_service="demo-session",
                     target_service=service.name,
                     status="failed"
                 ).inc()
                 demo_cloning_errors_total.labels(
                     tier=demo_account_type,
                     service=service.name,
                     error_type="http_error"
                 ).inc()
                 return {
                     "service": service.name,
                     "status": "failed",
                     "error": error_msg,
                     "records_cloned": 0,
                     "duration_ms": duration_ms,
                     "response_status": response.status_code,
                     "response_text": response.text
                 }

         except httpx.TimeoutException:
             duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
@@ -798,28 +797,29 @@ class CloneOrchestrator:
         try:
             # First, create child tenant via tenant service
             tenant_url = os.getenv("TENANT_SERVICE_URL", "http://tenant-service:8000")
-            async with httpx.AsyncClient(timeout=30.0) as client:
-                response = await client.post(
+            client = await self._get_http_client()
+            response = await client.post(
                 f"{tenant_url}/internal/demo/create-child",
                 json={
                     "base_tenant_id": child_base_id,
                     "virtual_tenant_id": virtual_child_id,
                     "parent_tenant_id": virtual_parent_id,
                     "child_name": child_name,
                     "location": location,
                     "session_id": session_id
                 },
-                headers={"X-Internal-API-Key": self.internal_api_key}
+                headers={"X-Internal-API-Key": self.internal_api_key},
+                timeout=30.0
             )

             if response.status_code != 200:
                 return {
                     "child_id": virtual_child_id,
                     "child_name": child_name,
                     "status": "failed",
                     "error": f"Tenant creation failed: HTTP {response.status_code}",
                     "records_cloned": 0
                 }

             # Then clone data from all services for this child
             records_cloned = 0
@@ -942,9 +942,6 @@ class CloneOrchestrator:
             logger.error("Failed to trigger production alerts", tenant_id=virtual_tenant_id, error=str(e))
             results["production_alerts"] = {"error": str(e)}

-        # Wait 1.5s for alert enrichment
-        await asyncio.sleep(1.5)

         logger.info(
             "Alert generation post-clone completed",
             tenant_id=virtual_tenant_id,
@@ -1052,9 +1049,6 @@ class CloneOrchestrator:
             logger.error("Failed to trigger demand insights", tenant_id=virtual_tenant_id, error=str(e))
             results["demand_insights"] = {"error": str(e)}

-        # Wait 2s for insights to be processed
-        await asyncio.sleep(2.0)

         logger.info(
             "AI insights generation post-clone completed",
             tenant_id=virtual_tenant_id,
@@ -1063,3 +1057,47 @@ class CloneOrchestrator:
         results["total_insights_generated"] = total_insights
         return results
async def _run_post_clone_enrichments(
self,
virtual_tenant_id: str,
demo_account_type: str,
session_id: str
) -> None:
"""
Background task for non-blocking enrichments (alerts and AI insights).
Runs in fire-and-forget mode to avoid blocking session readiness.
"""
try:
logger.info(
"Starting background enrichments",
session_id=session_id,
tenant_id=virtual_tenant_id
)
await asyncio.gather(
self._trigger_alert_generation_post_clone(virtual_tenant_id, demo_account_type),
self._trigger_ai_insights_generation_post_clone(virtual_tenant_id, demo_account_type),
return_exceptions=True
)
if self.redis_manager:
client = await self.redis_manager.get_client()
await client.set(
f"session:{session_id}:enrichments_complete",
"true",
ex=7200
)
logger.info(
"Background enrichments completed",
session_id=session_id,
tenant_id=virtual_tenant_id
)
except Exception as e:
logger.error(
"Background enrichments failed",
session_id=session_id,
error=str(e)
)

View File

@@ -600,4 +600,35 @@ async def delete_demo_tenant_data(
         raise HTTPException(
             status_code=500,
             detail=f"Failed to delete demo data: {str(e)}"
         )
@router.get("/internal/count")
async def get_ingredient_count(
tenant_id: str,
db: AsyncSession = Depends(get_db),
_: bool = Depends(verify_internal_api_key)
):
"""
Get count of active ingredients for onboarding status check.
Internal endpoint for tenant service.
"""
try:
from sqlalchemy import select, func
count = await db.scalar(
select(func.count()).select_from(Ingredient)
.where(
Ingredient.tenant_id == UUID(tenant_id),
Ingredient.is_active == True
)
)
return {
"count": count or 0,
"tenant_id": tenant_id
}
except Exception as e:
logger.error("Failed to get ingredient count", tenant_id=tenant_id, error=str(e))
raise HTTPException(status_code=500, detail=f"Failed to get ingredient count: {str(e)}")

View File

@@ -431,4 +431,36 @@ async def delete_demo_tenant_data(
         raise HTTPException(
             status_code=500,
             detail=f"Failed to delete demo data: {str(e)}"
         )
@router.get("/internal/count")
async def get_recipe_count(
tenant_id: str,
db: AsyncSession = Depends(get_db),
_: bool = Depends(verify_internal_api_key)
):
"""
Get count of active recipes for onboarding status check.
Internal endpoint for tenant service.
"""
try:
from sqlalchemy import select, func
from app.models.recipes import RecipeStatus
count = await db.scalar(
select(func.count()).select_from(Recipe)
.where(
Recipe.tenant_id == UUID(tenant_id),
Recipe.status == RecipeStatus.ACTIVE
)
)
return {
"count": count or 0,
"tenant_id": tenant_id
}
except Exception as e:
logger.error("Failed to get recipe count", tenant_id=tenant_id, error=str(e))
raise HTTPException(status_code=500, detail=f"Failed to get recipe count: {str(e)}")

View File

@@ -406,4 +406,36 @@ async def delete_demo_tenant_data(
         raise HTTPException(
             status_code=500,
             detail=f"Failed to delete demo data: {str(e)}"
         )
@router.get("/internal/count")
async def get_supplier_count(
tenant_id: str,
db: AsyncSession = Depends(get_db),
_: bool = Depends(verify_internal_api_key)
):
"""
Get count of active suppliers for onboarding status check.
Internal endpoint for tenant service.
"""
try:
from sqlalchemy import select, func
from app.models.suppliers import SupplierStatus
count = await db.scalar(
select(func.count()).select_from(Supplier)
.where(
Supplier.tenant_id == UUID(tenant_id),
Supplier.status == SupplierStatus.active
)
)
return {
"count": count or 0,
"tenant_id": tenant_id
}
except Exception as e:
logger.error("Failed to get supplier count", tenant_id=tenant_id, error=str(e))
raise HTTPException(status_code=500, detail=f"Failed to get supplier count: {str(e)}")

View File

@@ -0,0 +1,133 @@
"""
Onboarding Status API
Provides lightweight onboarding status checks by aggregating counts from multiple services
"""
from fastapi import APIRouter, Depends, HTTPException
from sqlalchemy.ext.asyncio import AsyncSession
import structlog
import asyncio
import httpx
import os
from app.core.database import get_db
from app.core.config import settings
from shared.auth.decorators import get_current_tenant_id_dep
from shared.routing.route_builder import RouteBuilder
logger = structlog.get_logger()
router = APIRouter()
route_builder = RouteBuilder("tenants")
@router.get(route_builder.build_base_route("{tenant_id}/onboarding/status", include_tenant_prefix=False))
async def get_onboarding_status(
tenant_id: str,
db: AsyncSession = Depends(get_db)
):
"""
Get lightweight onboarding status by fetching counts from each service.
Returns:
- ingredients_count: Number of active ingredients
- suppliers_count: Number of active suppliers
- recipes_count: Number of active recipes
- has_minimum_setup: Boolean indicating if minimum requirements are met
- progress_percentage: Overall onboarding progress (0-100)
"""
try:
# Service URLs from environment
inventory_url = os.getenv("INVENTORY_SERVICE_URL", "http://inventory-service:8000")
suppliers_url = os.getenv("SUPPLIERS_SERVICE_URL", "http://suppliers-service:8000")
recipes_url = os.getenv("RECIPES_SERVICE_URL", "http://recipes-service:8000")
internal_api_key = settings.INTERNAL_API_KEY
# Fetch counts from all services in parallel
async with httpx.AsyncClient(timeout=10.0) as client:
results = await asyncio.gather(
client.get(
f"{inventory_url}/internal/count",
params={"tenant_id": tenant_id},
headers={"X-Internal-API-Key": internal_api_key}
),
client.get(
f"{suppliers_url}/internal/count",
params={"tenant_id": tenant_id},
headers={"X-Internal-API-Key": internal_api_key}
),
client.get(
f"{recipes_url}/internal/count",
params={"tenant_id": tenant_id},
headers={"X-Internal-API-Key": internal_api_key}
),
return_exceptions=True
)
# Extract counts with fallback to 0
ingredients_count = 0
suppliers_count = 0
recipes_count = 0
if not isinstance(results[0], Exception) and results[0].status_code == 200:
ingredients_count = results[0].json().get("count", 0)
if not isinstance(results[1], Exception) and results[1].status_code == 200:
suppliers_count = results[1].json().get("count", 0)
if not isinstance(results[2], Exception) and results[2].status_code == 200:
recipes_count = results[2].json().get("count", 0)
# Calculate minimum setup requirements
# Minimum: 3 ingredients, 1 supplier, 1 recipe
has_minimum_ingredients = ingredients_count >= 3
has_minimum_suppliers = suppliers_count >= 1
has_minimum_recipes = recipes_count >= 1
has_minimum_setup = all([
has_minimum_ingredients,
has_minimum_suppliers,
has_minimum_recipes
])
# Calculate progress percentage
# Each requirement contributes 33.33%
progress = 0
if has_minimum_ingredients:
progress += 33
if has_minimum_suppliers:
progress += 33
if has_minimum_recipes:
progress += 34
return {
"ingredients_count": ingredients_count,
"suppliers_count": suppliers_count,
"recipes_count": recipes_count,
"has_minimum_setup": has_minimum_setup,
"progress_percentage": progress,
"requirements": {
"ingredients": {
"current": ingredients_count,
"minimum": 3,
"met": has_minimum_ingredients
},
"suppliers": {
"current": suppliers_count,
"minimum": 1,
"met": has_minimum_suppliers
},
"recipes": {
"current": recipes_count,
"minimum": 1,
"met": has_minimum_recipes
}
}
}
except Exception as e:
logger.error("Failed to get onboarding status", tenant_id=tenant_id, error=str(e))
raise HTTPException(
status_code=500,
detail=f"Failed to get onboarding status: {str(e)}"
)

View File

@@ -745,10 +745,30 @@ async def get_usage_summary(
     current_user: Dict[str, Any] = Depends(get_current_user_dep),
     limit_service: SubscriptionLimitService = Depends(get_subscription_limit_service)
 ):
-    """Get usage summary vs limits for a tenant"""
+    """Get usage summary vs limits for a tenant (cached for 30s for performance)"""
     try:
+        # Try to get from cache first (30s TTL)
+        from shared.redis_utils import get_redis_client
+        import json
+
+        cache_key = f"usage_summary:{tenant_id}"
+        redis_client = await get_redis_client()
+
+        if redis_client:
+            cached = await redis_client.get(cache_key)
+            if cached:
+                logger.debug("Usage summary cache hit", tenant_id=str(tenant_id))
+                return json.loads(cached)
+
+        # Cache miss - fetch fresh data
         usage = await limit_service.get_usage_summary(str(tenant_id))
+
+        # Store in cache with 30s TTL
+        if redis_client:
+            await redis_client.setex(cache_key, 30, json.dumps(usage))
+            logger.debug("Usage summary cached", tenant_id=str(tenant_id))
+
         return usage
     except Exception as e:

View File

@@ -7,7 +7,7 @@ from fastapi import FastAPI
 from sqlalchemy import text
 from app.core.config import settings
 from app.core.database import database_manager
-from app.api import tenants, tenant_members, tenant_operations, webhooks, plans, subscription, tenant_settings, whatsapp_admin, usage_forecast, enterprise_upgrade, tenant_locations, tenant_hierarchy, internal_demo, network_alerts
+from app.api import tenants, tenant_members, tenant_operations, webhooks, plans, subscription, tenant_settings, whatsapp_admin, usage_forecast, enterprise_upgrade, tenant_locations, tenant_hierarchy, internal_demo, network_alerts, onboarding
 from shared.service_base import StandardFastAPIService
@@ -158,6 +158,7 @@ service.add_router(internal_demo.router, tags=["internal-demo"])  # Internal dem
 service.add_router(tenant_hierarchy.router, tags=["tenant-hierarchy"])  # Tenant hierarchy endpoints
 service.add_router(internal_demo.router, tags=["internal-demo"])  # Internal demo data cloning
 service.add_router(network_alerts.router, tags=["network-alerts"])  # Network alerts aggregation endpoints
+service.add_router(onboarding.router, tags=["onboarding"])  # Onboarding status endpoints

 if __name__ == "__main__":
     import uvicorn

View File

@@ -437,18 +437,21 @@ class SubscriptionLimitService:
         current_users = len(members)
         current_locations = 1  # Each tenant has one primary location

-        # Get current usage - Products & Inventory
-        current_products = await self._get_ingredient_count(tenant_id)
-        current_recipes = await self._get_recipe_count(tenant_id)
-        current_suppliers = await self._get_supplier_count(tenant_id)
+        # Get current usage - Products & Inventory (parallel calls for performance)
+        import asyncio
+        current_products, current_recipes, current_suppliers = await asyncio.gather(
+            self._get_ingredient_count(tenant_id),
+            self._get_recipe_count(tenant_id),
+            self._get_supplier_count(tenant_id)
+        )

-        # Get current usage - IA & Analytics (Redis-based daily quotas)
-        training_jobs_usage = await self._get_training_jobs_today(tenant_id, subscription.plan)
-        forecasts_usage = await self._get_forecasts_today(tenant_id, subscription.plan)
-
-        # Get current usage - API & Storage (Redis-based)
-        api_calls_usage = await self._get_api_calls_this_hour(tenant_id, subscription.plan)
-        storage_usage = await self._get_file_storage_usage_gb(tenant_id, subscription.plan)
+        # Get current usage - IA & Analytics + API & Storage (parallel Redis calls for performance)
+        training_jobs_usage, forecasts_usage, api_calls_usage, storage_usage = await asyncio.gather(
+            self._get_training_jobs_today(tenant_id, subscription.plan),
+            self._get_forecasts_today(tenant_id, subscription.plan),
+            self._get_api_calls_this_hour(tenant_id, subscription.plan),
+            self._get_file_storage_usage_gb(tenant_id, subscription.plan)
+        )

         # Get limits from subscription
         recipes_limit = await self._get_limit_from_plan(subscription.plan, 'recipes')
recipes_limit = await self._get_limit_from_plan(subscription.plan, 'recipes') recipes_limit = await self._get_limit_from_plan(subscription.plan, 'recipes')