Improve the UI and training

This commit is contained in:
Urtzi Alfaro
2025-11-15 15:20:10 +01:00
parent c349b845a6
commit 843cd2bf5c
19 changed files with 2073 additions and 233 deletions

View File

@@ -0,0 +1,170 @@
import React from 'react';
/**
 * One row of the pricing comparison table: a single feature plus a boolean
 * availability flag per subscription tier.
 */
interface FeatureComparison {
// Stable identifier; used as the React list key.
key: string;
// English display name; name_es overrides it when the UI language is 'es'.
name: string;
name_es?: string;
// Key into the `categories` map of PricingComparisonTableProps.
category: string;
// Availability per tier (true = included, rendered as a green check).
starter: boolean;
professional: boolean;
enterprise: boolean;
// Optional hover text (row title attribute); tooltip_es overrides for Spanish.
tooltip?: string;
tooltip_es?: string;
}
/** Display metadata for a feature-category header row. */
interface CategoryInfo {
// Emoji rendered next to the category name.
icon: string;
// English category name; name_es overrides it when the UI language is 'es'.
name: string;
name_es?: string;
}
/** Props for PricingComparisonTable. */
interface PricingComparisonTableProps {
features: FeatureComparison[];
// Category metadata keyed by FeatureComparison.category.
categories: Record<string, CategoryInfo>;
// Extra classes applied to the scrollable wrapper div.
className?: string;
}
/**
* PricingComparisonTable - Full feature comparison across all tiers
* Expandable table showing all features side-by-side
*/
export const PricingComparisonTable: React.FC<PricingComparisonTableProps> = ({
features,
categories,
className = '',
}) => {
const currentLang = localStorage.getItem('language') || 'es';
// Group features by category
const featuresByCategory = features.reduce((acc, feature) => {
if (!acc[feature.category]) {
acc[feature.category] = [];
}
acc[feature.category].push(feature);
return acc;
}, {} as Record<string, FeatureComparison[]>);
const renderCheckmark = (hasFeature: boolean) => {
if (hasFeature) {
return (
<svg
className="w-5 h-5 text-green-500 mx-auto"
fill="currentColor"
viewBox="0 0 20 20"
>
<path
fillRule="evenodd"
d="M16.707 5.293a1 1 0 010 1.414l-8 8a1 1 0 01-1.414 0l-4-4a1 1 0 011.414-1.414L8 12.586l7.293-7.293a1 1 0 011.414 0z"
clipRule="evenodd"
/>
</svg>
);
}
return (
<svg
className="w-5 h-5 text-gray-300 mx-auto"
fill="currentColor"
viewBox="0 0 20 20"
>
<path
fillRule="evenodd"
d="M4.293 4.293a1 1 0 011.414 0L10 8.586l4.293-4.293a1 1 0 111.414 1.414L11.414 10l4.293 4.293a1 1 0 01-1.414 1.414L10 11.414l-4.293 4.293a1 1 0 01-1.414-1.414L8.586 10 4.293 5.707a1 1 0 010-1.414z"
clipRule="evenodd"
/>
</svg>
);
};
return (
<div className={`overflow-x-auto ${className}`}>
<table className="w-full border-collapse bg-white rounded-lg shadow-sm">
<thead>
<tr className="bg-gray-50 border-b border-gray-200">
<th className="px-6 py-4 text-left text-sm font-semibold text-gray-900 sticky left-0 bg-gray-50">
{currentLang === 'es' ? 'Funcionalidad' : 'Feature'}
</th>
<th className="px-6 py-4 text-center text-sm font-semibold text-gray-900">
Starter
</th>
<th className="px-6 py-4 text-center text-sm font-semibold text-gray-900 bg-orange-50">
Professional
<span className="ml-2 text-xs bg-orange-100 text-orange-700 px-2 py-0.5 rounded-full">
{currentLang === 'es' ? 'Más Popular' : 'Most Popular'}
</span>
</th>
<th className="px-6 py-4 text-center text-sm font-semibold text-gray-900">
Enterprise
</th>
</tr>
</thead>
<tbody>
{Object.entries(featuresByCategory).map(([categoryKey, categoryFeatures]) => {
const category = categories[categoryKey];
if (!category) return null;
const categoryName = currentLang === 'es' && category.name_es
? category.name_es
: category.name;
return (
<React.Fragment key={categoryKey}>
{/* Category Header */}
<tr className="bg-gray-100 border-t border-gray-200">
<td
colSpan={4}
className="px-6 py-3 text-sm font-semibold text-gray-700 sticky left-0 bg-gray-100"
>
<div className="flex items-center gap-2">
<span className="text-lg" role="img" aria-label={categoryName}>
{category.icon}
</span>
<span>{categoryName}</span>
</div>
</td>
</tr>
{/* Category Features */}
{categoryFeatures.map((feature, index) => {
const featureName = currentLang === 'es' && feature.name_es
? feature.name_es
: feature.name;
const tooltip = currentLang === 'es' && feature.tooltip_es
? feature.tooltip_es
: feature.tooltip;
return (
<tr
key={feature.key}
className={`border-b border-gray-100 hover:bg-gray-50 ${
index % 2 === 0 ? 'bg-white' : 'bg-gray-50'
}`}
title={tooltip}
>
<td className="px-6 py-3 text-sm text-gray-700 sticky left-0 bg-inherit">
{featureName}
{tooltip && (
<span className="ml-1 text-gray-400 text-xs"></span>
)}
</td>
<td className="px-6 py-3 text-center">
{renderCheckmark(feature.starter)}
</td>
<td className="px-6 py-3 text-center bg-orange-50/30">
{renderCheckmark(feature.professional)}
</td>
<td className="px-6 py-3 text-center">
{renderCheckmark(feature.enterprise)}
</td>
</tr>
);
})}
</React.Fragment>
);
})}
</tbody>
</table>
</div>
);
};
export default PricingComparisonTable;

View File

@@ -0,0 +1,121 @@
import React, { useState } from 'react';
import { useTranslation } from 'react-i18next';
/** A feature entry whose display text is resolved via i18n keys. */
interface Feature {
// Stable identifier; used as the React list key.
key: string;
// i18n key (namespace 'subscription') for the feature's display name.
translation_key: string;
// Optional i18n key for hover/tooltip text.
tooltip_key?: string;
// Owning category key (grouping is done by the caller).
category: string;
}
/** Display metadata for a feature category. */
interface Category {
// Emoji shown next to the category name.
icon: string;
// i18n key (namespace 'subscription') for the category's display name.
translation_key: string;
}
/** Props for PricingFeatureCategory. */
interface PricingFeatureCategoryProps {
// Used to build the id/aria-controls pair of the collapsible region.
categoryKey: string;
category: Category;
features: Feature[];
className?: string;
}
/**
* PricingFeatureCategory - Displays a collapsible category of features
* Groups related features with icons for better organization
*/
export const PricingFeatureCategory: React.FC<PricingFeatureCategoryProps> = ({
categoryKey,
category,
features,
className = '',
}) => {
const [isExpanded, setIsExpanded] = useState(false);
const { t } = useTranslation('subscription');
if (features.length === 0) {
return null;
}
const categoryName = t(category.translation_key);
return (
<div className={`border-b border-gray-200 last:border-b-0 ${className}`}>
<button
onClick={() => setIsExpanded(!isExpanded)}
className="w-full flex items-center justify-between py-3 px-4 hover:bg-gray-50 transition-colors"
aria-expanded={isExpanded}
aria-controls={`category-${categoryKey}`}
>
<div className="flex items-center gap-2">
<span className="text-xl" role="img" aria-label={categoryName}>
{category.icon}
</span>
<span className="font-semibold text-gray-800 text-sm">
{categoryName}
</span>
<span className="text-xs text-gray-500 bg-gray-100 px-2 py-0.5 rounded-full">
{features.length}
</span>
</div>
<svg
className={`w-5 h-5 text-gray-400 transition-transform ${
isExpanded ? 'rotate-180' : ''
}`}
fill="none"
stroke="currentColor"
viewBox="0 0 24 24"
>
<path
strokeLinecap="round"
strokeLinejoin="round"
strokeWidth={2}
d="M19 9l-7 7-7-7"
/>
</svg>
</button>
{isExpanded && (
<div
id={`category-${categoryKey}`}
className="px-4 pb-3 space-y-2"
>
{features.map((feature) => {
const featureName = t(feature.translation_key);
const tooltip = feature.tooltip_key ? t(feature.tooltip_key) : undefined;
return (
<div
key={feature.key}
className="flex items-start gap-2 text-sm group"
title={tooltip}
>
<svg
className="w-4 h-4 text-green-500 flex-shrink-0 mt-0.5"
fill="currentColor"
viewBox="0 0 20 20"
>
<path
fillRule="evenodd"
d="M16.707 5.293a1 1 0 010 1.414l-8 8a1 1 0 01-1.414 0l-4-4a1 1 0 011.414-1.414L8 12.586l7.293-7.293a1 1 0 011.414 0z"
clipRule="evenodd"
/>
</svg>
<span className="text-gray-700 flex-1">
{featureName}
{tooltip && (
<span className="ml-1 text-gray-400 opacity-0 group-hover:opacity-100 transition-opacity">
</span>
)}
</span>
</div>
);
})}
</div>
)}
</div>
);
};
export default PricingFeatureCategory;

View File

@@ -21,7 +21,11 @@ export const PricingSection: React.FC = () => {
</div>
{/* Pricing Cards */}
<SubscriptionPricingCards mode="landing" />
<SubscriptionPricingCards
mode="landing"
showPilotBanner={true}
pilotTrialMonths={3}
/>
{/* Feature Comparison Link */}
<div className="text-center mt-12">

View File

@@ -1,7 +1,7 @@
import React, { useState, useEffect } from 'react';
import { useTranslation } from 'react-i18next';
import { Link } from 'react-router-dom';
import { Check, Star, ArrowRight, Package, TrendingUp, Settings, Loader, Users, MapPin, CheckCircle, Zap } from 'lucide-react';
import { Check, Star, ArrowRight, Package, TrendingUp, Settings, Loader, Users, MapPin, CheckCircle, Zap, ChevronDown, ChevronUp } from 'lucide-react';
import { Button, Card, Badge } from '../ui';
import {
subscriptionService,
@@ -10,6 +10,8 @@ import {
SUBSCRIPTION_TIERS
} from '../../api';
import { getRegisterUrl } from '../../utils/navigation';
import { ValuePropositionBadge } from './ValuePropositionBadge';
import { PricingFeatureCategory } from './PricingFeatureCategory';
type BillingCycle = 'monthly' | 'yearly';
type DisplayMode = 'landing' | 'selection';
@@ -33,11 +35,12 @@ export const SubscriptionPricingCards: React.FC<SubscriptionPricingCardsProps> =
pilotTrialMonths = 3,
className = ''
}) => {
const { t } = useTranslation();
const { t } = useTranslation('subscription');
const [plans, setPlans] = useState<Record<SubscriptionTier, PlanMetadata> | null>(null);
const [billingCycle, setBillingCycle] = useState<BillingCycle>('monthly');
const [loading, setLoading] = useState(true);
const [error, setError] = useState<string | null>(null);
const [expandedPlan, setExpandedPlan] = useState<string | null>(null);
useEffect(() => {
loadPlans();
@@ -243,7 +246,7 @@ export const SubscriptionPricingCards: React.FC<SubscriptionPricingCardsProps> =
${isSelected
? 'border-2 border-[var(--color-primary)] bg-gradient-to-br from-[var(--color-primary)]/10 via-[var(--color-primary)]/5 to-transparent shadow-2xl ring-4 ring-[var(--color-primary)]/30 scale-[1.02]'
: isPopular
? 'bg-gradient-to-br from-[var(--color-primary)] via-[var(--color-primary)] to-[var(--color-primary-dark)] shadow-2xl transform scale-105 z-10'
? 'bg-gradient-to-br from-blue-700 via-blue-800 to-blue-900 shadow-2xl transform scale-105 z-10 ring-4 ring-[var(--color-primary)]/20'
: 'bg-[var(--bg-secondary)] border-2 border-[var(--border-primary)] hover:border-[var(--color-primary)]/30 hover:shadow-xl hover:-translate-y-1'
}
`}
@@ -276,18 +279,18 @@ export const SubscriptionPricingCards: React.FC<SubscriptionPricingCardsProps> =
<h3 className={`text-2xl font-bold ${isPopular ? 'text-white' : isSelected ? 'text-[var(--color-primary)]' : 'text-[var(--text-primary)]'}`}>
{plan.name}
</h3>
<p className={`mt-3 leading-relaxed ${isPopular ? 'text-white/90' : 'text-[var(--text-secondary)]'}`}>
{plan.tagline}
<p className={`mt-3 text-sm leading-relaxed ${isPopular ? 'text-white' : 'text-[var(--text-secondary)]'}`}>
{plan.tagline_key ? t(plan.tagline_key) : plan.tagline || ''}
</p>
</div>
{/* Pricing */}
<div className="mb-8">
<div className="mb-6">
<div className="flex items-baseline">
<span className={`text-5xl font-bold ${isPopular ? 'text-white' : isSelected ? 'text-[var(--color-primary)]' : 'text-[var(--text-primary)]'}`}>
{subscriptionService.formatPrice(price)}
</span>
<span className={`ml-2 text-lg ${isPopular ? 'text-white/80' : 'text-[var(--text-secondary)]'}`}>
<span className={`ml-2 text-lg ${isPopular ? 'text-white/95' : 'text-[var(--text-secondary)]'}`}>
/{billingCycle === 'monthly' ? 'mes' : 'año'}
</span>
</div>
@@ -302,50 +305,99 @@ export const SubscriptionPricingCards: React.FC<SubscriptionPricingCardsProps> =
)}
{/* Trial Badge */}
{!savings && (
{!savings && showPilotBanner && (
<div className={`mt-2 px-3 py-1 text-sm font-medium rounded-full inline-block ${
isPopular ? 'bg-white/20 text-white' : 'bg-[var(--color-success)]/10 text-[var(--color-success)]'
}`}>
3 meses gratis
{pilotTrialMonths} meses gratis
</div>
)}
{!savings && !showPilotBanner && (
<div className={`mt-2 px-3 py-1 text-sm font-medium rounded-full inline-block ${
isPopular ? 'bg-white/20 text-white' : 'bg-[var(--color-success)]/10 text-[var(--color-success)]'
}`}>
{plan.trial_days} días gratis
</div>
)}
</div>
{/* ROI Badge */}
{plan.roi_badge && !isPopular && (
<div className="mb-4">
<ValuePropositionBadge roiBadge={plan.roi_badge} />
</div>
)}
{plan.roi_badge && isPopular && (
<div className="mb-4 bg-white/20 border border-white/30 rounded-lg px-4 py-3">
<p className="text-sm font-semibold text-white leading-tight flex items-center gap-2">
<svg className="w-5 h-5 text-white" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M12 8c-1.657 0-3 .895-3 2s1.343 2 3 2 3 .895 3 2-1.343 2-3 2m0-8c1.11 0 2.08.402 2.599 1M12 8V7m0 1v8m0 0v1m0-1c-1.11 0-2.08-.402-2.599-1M21 12a9 9 0 11-18 0 9 9 0 0118 0z" />
</svg>
{plan.roi_badge.translation_key ? t(plan.roi_badge.translation_key) : (plan.roi_badge.text_es || plan.roi_badge.text || '')}
</p>
</div>
)}
{/* Good For / Recommended For */}
{plan.recommended_for_key && (
<div className={`mb-6 text-center px-4 py-2 rounded-lg ${
isPopular
? 'bg-white/10 border border-white/20'
: 'bg-[var(--bg-secondary)] border border-[var(--border-primary)]'
}`}>
<p className={`text-xs font-medium ${isPopular ? 'text-white/95' : 'text-[var(--text-secondary)]'}`}>
{t(plan.recommended_for_key)}
</p>
</div>
)}
{/* Key Limits */}
<div className={`mb-6 p-4 rounded-lg ${
isPopular ? 'bg-white/10' : isSelected ? 'bg-[var(--color-primary)]/5' : 'bg-[var(--bg-primary)]'
<div className={`mb-6 p-3 rounded-lg ${
isPopular ? 'bg-white/15 border border-white/20' : isSelected ? 'bg-[var(--color-primary)]/5' : 'bg-[var(--bg-primary)]'
}`}>
<div className="grid grid-cols-2 gap-3 text-sm">
<div>
<span className={isPopular ? 'text-white/80' : 'text-[var(--text-secondary)]'}>Usuarios:</span>
<span className={`font-semibold ml-2 ${isPopular ? 'text-white' : isSelected ? 'text-[var(--color-primary)]' : 'text-[var(--text-primary)]'}`}>
{plan.limits.users || 'Ilimitado'}
<div className="space-y-2">
<div className="flex items-center justify-between text-xs">
<span className={isPopular ? 'text-white/95' : 'text-[var(--text-secondary)]'}>
<Users className="w-3 h-3 inline mr-1" />
Usuarios
</span>
<span className={`font-bold ${isPopular ? 'text-white' : isSelected ? 'text-[var(--color-primary)]' : 'text-[var(--text-primary)]'}`}>
{plan.limits.users || '∞'}
</span>
</div>
<div>
<span className={isPopular ? 'text-white/80' : 'text-[var(--text-secondary)]'}>Ubicaciones:</span>
<span className={`font-semibold ml-2 ${isPopular ? 'text-white' : isSelected ? 'text-[var(--color-primary)]' : 'text-[var(--text-primary)]'}`}>
{plan.limits.locations || 'Ilimitado'}
<div className="flex items-center justify-between text-xs">
<span className={isPopular ? 'text-white/95' : 'text-[var(--text-secondary)]'}>
<MapPin className="w-3 h-3 inline mr-1" />
Ubicaciones
</span>
<span className={`font-bold ${isPopular ? 'text-white' : isSelected ? 'text-[var(--color-primary)]' : 'text-[var(--text-primary)]'}`}>
{plan.limits.locations || '∞'}
</span>
</div>
<div>
<span className={isPopular ? 'text-white/80' : 'text-[var(--text-secondary)]'}>Productos:</span>
<span className={`font-semibold ml-2 ${isPopular ? 'text-white' : isSelected ? 'text-[var(--color-primary)]' : 'text-[var(--text-primary)]'}`}>
{plan.limits.products || 'Ilimitado'}
<div className="flex items-center justify-between text-xs">
<span className={isPopular ? 'text-white/95' : 'text-[var(--text-secondary)]'}>
<Package className="w-3 h-3 inline mr-1" />
Productos
</span>
<span className={`font-bold ${isPopular ? 'text-white' : isSelected ? 'text-[var(--color-primary)]' : 'text-[var(--text-primary)]'}`}>
{plan.limits.products || '∞'}
</span>
</div>
<div>
<span className={isPopular ? 'text-white/80' : 'text-[var(--text-secondary)]'}>Pronósticos/día:</span>
<span className={`font-semibold ml-2 ${isPopular ? 'text-white' : isSelected ? 'text-[var(--color-primary)]' : 'text-[var(--text-primary)]'}`}>
{plan.limits.forecasts_per_day || 'Ilimitado'}
<div className="flex items-center justify-between text-xs">
<span className={isPopular ? 'text-white/95' : 'text-[var(--text-secondary)]'}>
<TrendingUp className="w-3 h-3 inline mr-1" />
Pronóstico
</span>
<span className={`font-bold ${isPopular ? 'text-white' : isSelected ? 'text-[var(--color-primary)]' : 'text-[var(--text-primary)]'}`}>
{plan.limits.forecast_horizon_days ? `${plan.limits.forecast_horizon_days}d` : '∞'}
</span>
</div>
</div>
</div>
{/* Features List */}
<div className={`space-y-3 mb-8 max-h-80 overflow-y-auto pr-2 scrollbar-thin`}>
{plan.features.slice(0, 8).map((feature) => (
{/* Hero Features List */}
<div className={`space-y-3 mb-6`}>
{(plan.hero_features || plan.features.slice(0, 4)).map((feature) => (
<div key={feature} className="flex items-start">
<div className="flex-shrink-0 mt-1">
<div className={`w-5 h-5 rounded-full flex items-center justify-center ${
@@ -361,18 +413,63 @@ export const SubscriptionPricingCards: React.FC<SubscriptionPricingCardsProps> =
</span>
</div>
))}
{plan.features.length > 8 && (
<p className={`text-sm italic ${isPopular ? 'text-white/70' : 'text-[var(--text-secondary)]'}`}>
Y {plan.features.length - 8} características más...
</p>
)}
</div>
{/* Expandable Features - Show All Button */}
{plan.features.length > 4 && (
<div className="mb-8">
<button
onClick={(e) => {
e.preventDefault();
e.stopPropagation();
setExpandedPlan(expandedPlan === tier ? null : tier);
}}
className={`w-full py-2 px-4 rounded-lg text-sm font-medium transition-all flex items-center justify-center gap-2 ${
isPopular
? 'bg-white/10 hover:bg-white/20 text-white border border-white/20'
: 'bg-[var(--bg-secondary)] hover:bg-[var(--bg-primary)] text-[var(--text-secondary)] border border-[var(--border-primary)]'
}`}
>
{expandedPlan === tier ? (
<>
<ChevronUp className="w-4 h-4" />
Mostrar menos características
</>
) : (
<>
<ChevronDown className="w-4 h-4" />
Ver todas las {plan.features.length} características
</>
)}
</button>
{/* Expanded Features List */}
{expandedPlan === tier && (
<div className={`mt-4 p-4 rounded-lg max-h-96 overflow-y-auto ${
isPopular
? 'bg-white/10 border border-white/20'
: 'bg-[var(--bg-primary)] border border-[var(--border-primary)]'
}`}>
<div className="space-y-2">
{plan.features.map((feature) => (
<div key={feature} className="flex items-start py-1">
<Check className={`w-4 h-4 flex-shrink-0 mt-0.5 ${isPopular ? 'text-white' : 'text-[var(--color-success)]'}`} />
<span className={`ml-2 text-xs ${isPopular ? 'text-white/95' : 'text-[var(--text-primary)]'}`}>
{formatFeatureName(feature)}
</span>
</div>
))}
</div>
</div>
)}
</div>
)}
{/* Support */}
<div className={`mb-6 text-sm text-center border-t pt-4 ${
isPopular ? 'text-white/80 border-white/20' : 'text-[var(--text-secondary)] border-[var(--border-primary)]'
isPopular ? 'text-white/95 border-white/30' : 'text-[var(--text-secondary)] border-[var(--border-primary)]'
}`}>
{plan.support}
{plan.support_key ? t(plan.support_key) : plan.support || ''}
</div>
{/* CTA Button */}
@@ -418,7 +515,7 @@ export const SubscriptionPricingCards: React.FC<SubscriptionPricingCardsProps> =
</Button>
)}
<p className={`text-xs text-center mt-3 ${isPopular ? 'text-white/70' : 'text-[var(--text-secondary)]'}`}>
<p className={`text-xs text-center mt-3 ${isPopular ? 'text-white/90' : 'text-[var(--text-secondary)]'}`}>
3 meses gratis Tarjeta requerida para validación
</p>
</CardWrapper>

View File

@@ -0,0 +1,67 @@
import React from 'react';
import { useTranslation } from 'react-i18next';
/**
 * ROI badge payload. Only `translation_key` is rendered by
 * ValuePropositionBadge; the other fields are unused in this component —
 * presumably consumed elsewhere or reserved for future formatting (verify).
 */
interface ROIBadge {
savings_min?: number;
savings_max?: number;
currency?: string;
period?: string;
// i18n key (namespace 'subscription') for the badge text.
translation_key: string;
custom?: boolean;
}
/** Props for ValuePropositionBadge. */
interface ValuePropositionBadgeProps {
roiBadge: ROIBadge;
// Extra classes appended to the badge container.
className?: string;
}
/**
 * ValuePropositionBadge - Green ROI/value badge shown on pricing tiers.
 *
 * Resolves the badge copy from the 'subscription' i18n namespace via
 * roiBadge.translation_key and renders it beside a currency glyph, helping
 * bakery owners see the savings potential of a plan.
 */
export const ValuePropositionBadge: React.FC<ValuePropositionBadgeProps> = ({
  roiBadge,
  className = '',
}) => {
  const { t } = useTranslation('subscription');

  // Badge copy is entirely driven by the translation key.
  const message = t(roiBadge.translation_key);

  // Euro/coin glyph shown to the left of the message.
  const currencyIcon = (
    <svg
      className="w-5 h-5 text-green-600"
      fill="none"
      stroke="currentColor"
      viewBox="0 0 24 24"
      xmlns="http://www.w3.org/2000/svg"
    >
      <path
        strokeLinecap="round"
        strokeLinejoin="round"
        strokeWidth={2}
        d="M12 8c-1.657 0-3 .895-3 2s1.343 2 3 2 3 .895 3 2-1.343 2-3 2m0-8c1.11 0 2.08.402 2.599 1M12 8V7m0 1v8m0 0v1m0-1c-1.11 0-2.08-.402-2.599-1M21 12a9 9 0 11-18 0 9 9 0 0118 0z"
      />
    </svg>
  );

  return (
    <div
      className={`
        bg-gradient-to-r from-green-50 to-emerald-50
        border border-green-200
        rounded-lg
        px-4 py-3
        flex items-center gap-2
        ${className}
      `}
      role="status"
      aria-label="ROI information"
    >
      <div className="flex-shrink-0">{currencyIcon}</div>
      <div className="flex-1">
        <p className="text-sm font-semibold text-green-800 leading-tight">
          {message}
        </p>
      </div>
    </div>
  );
};
export default ValuePropositionBadge;

View File

@@ -0,0 +1,74 @@
{
"categories": {
"daily_operations": "Daily Operations",
"smart_forecasting": "Smart Forecasting",
"smart_ordering": "Smart Ordering",
"business_insights": "Business Insights",
"multi_location": "Multi-Location",
"integrations": "Integrations",
"support": "Support & Training"
},
"features": {
"inventory_management": "Track all your inventory in real-time",
"inventory_management_tooltip": "See stock levels, expiry dates, and get low-stock alerts",
"sales_tracking": "Record every sale automatically",
"sales_tracking_tooltip": "Connect your POS or manually track sales",
"basic_recipes": "Manage recipes & ingredients",
"basic_recipes_tooltip": "Track ingredient costs and recipe profitability",
"production_planning": "Plan daily production batches",
"production_planning_tooltip": "Know exactly what to bake each day",
"basic_forecasting": "AI predicts your daily demand (7 days)",
"basic_forecasting_tooltip": "AI learns your sales patterns to reduce waste",
"demand_prediction": "Know what to bake before you run out",
"seasonal_patterns": "AI detects seasonal trends",
"seasonal_patterns_tooltip": "Understand Christmas, summer, and holiday patterns",
"weather_data_integration": "Weather-based demand predictions",
"weather_data_integration_tooltip": "Rainy days = more pastries, sunny days = less bread",
"traffic_data_integration": "Traffic & event impact analysis",
"traffic_data_integration_tooltip": "Predict demand during local events and high traffic",
"supplier_management": "Never run out of ingredients",
"supplier_management_tooltip": "Automatic reorder alerts based on usage",
"waste_tracking": "Track & reduce waste",
"waste_tracking_tooltip": "See what's expiring and why products go unsold",
"expiry_alerts": "Expiry date alerts",
"expiry_alerts_tooltip": "Get notified before ingredients expire",
"basic_reporting": "Sales & inventory reports",
"advanced_analytics": "Advanced profit & trend analysis",
"advanced_analytics_tooltip": "Understand which products make you the most money",
"profitability_analysis": "See profit margins by product",
"multi_location_support": "Manage up to 3 bakery locations",
"inventory_transfer": "Transfer products between locations",
"location_comparison": "Compare performance across bakeries",
"pos_integration": "Connect your POS system",
"pos_integration_tooltip": "Automatic sales import from your cash register",
"accounting_export": "Export to accounting software",
"full_api_access": "Full API access for custom integrations",
"email_support": "Email support (48h response)",
"phone_support": "Phone support (24h response)",
"dedicated_account_manager": "Dedicated account manager",
"support_24_7": "24/7 priority support"
},
"plans": {
"starter": {
"description": "Perfect for small bakeries getting started",
"tagline": "Start reducing waste and selling more",
"roi_badge": "Bakeries save €300-500/month on waste",
"support": "Email support (48h response)",
"recommended_for": "Single bakery, up to 50 products, 5 team members"
},
"professional": {
"description": "For growing bakeries with multiple locations",
"tagline": "Grow smart with advanced AI",
"roi_badge": "Bakeries save €800-1,200/month on waste & ordering",
"support": "Priority email + phone support (24h response)",
"recommended_for": "Growing bakeries, 2-3 locations, 100-500 products"
},
"enterprise": {
"description": "For large bakery chains and franchises",
"tagline": "No limits, maximum control",
"roi_badge": "Contact us for custom ROI analysis",
"support": "24/7 dedicated support + account manager",
"recommended_for": "Bakery chains, franchises, unlimited scale"
}
}
}

View File

@@ -0,0 +1,74 @@
{
"categories": {
"daily_operations": "Operaciones Diarias",
"smart_forecasting": "Predicción Inteligente",
"smart_ordering": "Pedidos Inteligentes",
"business_insights": "Análisis de Negocio",
"multi_location": "Multi-Ubicación",
"integrations": "Integraciones",
"support": "Soporte y Formación"
},
"features": {
"inventory_management": "Controla todo tu inventario en tiempo real",
"inventory_management_tooltip": "Ve niveles de stock, fechas de caducidad y alertas de bajo stock",
"sales_tracking": "Registra cada venta automáticamente",
"sales_tracking_tooltip": "Conecta tu TPV o registra ventas manualmente",
"basic_recipes": "Gestiona recetas e ingredientes",
"basic_recipes_tooltip": "Controla costes de ingredientes y rentabilidad de recetas",
"production_planning": "Planifica producción diaria",
"production_planning_tooltip": "Sabe exactamente qué hornear cada día",
"basic_forecasting": "IA predice tu demanda diaria (7 días)",
"basic_forecasting_tooltip": "IA aprende tus patrones de venta para reducir desperdicio",
"demand_prediction": "Sabe qué hornear antes de quedarte sin stock",
"seasonal_patterns": "IA detecta tendencias estacionales",
"seasonal_patterns_tooltip": "Entiende patrones de Navidad, verano y festivos",
"weather_data_integration": "Predicciones basadas en el clima",
"weather_data_integration_tooltip": "Días lluviosos = más bollería, días soleados = menos pan",
"traffic_data_integration": "Análisis de tráfico y eventos",
"traffic_data_integration_tooltip": "Predice demanda durante eventos locales y alto tráfico",
"supplier_management": "Nunca te quedes sin ingredientes",
"supplier_management_tooltip": "Alertas automáticas de reorden según uso",
"waste_tracking": "Controla y reduce desperdicios",
"waste_tracking_tooltip": "Ve qué caduca y por qué productos no se venden",
"expiry_alerts": "Alertas de caducidad",
"expiry_alerts_tooltip": "Recibe avisos antes de que caduquen ingredientes",
"basic_reporting": "Informes de ventas e inventario",
"advanced_analytics": "Análisis avanzado de beneficios y tendencias",
"advanced_analytics_tooltip": "Entiende qué productos te dan más beneficios",
"profitability_analysis": "Ve márgenes de beneficio por producto",
"multi_location_support": "Gestiona hasta 3 panaderías",
"inventory_transfer": "Transfiere productos entre ubicaciones",
"location_comparison": "Compara rendimiento entre panaderías",
"pos_integration": "Conecta tu sistema TPV",
"pos_integration_tooltip": "Importación automática de ventas desde tu caja",
"accounting_export": "Exporta a software de contabilidad",
"full_api_access": "API completa para integraciones personalizadas",
"email_support": "Soporte por email (48h)",
"phone_support": "Soporte telefónico (24h)",
"dedicated_account_manager": "Gestor de cuenta dedicado",
"support_24_7": "Soporte prioritario 24/7"
},
"plans": {
"starter": {
"description": "Perfecto para panaderías pequeñas comenzando",
"tagline": "Empieza a reducir desperdicios y vender más",
"roi_badge": "Panaderías ahorran €300-500/mes en desperdicios",
"support": "Soporte por email (48h)",
"recommended_for": "Una panadería, hasta 50 productos, 5 miembros del equipo"
},
"professional": {
"description": "Para panaderías en crecimiento con múltiples ubicaciones",
"tagline": "Crece inteligentemente con IA avanzada",
"roi_badge": "Panaderías ahorran €800-1,200/mes en desperdicios y pedidos",
"support": "Soporte prioritario por email + teléfono (24h)",
"recommended_for": "Panaderías en crecimiento, 2-3 ubicaciones, 100-500 productos"
},
"enterprise": {
"description": "Para cadenas de panaderías y franquicias",
"tagline": "Sin límites, máximo control",
"roi_badge": "Contacta para análisis ROI personalizado",
"support": "Soporte dedicado 24/7 + gestor de cuenta",
"recommended_for": "Cadenas de panaderías, franquicias, escala ilimitada"
}
}
}

View File

@@ -0,0 +1,74 @@
{
"categories": {
"daily_operations": "Eguneroko Eragiketak",
"smart_forecasting": "Iragarpen Adimentsua",
"smart_ordering": "Eskaera Adimentsua",
"business_insights": "Negozioaren Analisia",
"multi_location": "Hainbat Kokapen",
"integrations": "Integrazioak",
"support": "Laguntza eta Prestakuntza"
},
"features": {
"inventory_management": "Kontrolatu zure inbentario guztia denbora errealean",
"inventory_management_tooltip": "Ikusi stock mailak, iraungitze datak eta stock baxuko alertak",
"sales_tracking": "Erregistratu salmenta guztiak automatikoki",
"sales_tracking_tooltip": "Konektatu zure TPV edo erregistratu salmentak eskuz",
"basic_recipes": "Kudeatu errezetak eta osagaiak",
"basic_recipes_tooltip": "Kontrolatu osagaien kostuak eta errezeten errentagarritasuna",
"production_planning": "Planifikatu eguneko ekoizpena",
"production_planning_tooltip": "Jakin zehazki zer labean egun bakoitzean",
"basic_forecasting": "AIk zure eguneroko eskaria aurreikusten du (7 egun)",
"basic_forecasting_tooltip": "AIk zure salmenten ereduak ikasten ditu hondakina murrizteko",
"demand_prediction": "Jakin zer labean stock gabe gelditu aurretik",
"seasonal_patterns": "AIk sasoiko joerak detektatzen ditu",
"seasonal_patterns_tooltip": "Ulertu Eguberriko, udako eta jaieguneko ereduak",
"weather_data_integration": "Eguraldian oinarritutako eskaeraren iragarpenak",
"weather_data_integration_tooltip": "Egun euritsua = gozoki gehiago, egun eguratsua = ogi gutxiago",
"traffic_data_integration": "Trafikoaren eta ekitaldien inpaktuaren analisia",
"traffic_data_integration_tooltip": "Iragarri eskaria tokiko ekitaldien eta trafikoko gehiengo denboran",
"supplier_management": "Ez gelditu inoiz osagairik gabe",
"supplier_management_tooltip": "Erabileraren arabera berrizatzeko alertak automatikoak",
"waste_tracking": "Kontrolatu eta murriztu hondakinak",
"waste_tracking_tooltip": "Ikusi zer iraungitzen den eta zergatik ez diren produktuak saltzen",
"expiry_alerts": "Iraungitze dataren alertak",
"expiry_alerts_tooltip": "Jaso jakinarazpenak osagaiak iraungi aurretik",
"basic_reporting": "Salmenten eta inbentarioaren txostenak",
"advanced_analytics": "Irabazien eta joeren analisi aurreratua",
"advanced_analytics_tooltip": "Ulertu zein produktuk ematen dizkizuten irabazi gehien",
"profitability_analysis": "Ikusi produktuko irabazi-marjinak",
"multi_location_support": "Kudeatu 3 ogi-denda arte",
"inventory_transfer": "Transferitu produktuak kokapenen artean",
"location_comparison": "Konparatu errendimendua ogi-denda artean",
"pos_integration": "Konektatu zure TPV sistema",
"pos_integration_tooltip": "Salmenten inportazio automatikoa zure kutxatik",
"accounting_export": "Esportatu kontabilitate softwarera",
"full_api_access": "API osoa integraz personaletarako",
"email_support": "Posta elektronikoko laguntza (48h)",
"phone_support": "Telefono laguntza (24h)",
"dedicated_account_manager": "Kontu kudeatzaile dedikatua",
"support_24_7": "24/7 lehentasunezko laguntza"
},
"plans": {
"starter": {
"description": "Egokia hasten diren ogi-denda txikientzat",
"tagline": "Hasi hondakinak murrizten eta gehiago saltzen",
"roi_badge": "Ogi-dendek €300-500/hilean aurrezten dituzte hondakinetan",
"support": "Posta elektronikoko laguntza (48h)",
"recommended_for": "Ogi-denda bat, 50 produktu arte, 5 taldekide"
},
"professional": {
"description": "Hazteko ogi-dendak hainbat kokapenekin",
"tagline": "Hazi adimentsua AI aurreratuarekin",
"roi_badge": "Ogi-dendek €800-1,200/hilean aurrezten dituzte hondakinak eta eskaerak",
"support": "Lehentasunezko posta + telefono laguntza (24h)",
"recommended_for": "Hazteko ogi-dendak, 2-3 kokapenekin, 100-500 produktu"
},
"enterprise": {
"description": "Ogi-denda kateak eta frantzizietarako",
"tagline": "Mugarik gabe, kontrol maximoa",
"roi_badge": "Jarri gurekin harremanetan ROI analisi pertsonalizaturako",
"support": "24/7 laguntza dedikatua + kontu kudeatzailea",
"recommended_for": "Ogi-denda kateak, frantziziak, eskala mugagabea"
}
}
}

View File

@@ -15,6 +15,7 @@ import settingsEs from './es/settings.json';
import ajustesEs from './es/ajustes.json';
import reasoningEs from './es/reasoning.json';
import wizardsEs from './es/wizards.json';
import subscriptionEs from './es/subscription.json';
// English translations
import commonEn from './en/common.json';
@@ -33,6 +34,7 @@ import settingsEn from './en/settings.json';
import ajustesEn from './en/ajustes.json';
import reasoningEn from './en/reasoning.json';
import wizardsEn from './en/wizards.json';
import subscriptionEn from './en/subscription.json';
// Basque translations
import commonEu from './eu/common.json';
@@ -51,6 +53,7 @@ import settingsEu from './eu/settings.json';
import ajustesEu from './eu/ajustes.json';
import reasoningEu from './eu/reasoning.json';
import wizardsEu from './eu/wizards.json';
import subscriptionEu from './eu/subscription.json';
// Translation resources by language
export const resources = {
@@ -71,6 +74,7 @@ export const resources = {
ajustes: ajustesEs,
reasoning: reasoningEs,
wizards: wizardsEs,
subscription: subscriptionEs,
},
en: {
common: commonEn,
@@ -89,6 +93,7 @@ export const resources = {
ajustes: ajustesEn,
reasoning: reasoningEn,
wizards: wizardsEn,
subscription: subscriptionEn,
},
eu: {
common: commonEu,
@@ -107,6 +112,7 @@ export const resources = {
ajustes: ajustesEu,
reasoning: reasoningEu,
wizards: wizardsEu,
subscription: subscriptionEu,
},
};
@@ -143,7 +149,7 @@ export const languageConfig = {
};
// Namespaces available in translations
export const namespaces = ['common', 'auth', 'inventory', 'foodSafety', 'suppliers', 'orders', 'recipes', 'errors', 'dashboard', 'production', 'equipment', 'landing', 'settings', 'ajustes', 'reasoning', 'wizards'] as const;
export const namespaces = ['common', 'auth', 'inventory', 'foodSafety', 'suppliers', 'orders', 'recipes', 'errors', 'dashboard', 'production', 'equipment', 'landing', 'settings', 'ajustes', 'reasoning', 'wizards', 'subscription'] as const;
export type Namespace = typeof namespaces[number];
// Helper function to get language display name

View File

@@ -4,7 +4,8 @@ Forecasting Operations API - Business operations for forecast generation and pre
"""
import structlog
from fastapi import APIRouter, Depends, HTTPException, status, Query, Path, Request
from fastapi import APIRouter, Depends, HTTPException, status, Query, Path, Request, BackgroundTasks
from fastapi.responses import JSONResponse
from typing import List, Dict, Any, Optional
from datetime import date, datetime, timezone
import uuid
@@ -202,6 +203,97 @@ async def generate_multi_day_forecast(
)
async def execute_batch_forecast_background(
    tenant_id: str,
    batch_id: str,
    inventory_product_ids: List[str],
    forecast_days: int,
    batch_name: str
):
    """
    Background task for batch forecast generation.

    Runs outside the request/response cycle so long-running batch operations
    do not block the API thread. Builds its own database manager and
    forecasting service because request-scoped dependencies are no longer
    available when a BackgroundTask executes.

    Args:
        tenant_id: Tenant owning the batch.
        batch_id: Primary key of the pre-created prediction batch record.
        inventory_product_ids: Products to generate forecasts for.
        forecast_days: Forecast horizon in days for each product.
        batch_name: Human-readable batch label.
    """
    # Imported locally (matching the original code's style) to avoid import
    # cycles at module load time.
    from app.repositories import PredictionBatchRepository
    from app.schemas.forecasts import BatchForecastRequest

    logger.info("Starting background batch forecast",
                batch_id=batch_id,
                tenant_id=tenant_id,
                product_count=len(inventory_product_ids))

    database_manager = create_database_manager(settings.DATABASE_URL, "forecasting-service")
    forecasting_service = EnhancedForecastingService(database_manager)

    async def _update_batch(update_data: Dict[str, Any]) -> None:
        # Each status write uses its own short-lived session so a failure in
        # one update cannot poison later ones.
        async with database_manager.get_session() as session:
            batch_repo = PredictionBatchRepository(session)
            await batch_repo.update(batch_id, update_data)
            await session.commit()

    try:
        # Mark the batch as running before doing any heavy work.
        await _update_batch({"status": "processing", "completed_products": 0})

        # Generate forecasts for all products.
        batch_request = BatchForecastRequest(
            tenant_id=tenant_id,
            batch_name=batch_name,
            inventory_product_ids=inventory_product_ids,
            forecast_days=forecast_days
        )
        result = await forecasting_service.generate_batch_forecasts(
            tenant_id=tenant_id,
            request=batch_request
        )

        # Record final counts and completion time.
        await _update_batch({
            "status": "completed",
            "completed_at": datetime.now(timezone.utc),
            "completed_products": result.get("successful_forecasts", 0),
            "failed_products": result.get("failed_forecasts", 0)
        })

        logger.info("Background batch forecast completed",
                    batch_id=batch_id,
                    successful=result.get("successful_forecasts", 0),
                    failed=result.get("failed_forecasts", 0))
    except Exception as e:
        logger.error("Background batch forecast failed",
                     batch_id=batch_id,
                     error=str(e))
        try:
            await _update_batch({
                "status": "failed",
                "completed_at": datetime.now(timezone.utc),
                "error_message": str(e)
            })
        except Exception as update_error:
            # Best effort only: the original failure is already logged above.
            logger.error("Failed to update batch status after error",
                         batch_id=batch_id,
                         error=str(update_error))
@router.post(
route_builder.build_operations_route("batch"),
response_model=BatchForecastResponse
@@ -211,11 +303,17 @@ async def generate_multi_day_forecast(
async def generate_batch_forecast(
request: BatchForecastRequest,
tenant_id: str = Path(..., description="Tenant ID"),
background_tasks: BackgroundTasks = BackgroundTasks(),
request_obj: Request = None,
current_user: dict = Depends(get_current_user_dep),
enhanced_forecasting_service: EnhancedForecastingService = Depends(get_enhanced_forecasting_service)
):
"""Generate forecasts for multiple products in batch (Admin+ only, quota enforced)"""
"""
Generate forecasts for multiple products in batch (Admin+ only, quota enforced).
IMPROVEMENT: Now uses background tasks for large batches to prevent API timeouts.
Returns immediately with batch_id for status tracking.
"""
metrics = get_metrics_collector(request_obj)
try:
@@ -258,48 +356,104 @@ async def generate_batch_forecast(
error_message=None
)
# Skip rate limiting for service-to-service calls (orchestrator)
# Rate limiting is handled at the gateway level for user requests
# IMPROVEMENT: For large batches (>5 products), use background task
# For small batches, execute synchronously for immediate results
batch_name = getattr(request, 'batch_name', f"batch-{datetime.now().strftime('%Y%m%d_%H%M%S')}")
forecast_days = getattr(request, 'forecast_days', 7)
# Create a copy of the request with the actual list of product IDs to forecast
# (whether originally provided or fetched from inventory service)
from app.schemas.forecasts import BatchForecastRequest
updated_request = BatchForecastRequest(
tenant_id=tenant_id, # Use the tenant_id from the path parameter
batch_name=getattr(request, 'batch_name', f"orchestrator-batch-{datetime.now().strftime('%Y%m%d')}"),
inventory_product_ids=inventory_product_ids,
forecast_days=getattr(request, 'forecast_days', 7)
)
batch_result = await enhanced_forecasting_service.generate_batch_forecasts(
tenant_id=tenant_id,
request=updated_request
)
if metrics:
metrics.increment_counter("batch_forecasts_success_total")
logger.info("Batch forecast generated successfully",
tenant_id=tenant_id,
total_forecasts=batch_result.get('total_forecasts', 0))
# Convert the service result to BatchForecastResponse format
from app.schemas.forecasts import BatchForecastResponse
# Create batch record first
batch_id = str(uuid.uuid4())
now = datetime.now(timezone.utc)
return BatchForecastResponse(
id=batch_result.get('id', str(uuid.uuid4())), # Use 'id' field (UUID) instead of 'batch_id' (string)
tenant_id=tenant_id,
batch_name=updated_request.batch_name,
status="completed",
total_products=batch_result.get('total_forecasts', 0),
completed_products=batch_result.get('successful_forecasts', 0),
failed_products=batch_result.get('failed_forecasts', 0),
requested_at=now,
completed_at=now,
processing_time_ms=0,
forecasts=[],
error_message=None
)
async with enhanced_forecasting_service.database_manager.get_session() as session:
from app.repositories import PredictionBatchRepository
batch_repo = PredictionBatchRepository(session)
batch_data = {
"tenant_id": tenant_id,
"batch_name": batch_name,
"total_products": len(inventory_product_ids),
"forecast_days": forecast_days,
"status": "pending"
}
batch = await batch_repo.create_batch(batch_data)
batch_id = str(batch.id)
await session.commit()
# Use background task for large batches to prevent API timeout
use_background = len(inventory_product_ids) > 5
if use_background:
# Queue background task
background_tasks.add_task(
execute_batch_forecast_background,
tenant_id=tenant_id,
batch_id=batch_id,
inventory_product_ids=inventory_product_ids,
forecast_days=forecast_days,
batch_name=batch_name
)
logger.info("Batch forecast queued for background processing",
tenant_id=tenant_id,
batch_id=batch_id,
product_count=len(inventory_product_ids))
# Return immediately with pending status
from app.schemas.forecasts import BatchForecastResponse
return BatchForecastResponse(
id=batch_id,
tenant_id=tenant_id,
batch_name=batch_name,
status="pending",
total_products=len(inventory_product_ids),
completed_products=0,
failed_products=0,
requested_at=now,
completed_at=None,
processing_time_ms=0,
forecasts=None,
error_message=None
)
else:
# Small batch - execute synchronously
from app.schemas.forecasts import BatchForecastRequest
updated_request = BatchForecastRequest(
tenant_id=tenant_id,
batch_name=batch_name,
inventory_product_ids=inventory_product_ids,
forecast_days=forecast_days
)
batch_result = await enhanced_forecasting_service.generate_batch_forecasts(
tenant_id=tenant_id,
request=updated_request
)
if metrics:
metrics.increment_counter("batch_forecasts_success_total")
logger.info("Batch forecast completed synchronously",
tenant_id=tenant_id,
total_forecasts=batch_result.get('total_forecasts', 0))
# Convert the service result to BatchForecastResponse format
from app.schemas.forecasts import BatchForecastResponse
return BatchForecastResponse(
id=batch_id,
tenant_id=tenant_id,
batch_name=batch_name,
status="completed",
total_products=batch_result.get('total_forecasts', 0),
completed_products=batch_result.get('successful_forecasts', 0),
failed_products=batch_result.get('failed_forecasts', 0),
requested_at=now,
completed_at=datetime.now(timezone.utc),
processing_time_ms=0,
forecasts=[],
error_message=None
)
except ValueError as e:
if metrics:
@@ -806,3 +960,50 @@ async def preview_tenant_data_deletion(
status_code=500,
detail=f"Failed to preview tenant data deletion: {str(e)}"
)
@router.get("/health/database")
async def database_health():
    """
    Database health check endpoint with connection pool monitoring.

    Returns connectivity status plus connection pool statistics so operators
    can detect connection pool exhaustion before it causes request failures.

    Responses:
        200: database reachable; pool "healthy" or "warning".
        503: database unreachable, pool "critical", or the check itself failed.
    """
    # Imported locally to keep the endpoint self-contained.
    # Note: `datetime` and `timezone` come from this module's top-level
    # imports; the previous redundant local `from datetime import datetime`
    # shadowed them inconsistently and has been removed.
    from app.core.database import get_db_health, get_connection_pool_stats

    try:
        # Lightweight connectivity probe.
        db_healthy = await get_db_health()
        # Pool usage/capacity statistics with a derived status field.
        pool_stats = await get_connection_pool_stats()

        response = {
            "service": "forecasting",
            "timestamp": datetime.now(timezone.utc).isoformat(),
            "database_connected": db_healthy,
            "connection_pool": pool_stats,
            "overall_status": "healthy" if db_healthy and pool_stats.get("status") == "healthy" else "degraded"
        }

        # Map health state to HTTP status: unreachable/critical -> 503,
        # warning -> explicit 200, healthy -> plain dict (FastAPI 200).
        if not db_healthy or pool_stats.get("status") == "critical":
            return JSONResponse(status_code=503, content=response)
        elif pool_stats.get("status") == "warning":
            return JSONResponse(status_code=200, content=response)
        else:
            return response
    except Exception as e:
        logger.error("Health check failed", error=str(e))
        return JSONResponse(
            status_code=503,
            content={
                "service": "forecasting",
                "timestamp": datetime.now(timezone.utc).isoformat(),
                "overall_status": "unhealthy",
                "error": str(e)
            }
        )

View File

@@ -70,6 +70,47 @@ async def get_db_health() -> bool:
logger.error("Database health check failed", error=str(e))
return False
async def get_connection_pool_stats() -> dict:
    """
    Get current connection pool statistics for monitoring.

    Returns:
        Dictionary with pool statistics (usage, capacity, percentage) and a
        derived health status ("healthy" / "warning" / "critical"), or an
        {"status": "error", ...} payload if the stats cannot be read.
    """
    # Configured pool limits. NOTE(review): these mirror the engine's
    # pool_size/max_overflow settings — keep in sync if the engine config
    # changes (ideally derive them from settings instead of hard-coding).
    # The previous code duplicated the total as a bare `30`; it is now
    # computed once so the two values cannot drift apart.
    pool_size_limit = 10
    max_overflow_limit = 20
    max_capacity = pool_size_limit + max_overflow_limit

    try:
        pool = async_engine.pool

        total_connections = pool.size() + pool.overflow()
        stats = {
            "pool_size": pool.size(),
            "checked_in_connections": pool.checkedin(),
            "checked_out_connections": pool.checkedout(),
            "overflow_connections": pool.overflow(),
            "total_connections": total_connections,
            "max_capacity": max_capacity,
            "usage_percentage": round((total_connections / max_capacity) * 100, 2)
        }

        # Derive a coarse health status for alerting thresholds.
        if stats["usage_percentage"] > 90:
            stats["status"] = "critical"
            stats["message"] = "Connection pool near capacity"
        elif stats["usage_percentage"] > 80:
            stats["status"] = "warning"
            stats["message"] = "Connection pool usage high"
        else:
            stats["status"] = "healthy"
            stats["message"] = "Connection pool healthy"

        return stats
    except Exception as e:
        logger.error("Failed to get connection pool stats", error=str(e))
        return {
            "status": "error",
            "message": f"Failed to get pool stats: {str(e)}"
        }
# Database manager instance for service_base compatibility
database_manager = DatabaseManager(
database_url=settings.DATABASE_URL,

View File

@@ -5,7 +5,7 @@
Forecast models for the forecasting service
"""
from sqlalchemy import Column, String, Integer, Float, DateTime, Boolean, Text, JSON
from sqlalchemy import Column, String, Integer, Float, DateTime, Boolean, Text, JSON, UniqueConstraint, Index
from sqlalchemy.dialects.postgresql import UUID
from datetime import datetime, timezone
import uuid
@@ -16,6 +16,17 @@ class Forecast(Base):
"""Forecast model for storing prediction results"""
__tablename__ = "forecasts"
__table_args__ = (
# Unique constraint to prevent duplicate forecasts
# Ensures only one forecast per (tenant, product, date, location) combination
UniqueConstraint(
'tenant_id', 'inventory_product_id', 'forecast_date', 'location',
name='uq_forecast_tenant_product_date_location'
),
# Composite index for common query patterns
Index('ix_forecasts_tenant_product_date', 'tenant_id', 'inventory_product_id', 'forecast_date'),
)
id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)
inventory_product_id = Column(UUID(as_uuid=True), nullable=False, index=True) # Reference to inventory service

View File

@@ -6,6 +6,7 @@ Repository for forecast operations
from typing import Optional, List, Dict, Any
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, and_, text, desc, func
from sqlalchemy.exc import IntegrityError
from datetime import datetime, timedelta, date, timezone
import structlog
@@ -24,7 +25,13 @@ class ForecastRepository(ForecastingBaseRepository):
super().__init__(Forecast, session, cache_ttl)
async def create_forecast(self, forecast_data: Dict[str, Any]) -> Forecast:
"""Create a new forecast with validation"""
"""
Create a new forecast with validation.
Handles duplicate forecast race condition gracefully:
If a forecast already exists for the same (tenant, product, date, location),
it will be updated instead of creating a duplicate.
"""
try:
# Validate forecast data
validation_result = self._validate_forecast_data(
@@ -44,19 +51,79 @@ class ForecastRepository(ForecastingBaseRepository):
if "business_type" not in forecast_data:
forecast_data["business_type"] = "individual"
# Create forecast
forecast = await self.create(forecast_data)
# Try to create forecast
try:
forecast = await self.create(forecast_data)
logger.info("Forecast created successfully",
forecast_id=forecast.id,
tenant_id=forecast.tenant_id,
inventory_product_id=forecast.inventory_product_id,
forecast_date=forecast.forecast_date.isoformat())
logger.info("Forecast created successfully",
forecast_id=forecast.id,
tenant_id=forecast.tenant_id,
inventory_product_id=forecast.inventory_product_id,
forecast_date=forecast.forecast_date.isoformat())
return forecast
return forecast
except IntegrityError as ie:
# Handle unique constraint violation (duplicate forecast)
error_msg = str(ie).lower()
if "unique constraint" in error_msg or "duplicate" in error_msg or "uq_forecast_tenant_product_date_location" in error_msg:
logger.warning("Forecast already exists (race condition), updating instead",
tenant_id=forecast_data.get("tenant_id"),
inventory_product_id=forecast_data.get("inventory_product_id"),
forecast_date=str(forecast_data.get("forecast_date")))
# Rollback the failed insert
await self.session.rollback()
# Fetch the existing forecast
existing_forecast = await self.get_existing_forecast(
tenant_id=forecast_data["tenant_id"],
inventory_product_id=forecast_data["inventory_product_id"],
forecast_date=forecast_data["forecast_date"],
location=forecast_data["location"]
)
if existing_forecast:
# Update existing forecast with new prediction data
update_data = {
"predicted_demand": forecast_data["predicted_demand"],
"confidence_lower": forecast_data["confidence_lower"],
"confidence_upper": forecast_data["confidence_upper"],
"confidence_level": forecast_data.get("confidence_level", 0.8),
"model_id": forecast_data["model_id"],
"model_version": forecast_data.get("model_version"),
"algorithm": forecast_data.get("algorithm", "prophet"),
"processing_time_ms": forecast_data.get("processing_time_ms"),
"features_used": forecast_data.get("features_used"),
"weather_temperature": forecast_data.get("weather_temperature"),
"weather_precipitation": forecast_data.get("weather_precipitation"),
"weather_description": forecast_data.get("weather_description"),
}
updated_forecast = await self.update(str(existing_forecast.id), update_data)
logger.info("Existing forecast updated after duplicate detection",
forecast_id=updated_forecast.id,
tenant_id=updated_forecast.tenant_id,
inventory_product_id=updated_forecast.inventory_product_id)
return updated_forecast
else:
# This shouldn't happen, but log it
logger.error("Duplicate forecast detected but not found in database")
raise DatabaseError("Duplicate forecast detected but not found")
else:
# Different integrity error, re-raise
raise
except ValidationError:
raise
except IntegrityError as ie:
# Re-raise integrity errors that weren't handled above
logger.error("Database integrity error creating forecast",
tenant_id=forecast_data.get("tenant_id"),
error=str(ie))
raise DatabaseError(f"Database integrity error: {str(ie)}")
except Exception as e:
logger.error("Failed to create forecast",
tenant_id=forecast_data.get("tenant_id"),
@@ -64,6 +131,29 @@ class ForecastRepository(ForecastingBaseRepository):
error=str(e))
raise DatabaseError(f"Failed to create forecast: {str(e)}")
async def get_existing_forecast(
    self,
    tenant_id: str,
    inventory_product_id: str,
    forecast_date: datetime,
    location: str
) -> Optional[Forecast]:
    """
    Look up the single forecast row identified by the unique key
    (tenant, product, date, location).

    Returns the matching Forecast, or None if no row exists or the
    lookup fails (failures are logged and treated as "not found").
    """
    try:
        criteria = and_(
            Forecast.tenant_id == tenant_id,
            Forecast.inventory_product_id == inventory_product_id,
            Forecast.forecast_date == forecast_date,
            Forecast.location == location,
        )
        result = await self.session.execute(select(Forecast).where(criteria))
        return result.scalar_one_or_none()
    except Exception as e:
        # Best-effort lookup: swallow and log, callers handle None.
        logger.error("Failed to get existing forecast", error=str(e))
        return None
async def get_forecasts_by_date_range(
self,
tenant_id: str,

View File

@@ -15,6 +15,7 @@ from app.schemas.forecasts import ForecastRequest, ForecastResponse
from app.services.prediction_service import PredictionService
from app.services.model_client import ModelClient
from app.services.data_client import DataClient
from app.utils.distributed_lock import get_forecast_lock, get_batch_forecast_lock, LockAcquisitionError
# Import repositories
from app.repositories import (
@@ -291,6 +292,12 @@ class EnhancedForecastingService:
) -> ForecastResponse:
"""
Generate forecast using repository pattern with caching.
CRITICAL FIXES:
1. External HTTP calls are performed BEFORE opening database session
to prevent connection pool exhaustion and blocking.
2. Advisory locks prevent concurrent forecast generation for same product/date
to avoid duplicate work and race conditions.
"""
start_time = datetime.now(timezone.utc)
@@ -300,97 +307,149 @@ class EnhancedForecastingService:
inventory_product_id=request.inventory_product_id,
date=request.forecast_date.isoformat())
# Get session and initialize repositories
# CRITICAL FIX: Get model BEFORE opening database session
# This prevents holding database connections during potentially slow external API calls
logger.debug("Fetching model data before opening database session",
tenant_id=tenant_id,
inventory_product_id=request.inventory_product_id)
model_data = await self._get_latest_model_with_fallback(tenant_id, request.inventory_product_id)
if not model_data:
raise ValueError(f"No valid model available for product: {request.inventory_product_id}")
logger.debug("Model data fetched successfully",
tenant_id=tenant_id,
model_id=model_data.get('model_id'))
# Step 3: Prepare features with fallbacks (includes external API calls for weather)
features = await self._prepare_forecast_features_with_fallbacks(tenant_id, request)
# Now open database session AFTER external HTTP calls are complete
# CRITICAL FIX: Acquire distributed lock to prevent concurrent forecast generation
async with self.database_manager.get_background_session() as session:
repos = await self._init_repositories(session)
# Step 1: Check cache first
cached_prediction = await repos['cache'].get_cached_prediction(
tenant_id, request.inventory_product_id, request.location, request.forecast_date
)
if cached_prediction:
logger.debug("Using cached prediction",
tenant_id=tenant_id,
inventory_product_id=request.inventory_product_id)
return self._create_forecast_response_from_cache(cached_prediction)
# Step 2: Get model with validation
model_data = await self._get_latest_model_with_fallback(tenant_id, request.inventory_product_id)
if not model_data:
raise ValueError(f"No valid model available for product: {request.inventory_product_id}")
# Step 3: Prepare features with fallbacks
features = await self._prepare_forecast_features_with_fallbacks(tenant_id, request)
# Step 4: Generate prediction
prediction_result = await self.prediction_service.predict(
model_id=model_data['model_id'],
model_path=model_data['model_path'],
features=features,
confidence_level=request.confidence_level
)
# Step 5: Apply business rules
adjusted_prediction = self._apply_business_rules(
prediction_result, request, features
)
# Step 6: Save forecast using repository
# Convert forecast_date to datetime if it's a string
forecast_datetime = request.forecast_date
if isinstance(forecast_datetime, str):
from dateutil.parser import parse
forecast_datetime = parse(forecast_datetime)
forecast_data = {
"tenant_id": tenant_id,
"inventory_product_id": request.inventory_product_id,
"product_name": None, # Field is now nullable, use inventory_product_id as reference
"location": request.location,
"forecast_date": forecast_datetime,
"predicted_demand": adjusted_prediction['prediction'],
"confidence_lower": adjusted_prediction.get('lower_bound', adjusted_prediction['prediction'] * 0.8),
"confidence_upper": adjusted_prediction.get('upper_bound', adjusted_prediction['prediction'] * 1.2),
"confidence_level": request.confidence_level,
"model_id": model_data['model_id'],
"model_version": str(model_data.get('version', '1.0')),
"algorithm": model_data.get('algorithm', 'prophet'),
"business_type": features.get('business_type', 'individual'),
"is_holiday": features.get('is_holiday', False),
"is_weekend": features.get('is_weekend', False),
"day_of_week": features.get('day_of_week', 0),
"weather_temperature": features.get('temperature'),
"weather_precipitation": features.get('precipitation'),
"weather_description": features.get('weather_description'),
"traffic_volume": features.get('traffic_volume'),
"processing_time_ms": int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000),
"features_used": features
}
forecast = await repos['forecast'].create_forecast(forecast_data)
# Step 6: Cache the prediction
await repos['cache'].cache_prediction(
# Get lock for this specific forecast (tenant + product + date)
forecast_date_str = request.forecast_date.isoformat().split('T')[0] if hasattr(request.forecast_date, 'isoformat') else str(request.forecast_date).split('T')[0]
lock = get_forecast_lock(
tenant_id=tenant_id,
inventory_product_id=request.inventory_product_id,
location=request.location,
forecast_date=forecast_datetime,
predicted_demand=adjusted_prediction['prediction'],
confidence_lower=adjusted_prediction.get('lower_bound', adjusted_prediction['prediction'] * 0.8),
confidence_upper=adjusted_prediction.get('upper_bound', adjusted_prediction['prediction'] * 1.2),
model_id=model_data['model_id'],
expires_in_hours=24
product_id=str(request.inventory_product_id),
forecast_date=forecast_date_str
)
try:
async with lock.acquire(session):
repos = await self._init_repositories(session)
logger.info("Enhanced forecast generated successfully",
forecast_id=forecast.id,
tenant_id=tenant_id,
prediction=adjusted_prediction['prediction'])
# Step 1: Check cache first (inside lock for consistency)
# If another request generated the forecast while we waited for the lock,
# we'll find it in the cache
cached_prediction = await repos['cache'].get_cached_prediction(
tenant_id, request.inventory_product_id, request.location, request.forecast_date
)
return self._create_forecast_response_from_model(forecast)
if cached_prediction:
logger.info("Found cached prediction after acquiring lock (concurrent request completed first)",
tenant_id=tenant_id,
inventory_product_id=request.inventory_product_id)
return self._create_forecast_response_from_cache(cached_prediction)
# Step 2: Model data already fetched above (before session opened)
# Step 4: Generate prediction (in-memory operation)
prediction_result = await self.prediction_service.predict(
model_id=model_data['model_id'],
model_path=model_data['model_path'],
features=features,
confidence_level=request.confidence_level
)
# Step 5: Apply business rules
adjusted_prediction = self._apply_business_rules(
prediction_result, request, features
)
# Step 6: Save forecast using repository
# Convert forecast_date to datetime if it's a string
forecast_datetime = request.forecast_date
if isinstance(forecast_datetime, str):
from dateutil.parser import parse
forecast_datetime = parse(forecast_datetime)
forecast_data = {
"tenant_id": tenant_id,
"inventory_product_id": request.inventory_product_id,
"product_name": None, # Field is now nullable, use inventory_product_id as reference
"location": request.location,
"forecast_date": forecast_datetime,
"predicted_demand": adjusted_prediction['prediction'],
"confidence_lower": adjusted_prediction.get('lower_bound', adjusted_prediction['prediction'] * 0.8),
"confidence_upper": adjusted_prediction.get('upper_bound', adjusted_prediction['prediction'] * 1.2),
"confidence_level": request.confidence_level,
"model_id": model_data['model_id'],
"model_version": str(model_data.get('version', '1.0')),
"algorithm": model_data.get('algorithm', 'prophet'),
"business_type": features.get('business_type', 'individual'),
"is_holiday": features.get('is_holiday', False),
"is_weekend": features.get('is_weekend', False),
"day_of_week": features.get('day_of_week', 0),
"weather_temperature": features.get('temperature'),
"weather_precipitation": features.get('precipitation'),
"weather_description": features.get('weather_description'),
"traffic_volume": features.get('traffic_volume'),
"processing_time_ms": int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000),
"features_used": features
}
forecast = await repos['forecast'].create_forecast(forecast_data)
await session.commit()
# Step 7: Cache the prediction
await repos['cache'].cache_prediction(
tenant_id=tenant_id,
inventory_product_id=request.inventory_product_id,
location=request.location,
forecast_date=forecast_datetime,
predicted_demand=adjusted_prediction['prediction'],
confidence_lower=adjusted_prediction.get('lower_bound', adjusted_prediction['prediction'] * 0.8),
confidence_upper=adjusted_prediction.get('upper_bound', adjusted_prediction['prediction'] * 1.2),
model_id=model_data['model_id'],
expires_in_hours=24
)
logger.info("Enhanced forecast generated successfully",
forecast_id=forecast.id,
tenant_id=tenant_id,
prediction=adjusted_prediction['prediction'])
return self._create_forecast_response_from_model(forecast)
except LockAcquisitionError:
# Could not acquire lock - another forecast request is in progress
logger.warning("Could not acquire forecast lock, checking cache for concurrent request result",
tenant_id=tenant_id,
inventory_product_id=request.inventory_product_id,
forecast_date=forecast_date_str)
# Wait a moment and check cache - maybe the concurrent request finished
await asyncio.sleep(1)
repos = await self._init_repositories(session)
cached_prediction = await repos['cache'].get_cached_prediction(
tenant_id, request.inventory_product_id, request.location, request.forecast_date
)
if cached_prediction:
logger.info("Found forecast in cache after lock timeout (concurrent request completed)",
tenant_id=tenant_id,
inventory_product_id=request.inventory_product_id)
return self._create_forecast_response_from_cache(cached_prediction)
# No cached result, raise error
raise ValueError(
f"Forecast generation already in progress for product {request.inventory_product_id}. "
"Please try again in a few seconds."
)
except Exception as e:
processing_time = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)

View File

@@ -0,0 +1,3 @@
"""
Utility modules for forecasting service
"""

View File

@@ -0,0 +1,258 @@
"""
Distributed Locking Mechanisms for Forecasting Service
Prevents concurrent forecast generation for the same product/date
"""
import asyncio
import hashlib
import logging
import time
from contextlib import asynccontextmanager
from datetime import datetime, timezone, timedelta
from typing import Optional

from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession
logger = logging.getLogger(__name__)
class LockAcquisitionError(Exception):
    """Raised when a distributed lock cannot be acquired within its timeout."""
class DatabaseLock:
    """
    Database-based distributed lock using PostgreSQL advisory locks.

    Works across multiple service instances: every holder of the same
    lock name contends on the same advisory lock id.
    """

    def __init__(self, lock_name: str, timeout: float = 30.0):
        """
        Initialize database lock.

        Args:
            lock_name: Unique identifier for the lock
            timeout: Maximum seconds to wait for lock acquisition
        """
        self.lock_name = lock_name
        self.timeout = timeout
        self.lock_id = self._hash_lock_name(lock_name)

    def _hash_lock_name(self, name: str) -> int:
        """
        Convert lock name to a stable positive 31-bit integer ID for
        PostgreSQL advisory locks.

        BUGFIX: the previous implementation used the built-in hash(),
        whose output for strings is randomized per process
        (PYTHONHASHSEED). Different service instances — and the same
        service after a restart — therefore computed DIFFERENT ids for
        the SAME lock name, silently disabling cross-instance mutual
        exclusion. A sha256 digest is stable across processes/restarts.
        """
        digest = hashlib.sha256(name.encode("utf-8")).digest()
        return int.from_bytes(digest[:4], "big") % (2**31)

    @asynccontextmanager
    async def acquire(self, session: AsyncSession):
        """
        Acquire distributed lock as async context manager.

        Polls pg_try_advisory_lock (non-blocking) every 100ms until the
        lock is obtained or the timeout elapses; the lock is always
        released on exit if it was acquired.

        Args:
            session: Database session for lock operations

        Raises:
            LockAcquisitionError: If lock cannot be acquired within timeout
        """
        acquired = False
        start_time = time.time()
        try:
            # Try to acquire lock with timeout
            while time.time() - start_time < self.timeout:
                # Non-blocking attempt; returns true iff the lock was taken.
                result = await session.execute(
                    text("SELECT pg_try_advisory_lock(:lock_id)"),
                    {"lock_id": self.lock_id}
                )
                acquired = result.scalar()
                if acquired:
                    logger.info(f"Acquired lock: {self.lock_name} (id={self.lock_id})")
                    break
                # Wait a bit before retrying
                await asyncio.sleep(0.1)

            if not acquired:
                raise LockAcquisitionError(
                    f"Could not acquire lock '{self.lock_name}' within {self.timeout}s"
                )

            yield
        finally:
            if acquired:
                # Release the advisory lock on the same session that took it.
                await session.execute(
                    text("SELECT pg_advisory_unlock(:lock_id)"),
                    {"lock_id": self.lock_id}
                )
                logger.info(f"Released lock: {self.lock_name} (id={self.lock_id})")
class SimpleDatabaseLock:
"""
Simple table-based distributed lock.
Alternative to advisory locks, uses a dedicated locks table.
"""
def __init__(self, lock_name: str, timeout: float = 30.0, ttl: float = 300.0):
    """
    Create a table-based lock handle.

    Args:
        lock_name: Unique identifier for the lock.
        timeout: Maximum seconds to wait when acquiring.
        ttl: Seconds after which an unreleased lock is considered stale.
    """
    self.ttl = ttl
    self.timeout = timeout
    self.lock_name = lock_name
async def _ensure_lock_table(self, session: AsyncSession):
"""Ensure locks table exists"""
create_table_sql = """
CREATE TABLE IF NOT EXISTS distributed_locks (
lock_name VARCHAR(255) PRIMARY KEY,
acquired_at TIMESTAMP WITH TIME ZONE NOT NULL,
acquired_by VARCHAR(255),
expires_at TIMESTAMP WITH TIME ZONE NOT NULL
)
"""
await session.execute(text(create_table_sql))
await session.commit()
async def _cleanup_stale_locks(self, session: AsyncSession):
"""Remove expired locks"""
cleanup_sql = """
DELETE FROM distributed_locks
WHERE expires_at < :now
"""
await session.execute(
text(cleanup_sql),
{"now": datetime.now(timezone.utc)}
)
await session.commit()
@asynccontextmanager
async def acquire(self, session: AsyncSession, owner: str = "forecasting-service"):
"""
Acquire simple database lock.
Args:
session: Database session
owner: Identifier for lock owner
Raises:
LockAcquisitionError: If lock cannot be acquired
"""
await self._ensure_lock_table(session)
await self._cleanup_stale_locks(session)
acquired = False
start_time = time.time()
try:
# Try to acquire lock
while time.time() - start_time < self.timeout:
now = datetime.now(timezone.utc)
expires_at = now + timedelta(seconds=self.ttl)
try:
# Try to insert lock record
insert_sql = """
INSERT INTO distributed_locks (lock_name, acquired_at, acquired_by, expires_at)
VALUES (:lock_name, :acquired_at, :acquired_by, :expires_at)
ON CONFLICT (lock_name) DO NOTHING
RETURNING lock_name
"""
result = await session.execute(
text(insert_sql),
{
"lock_name": self.lock_name,
"acquired_at": now,
"acquired_by": owner,
"expires_at": expires_at
}
)
await session.commit()
if result.rowcount > 0:
acquired = True
logger.info(f"Acquired simple lock: {self.lock_name}")
break
except Exception as e:
logger.debug(f"Lock acquisition attempt failed: {e}")
await session.rollback()
# Wait before retrying
await asyncio.sleep(0.5)
if not acquired:
raise LockAcquisitionError(
f"Could not acquire lock '{self.lock_name}' within {self.timeout}s"
)
yield
finally:
if acquired:
# Release lock
delete_sql = """
DELETE FROM distributed_locks
WHERE lock_name = :lock_name
"""
await session.execute(
text(delete_sql),
{"lock_name": self.lock_name}
)
await session.commit()
logger.info(f"Released simple lock: {self.lock_name}")
def get_forecast_lock(
    tenant_id: str,
    product_id: str,
    forecast_date: str,
    use_advisory: bool = True
) -> "DatabaseLock | SimpleDatabaseLock":
    """
    Get distributed lock for generating a forecast for a specific product and date.

    Note: the return annotation is a lazy string union because
    ``SimpleDatabaseLock`` is not a subclass of ``DatabaseLock`` — the old
    ``-> DatabaseLock`` annotation was wrong for ``use_advisory=False``.

    Args:
        tenant_id: Tenant identifier
        product_id: Product identifier
        forecast_date: Forecast date (ISO format)
        use_advisory: Use PostgreSQL advisory locks (True) or table-based (False)

    Returns:
        ``DatabaseLock`` when ``use_advisory`` is True, otherwise
        ``SimpleDatabaseLock``.
    """
    # The name encodes tenant/product/date, so forecasts for different
    # products or dates never contend with each other.
    lock_name = f"forecast:{tenant_id}:{product_id}:{forecast_date}"
    if use_advisory:
        return DatabaseLock(lock_name, timeout=30.0)
    return SimpleDatabaseLock(lock_name, timeout=30.0, ttl=300.0)
def get_batch_forecast_lock(tenant_id: str, use_advisory: bool = True) -> "DatabaseLock | SimpleDatabaseLock":
    """
    Get distributed lock for batch forecast generation for a tenant.

    Note: the return annotation is a lazy string union because
    ``SimpleDatabaseLock`` is not a subclass of ``DatabaseLock`` — the old
    ``-> DatabaseLock`` annotation was wrong for ``use_advisory=False``.

    Args:
        tenant_id: Tenant identifier
        use_advisory: Use PostgreSQL advisory locks (True) or table-based (False)

    Returns:
        ``DatabaseLock`` when ``use_advisory`` is True, otherwise
        ``SimpleDatabaseLock``.
    """
    # One lock per tenant: batches for different tenants run in parallel,
    # but a single tenant never runs two batch jobs at once.
    lock_name = f"forecast_batch:{tenant_id}"
    if use_advisory:
        # Batch runs take longer than single forecasts, hence the larger
        # timeout (and TTL for the table-based variant).
        return DatabaseLock(lock_name, timeout=60.0)
    return SimpleDatabaseLock(lock_name, timeout=60.0, ttl=600.0)

View File

@@ -12,7 +12,9 @@ from shared.subscription.plans import (
SubscriptionPlanMetadata,
PlanPricing,
QuotaLimits,
PlanFeatures
PlanFeatures,
FeatureCategories,
UserFacingFeatures
)
logger = structlog.get_logger()
@@ -55,22 +57,21 @@ async def get_available_plans():
# Convert Decimal to float for JSON serialization
plans_data[tier.value] = {
"name": metadata["name"],
"description": metadata["description"],
"tagline": metadata["tagline"],
"description_key": metadata["description_key"],
"tagline_key": metadata["tagline_key"],
"popular": metadata["popular"],
"monthly_price": float(metadata["monthly_price"]),
"yearly_price": float(metadata["yearly_price"]),
"trial_days": metadata["trial_days"],
"features": metadata["features"],
"limits": {
"users": metadata["limits"]["users"],
"locations": metadata["limits"]["locations"],
"products": metadata["limits"]["products"],
"forecasts_per_day": metadata["limits"]["forecasts_per_day"],
},
"support": metadata["support"],
"recommended_for": metadata["recommended_for"],
"hero_features": metadata.get("hero_features", []),
"roi_badge": metadata.get("roi_badge"),
"business_metrics": metadata.get("business_metrics"),
"limits": metadata["limits"],
"support_key": metadata["support_key"],
"recommended_for_key": metadata["recommended_for_key"],
"contact_sales": metadata.get("contact_sales", False),
"custom_pricing": metadata.get("custom_pricing", False),
}
logger.info("subscription_plans_fetched", tier_count=len(plans_data))
@@ -110,22 +111,21 @@ async def get_plan_by_tier(tier: str):
plan_data = {
"tier": tier_enum.value,
"name": metadata["name"],
"description": metadata["description"],
"tagline": metadata["tagline"],
"description_key": metadata["description_key"],
"tagline_key": metadata["tagline_key"],
"popular": metadata["popular"],
"monthly_price": float(metadata["monthly_price"]),
"yearly_price": float(metadata["yearly_price"]),
"trial_days": metadata["trial_days"],
"features": metadata["features"],
"limits": {
"users": metadata["limits"]["users"],
"locations": metadata["limits"]["locations"],
"products": metadata["limits"]["products"],
"forecasts_per_day": metadata["limits"]["forecasts_per_day"],
},
"support": metadata["support"],
"recommended_for": metadata["recommended_for"],
"hero_features": metadata.get("hero_features", []),
"roi_badge": metadata.get("roi_badge"),
"business_metrics": metadata.get("business_metrics"),
"limits": metadata["limits"],
"support_key": metadata["support_key"],
"recommended_for_key": metadata["recommended_for_key"],
"contact_sales": metadata.get("contact_sales", False),
"custom_pricing": metadata.get("custom_pricing", False),
}
logger.info("subscription_plan_fetched", tier=tier)
@@ -233,6 +233,50 @@ async def get_plan_limits(tier: str):
)
@router.get("/feature-categories")
async def get_feature_categories():
"""
Get all feature categories with icons and translation keys
**Public endpoint** - No authentication required
Returns:
Dictionary of feature categories
"""
try:
return {
"categories": FeatureCategories.CATEGORIES
}
except Exception as e:
logger.error("failed_to_fetch_feature_categories", error=str(e))
raise HTTPException(
status_code=500,
detail="Failed to fetch feature categories"
)
@router.get("/feature-descriptions")
async def get_feature_descriptions():
"""
Get user-facing feature descriptions with translation keys
**Public endpoint** - No authentication required
Returns:
Dictionary of feature descriptions mapped by feature key
"""
try:
return {
"features": UserFacingFeatures.FEATURE_DISPLAY
}
except Exception as e:
logger.error("failed_to_fetch_feature_descriptions", error=str(e))
raise HTTPException(
status_code=500,
detail="Failed to fetch feature descriptions"
)
@router.get("/compare")
async def compare_plans():
"""

View File

@@ -158,6 +158,56 @@ async def start_training_job(
# Continue with job creation but log the error
try:
# CRITICAL FIX: Check for existing running jobs before starting new one
# This prevents duplicate tenant-level training jobs
async with enhanced_training_service.database_manager.get_session() as check_session:
from app.repositories.training_log_repository import TrainingLogRepository
log_repo = TrainingLogRepository(check_session)
# Check for active jobs (running or pending)
active_jobs = await log_repo.get_active_jobs(tenant_id=tenant_id)
pending_jobs = await log_repo.get_logs_by_tenant(
tenant_id=tenant_id,
status="pending",
limit=10
)
all_active = active_jobs + pending_jobs
if all_active:
# Training job already in progress, return existing job info
existing_job = all_active[0]
logger.info("Training job already in progress, returning existing job",
existing_job_id=existing_job.job_id,
tenant_id=tenant_id,
status=existing_job.status)
return TrainingJobResponse(
job_id=existing_job.job_id,
tenant_id=tenant_id,
status=existing_job.status,
message=f"Training job already in progress (started {existing_job.created_at.isoformat() if existing_job.created_at else 'recently'})",
created_at=existing_job.created_at or datetime.now(timezone.utc),
estimated_duration_minutes=existing_job.config.get("estimated_duration_minutes", 15) if existing_job.config else 15,
training_results={
"total_products": 0,
"successful_trainings": 0,
"failed_trainings": 0,
"products": [],
"overall_training_time_seconds": 0.0
},
data_summary=None,
completed_at=None,
error_details=None,
processing_metadata={
"background_task": True,
"async_execution": True,
"existing_job": True,
"deduplication": True
}
)
# No existing job, proceed with creating new one
# Generate enhanced job ID
job_id = f"enhanced_training_{tenant_id}_{uuid.uuid4().hex[:8]}"
@@ -407,6 +457,7 @@ async def start_single_product_training(
request: SingleProductTrainingRequest,
tenant_id: str = Path(..., description="Tenant ID"),
inventory_product_id: str = Path(..., description="Inventory product UUID"),
background_tasks: BackgroundTasks = BackgroundTasks(),
request_obj: Request = None,
current_user: Dict[str, Any] = Depends(get_current_user_dep),
enhanced_training_service: EnhancedTrainingService = Depends(get_enhanced_training_service)
@@ -421,6 +472,7 @@ async def start_single_product_training(
- Enhanced error handling and validation
- Metrics tracking
- Transactional operations
- Background execution to prevent blocking
"""
metrics = get_metrics_collector(request_obj)
@@ -429,6 +481,53 @@ async def start_single_product_training(
inventory_product_id=inventory_product_id,
tenant_id=tenant_id)
# CRITICAL FIX: Check if this product is currently being trained
# This prevents duplicate training from rapid-click scenarios
async with enhanced_training_service.database_manager.get_session() as check_session:
from app.repositories.training_log_repository import TrainingLogRepository
log_repo = TrainingLogRepository(check_session)
# Check for active jobs for this specific product
active_jobs = await log_repo.get_active_jobs(tenant_id=tenant_id)
pending_jobs = await log_repo.get_logs_by_tenant(
tenant_id=tenant_id,
status="pending",
limit=20
)
all_active = active_jobs + pending_jobs
# Filter for jobs that include this specific product
product_jobs = [
job for job in all_active
if job.config and (
# Single product job for this product
job.config.get("product_id") == inventory_product_id or
# Tenant-wide job that would include this product
job.config.get("job_type") == "tenant_training"
)
]
if product_jobs:
existing_job = product_jobs[0]
logger.warning("Product training already in progress, rejecting duplicate request",
existing_job_id=existing_job.job_id,
tenant_id=tenant_id,
inventory_product_id=inventory_product_id,
status=existing_job.status)
raise HTTPException(
status_code=status.HTTP_409_CONFLICT,
detail={
"error": "Product training already in progress",
"message": f"Product {inventory_product_id} is currently being trained in job {existing_job.job_id}",
"existing_job_id": existing_job.job_id,
"status": existing_job.status,
"started_at": existing_job.created_at.isoformat() if existing_job.created_at else None
}
)
# No existing job, proceed with training
# Record metrics
if metrics:
metrics.increment_counter("enhanced_single_product_training_total")
@@ -436,22 +535,60 @@ async def start_single_product_training(
# Generate enhanced job ID
job_id = f"enhanced_single_{tenant_id}_{inventory_product_id}_{uuid.uuid4().hex[:8]}"
# Delegate to enhanced training service
result = await enhanced_training_service.start_single_product_training(
# CRITICAL FIX: Add initial training log entry
await enhanced_training_service._update_job_status_repository(
job_id=job_id,
status="pending",
progress=0,
current_step="Initializing single product training",
tenant_id=tenant_id
)
# Add enhanced background task for single product training
background_tasks.add_task(
execute_single_product_training_background,
tenant_id=tenant_id,
inventory_product_id=inventory_product_id,
job_id=job_id,
bakery_location=request.bakery_location or (40.4168, -3.7038)
bakery_location=request.bakery_location or (40.4168, -3.7038),
database_manager=enhanced_training_service.database_manager
)
if metrics:
metrics.increment_counter("enhanced_single_product_training_success_total")
# Return immediate response with job info
response_data = {
"job_id": job_id,
"tenant_id": tenant_id,
"status": "pending",
"message": "Enhanced single product training started successfully",
"created_at": datetime.now(timezone.utc),
"estimated_duration_minutes": 15, # Default estimate for single product
"training_results": {
"total_products": 1,
"successful_trainings": 0,
"failed_trainings": 0,
"products": [],
"overall_training_time_seconds": 0.0
},
"data_summary": None,
"completed_at": None,
"error_details": None,
"processing_metadata": {
"background_task": True,
"async_execution": True,
"enhanced_features": True,
"repository_pattern": True,
"dependency_injection": True
}
}
logger.info("Enhanced single product training completed",
logger.info("Enhanced single product training queued successfully",
inventory_product_id=inventory_product_id,
job_id=job_id)
return TrainingJobResponse(**result)
if metrics:
metrics.increment_counter("enhanced_single_product_training_queued_total")
return TrainingJobResponse(**response_data)
except ValueError as e:
if metrics:
@@ -475,6 +612,74 @@ async def start_single_product_training(
)
async def execute_single_product_training_background(
    tenant_id: str,
    inventory_product_id: str,
    job_id: str,
    bakery_location: tuple,
    database_manager
):
    """
    Enhanced background task that executes single product training using repository pattern.

    Runs outside the request cycle (queued by the endpoint via FastAPI
    BackgroundTasks, which already returned ``job_id`` to the client).
    Uses a separate service instance to avoid session conflicts.

    Args:
        tenant_id: Tenant identifier
        inventory_product_id: Inventory product UUID to train
        job_id: Pre-generated job id the endpoint already reported back
        bakery_location: presumably a (latitude, longitude) tuple — the
            caller's default is Madrid (40.4168, -3.7038); confirm
        database_manager: session factory for the fresh service instance
    """
    logger.info("Enhanced background single product training started",
                job_id=job_id,
                tenant_id=tenant_id,
                inventory_product_id=inventory_product_id)
    # Create a new service instance with a fresh database session to avoid conflicts.
    # Local import also avoids a circular import at module load time —
    # TODO confirm that is why it is function-scoped.
    from app.services.training_service import EnhancedTrainingService
    fresh_training_service = EnhancedTrainingService(database_manager)
    try:
        # Update job status to running (endpoint created it as "pending").
        await fresh_training_service._update_job_status_repository(
            job_id=job_id,
            status="running",
            progress=0,
            current_step="Starting single product training",
            tenant_id=tenant_id
        )
        # Execute the enhanced single product training with repository pattern
        result = await fresh_training_service.start_single_product_training(
            tenant_id=tenant_id,
            inventory_product_id=inventory_product_id,
            job_id=job_id,
            bakery_location=bakery_location
        )
        logger.info("Enhanced background single product training completed successfully",
                    job_id=job_id,
                    inventory_product_id=inventory_product_id)
    except Exception as training_error:
        logger.error("Enhanced single product training failed",
                     job_id=job_id,
                     inventory_product_id=inventory_product_id,
                     error=str(training_error))
        try:
            # Best-effort: mark the job failed so clients polling the job
            # id see the error instead of a job stuck in "running".
            await fresh_training_service._update_job_status_repository(
                job_id=job_id,
                status="failed",
                progress=0,
                current_step="Single product training failed",
                error_message=str(training_error),
                tenant_id=tenant_id
            )
        except Exception as status_error:
            # Swallow: the original training error is already logged above.
            logger.error("Failed to update job status after training error",
                         job_id=job_id,
                         status_error=str(status_error))
    finally:
        logger.info("Enhanced background single product training cleanup completed",
                    job_id=job_id,
                    inventory_product_id=inventory_product_id)
@router.get("/health")
async def health_check():
"""Health check endpoint for the training operations"""

View File

@@ -353,6 +353,178 @@ class PlanFeatures:
)
# ============================================================================
# FEATURE DISPLAY CONFIGURATION (User-Facing)
# ============================================================================
class FeatureCategories:
    """
    User-friendly feature categorization for pricing display.

    Keys are the category identifiers that feature entries reference via
    their ``category`` field; each value carries a display icon and an
    i18n translation key (resolved client-side).
    """

    CATEGORIES = {
        "daily_operations": {
            "icon": "🏪",
            "translation_key": "categories.daily_operations",
        },
        "smart_forecasting": {
            "icon": "🤖",
            "translation_key": "categories.smart_forecasting",
        },
        "smart_ordering": {
            "icon": "📦",
            "translation_key": "categories.smart_ordering",
        },
        "business_insights": {
            "icon": "📊",
            "translation_key": "categories.business_insights",
        },
        "multi_location": {
            "icon": "🏢",
            "translation_key": "categories.multi_location",
        },
        "integrations": {
            "icon": "🔌",
            "translation_key": "categories.integrations",
        },
        "support": {
            "icon": "👥",
            "translation_key": "categories.support",
        },
    }
class UserFacingFeatures:
    """
    User-friendly feature descriptions for non-technical bakery owners.

    Each entry maps an internal feature key to:
      - ``translation_key``: i18n key for the display name (always present)
      - ``tooltip_key``: i18n key for a hover explanation (optional)
      - ``category``: key into ``FeatureCategories.CATEGORIES`` (always present)
    """

    FEATURE_DISPLAY = {
        # Daily Operations
        "inventory_management": {
            "translation_key": "features.inventory_management",
            "tooltip_key": "features.inventory_management_tooltip",
            "category": "daily_operations",
        },
        "sales_tracking": {
            "translation_key": "features.sales_tracking",
            "tooltip_key": "features.sales_tracking_tooltip",
            "category": "daily_operations",
        },
        "basic_recipes": {
            "translation_key": "features.basic_recipes",
            "tooltip_key": "features.basic_recipes_tooltip",
            "category": "daily_operations",
        },
        "production_planning": {
            "translation_key": "features.production_planning",
            "tooltip_key": "features.production_planning_tooltip",
            "category": "daily_operations",
        },
        # Smart Forecasting
        "basic_forecasting": {
            "translation_key": "features.basic_forecasting",
            "tooltip_key": "features.basic_forecasting_tooltip",
            "category": "smart_forecasting",
        },
        "demand_prediction": {
            "translation_key": "features.demand_prediction",
            "category": "smart_forecasting",
        },
        "seasonal_patterns": {
            "translation_key": "features.seasonal_patterns",
            "tooltip_key": "features.seasonal_patterns_tooltip",
            "category": "smart_forecasting",
        },
        "weather_data_integration": {
            "translation_key": "features.weather_data_integration",
            "tooltip_key": "features.weather_data_integration_tooltip",
            "category": "smart_forecasting",
        },
        "traffic_data_integration": {
            "translation_key": "features.traffic_data_integration",
            "tooltip_key": "features.traffic_data_integration_tooltip",
            "category": "smart_forecasting",
        },
        # Smart Ordering
        "supplier_management": {
            "translation_key": "features.supplier_management",
            "tooltip_key": "features.supplier_management_tooltip",
            "category": "smart_ordering",
        },
        "waste_tracking": {
            "translation_key": "features.waste_tracking",
            "tooltip_key": "features.waste_tracking_tooltip",
            "category": "smart_ordering",
        },
        "expiry_alerts": {
            "translation_key": "features.expiry_alerts",
            "tooltip_key": "features.expiry_alerts_tooltip",
            "category": "smart_ordering",
        },
        # Business Insights
        "basic_reporting": {
            "translation_key": "features.basic_reporting",
            "category": "business_insights",
        },
        "advanced_analytics": {
            "translation_key": "features.advanced_analytics",
            "tooltip_key": "features.advanced_analytics_tooltip",
            "category": "business_insights",
        },
        "profitability_analysis": {
            "translation_key": "features.profitability_analysis",
            "category": "business_insights",
        },
        # Multi-Location
        "multi_location_support": {
            "translation_key": "features.multi_location_support",
            "category": "multi_location",
        },
        "inventory_transfer": {
            "translation_key": "features.inventory_transfer",
            "category": "multi_location",
        },
        "location_comparison": {
            "translation_key": "features.location_comparison",
            "category": "multi_location",
        },
        # Integrations
        "pos_integration": {
            "translation_key": "features.pos_integration",
            "tooltip_key": "features.pos_integration_tooltip",
            "category": "integrations",
        },
        "accounting_export": {
            "translation_key": "features.accounting_export",
            "category": "integrations",
        },
        "full_api_access": {
            "translation_key": "features.full_api_access",
            "category": "integrations",
        },
        # Support
        "email_support": {
            "translation_key": "features.email_support",
            "category": "support",
        },
        "phone_support": {
            "translation_key": "features.phone_support",
            "category": "support",
        },
        "dedicated_account_manager": {
            "translation_key": "features.dedicated_account_manager",
            "category": "support",
        },
        # Key starts with a digit, so the i18n key differs ("support_24_7").
        "24_7_support": {
            "translation_key": "features.support_24_7",
            "category": "support",
        },
    }
# ============================================================================
# SUBSCRIPTION PLAN METADATA
# ============================================================================
@@ -363,57 +535,126 @@ class SubscriptionPlanMetadata:
PLANS = {
SubscriptionTier.STARTER: {
"name": "Starter",
"description": "Perfect for small bakeries getting started",
"tagline": "Essential tools for small operations",
"description_key": "plans.starter.description",
"tagline_key": "plans.starter.tagline",
"popular": False,
"monthly_price": PlanPricing.MONTHLY_PRICES[SubscriptionTier.STARTER],
"yearly_price": PlanPricing.YEARLY_PRICES[SubscriptionTier.STARTER],
"trial_days": 14,
"features": PlanFeatures.STARTER_FEATURES,
# Hero features (displayed prominently)
"hero_features": [
"inventory_management",
"basic_forecasting",
"supplier_management",
"waste_tracking",
],
# ROI & Business Value
"roi_badge": {
"savings_min": 300,
"savings_max": 500,
"currency": "EUR",
"period": "month",
"translation_key": "plans.starter.roi_badge",
},
"business_metrics": {
"waste_reduction": "20-30%",
"time_saved_hours_week": "5-8",
"stockout_reduction": "85-95%",
},
"limits": {
"users": QuotaLimits.MAX_USERS[SubscriptionTier.STARTER],
"locations": QuotaLimits.MAX_LOCATIONS[SubscriptionTier.STARTER],
"products": QuotaLimits.MAX_PRODUCTS[SubscriptionTier.STARTER],
"forecasts_per_day": QuotaLimits.FORECAST_GENERATION_PER_DAY[SubscriptionTier.STARTER],
"forecast_horizon_days": QuotaLimits.FORECAST_HORIZON_DAYS[SubscriptionTier.STARTER],
},
"support": "Email support (48h response)",
"recommended_for": "Single location, up to 5 team members",
"support_key": "plans.starter.support",
"recommended_for_key": "plans.starter.recommended_for",
},
SubscriptionTier.PROFESSIONAL: {
"name": "Professional",
"description": "For growing bakeries with multiple locations",
"tagline": "Advanced features & analytics",
"description_key": "plans.professional.description",
"tagline_key": "plans.professional.tagline",
"popular": True, # Most popular plan
"monthly_price": PlanPricing.MONTHLY_PRICES[SubscriptionTier.PROFESSIONAL],
"yearly_price": PlanPricing.YEARLY_PRICES[SubscriptionTier.PROFESSIONAL],
"trial_days": 14,
"features": PlanFeatures.PROFESSIONAL_FEATURES,
# Hero features (displayed prominently)
"hero_features": [
"weather_data_integration",
"multi_location_support",
"advanced_analytics",
"phone_support",
],
# ROI & Business Value
"roi_badge": {
"savings_min": 800,
"savings_max": 1200,
"currency": "EUR",
"period": "month",
"translation_key": "plans.professional.roi_badge",
},
"business_metrics": {
"waste_reduction": "30-40%",
"time_saved_hours_week": "11-17",
"procurement_cost_savings": "5-15%",
},
"limits": {
"users": QuotaLimits.MAX_USERS[SubscriptionTier.PROFESSIONAL],
"locations": QuotaLimits.MAX_LOCATIONS[SubscriptionTier.PROFESSIONAL],
"products": QuotaLimits.MAX_PRODUCTS[SubscriptionTier.PROFESSIONAL],
"forecasts_per_day": QuotaLimits.FORECAST_GENERATION_PER_DAY[SubscriptionTier.PROFESSIONAL],
"forecast_horizon_days": QuotaLimits.FORECAST_HORIZON_DAYS[SubscriptionTier.PROFESSIONAL],
},
"support": "Priority email + phone support (24h response)",
"recommended_for": "Multi-location operations, up to 20 team members",
"support_key": "plans.professional.support",
"recommended_for_key": "plans.professional.recommended_for",
},
SubscriptionTier.ENTERPRISE: {
"name": "Enterprise",
"description": "For large bakery chains and franchises",
"tagline": "Unlimited scale & custom solutions",
"description_key": "plans.enterprise.description",
"tagline_key": "plans.enterprise.tagline",
"popular": False,
"monthly_price": PlanPricing.MONTHLY_PRICES[SubscriptionTier.ENTERPRISE],
"yearly_price": PlanPricing.YEARLY_PRICES[SubscriptionTier.ENTERPRISE],
"trial_days": 30,
"features": PlanFeatures.ENTERPRISE_FEATURES,
# Hero features (displayed prominently)
"hero_features": [
"full_api_access",
"custom_algorithms",
"dedicated_account_manager",
"24_7_support",
],
# ROI & Business Value
"roi_badge": {
"translation_key": "plans.enterprise.roi_badge",
"custom": True,
},
"business_metrics": {
"waste_reduction": "Custom",
"time_saved_hours_week": "Custom",
"scale": "Unlimited",
},
"limits": {
"users": "Unlimited",
"locations": "Unlimited",
"products": "Unlimited",
"forecasts_per_day": "Unlimited",
"forecast_horizon_days": QuotaLimits.FORECAST_HORIZON_DAYS[SubscriptionTier.ENTERPRISE],
},
"support": "24/7 dedicated support + account manager",
"recommended_for": "Enterprise operations, unlimited scale",
"support_key": "plans.enterprise.support",
"recommended_for_key": "plans.enterprise.recommended_for",
"custom_pricing": True,
"contact_sales": True,
},