Improve the frontend 2
This commit is contained in:
@@ -18,6 +18,7 @@ from app.models.users import User
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.services.admin_delete import AdminUserDeleteService
|
||||
from app.models import AuditLog
|
||||
|
||||
# Import unified authentication from shared library
|
||||
from shared.auth.decorators import (
|
||||
@@ -30,7 +31,7 @@ logger = structlog.get_logger()
|
||||
router = APIRouter(tags=["users"])
|
||||
|
||||
# Initialize audit logger
|
||||
audit_logger = create_audit_logger("auth-service")
|
||||
audit_logger = create_audit_logger("auth-service", AuditLog)
|
||||
|
||||
@router.delete("/api/v1/auth/users/{user_id}")
|
||||
async def delete_admin_user(
|
||||
|
||||
@@ -21,6 +21,7 @@ from shared.database.base import create_database_manager
|
||||
from shared.monitoring.decorators import track_execution_time
|
||||
from shared.monitoring.metrics import get_metrics_collector
|
||||
from app.core.config import settings
|
||||
from app.models import AuditLog
|
||||
from shared.routing import RouteBuilder
|
||||
from shared.auth.access_control import require_user_role
|
||||
from shared.security import create_audit_logger, create_rate_limiter, AuditSeverity, AuditAction
|
||||
@@ -32,7 +33,7 @@ logger = structlog.get_logger()
|
||||
router = APIRouter(tags=["forecasting-operations"])
|
||||
|
||||
# Initialize audit logger
|
||||
audit_logger = create_audit_logger("forecasting-service")
|
||||
audit_logger = create_audit_logger("forecasting-service", AuditLog)
|
||||
|
||||
async def get_rate_limiter():
|
||||
"""Dependency for rate limiter"""
|
||||
|
||||
@@ -50,7 +50,7 @@ async def create_food_safety_alert(
|
||||
alert = await food_safety_service.create_food_safety_alert(
|
||||
db,
|
||||
alert_data,
|
||||
user_id=UUID(current_user["sub"])
|
||||
user_id=UUID(current_user["user_id"])
|
||||
)
|
||||
|
||||
logger.info("Food safety alert created",
|
||||
@@ -196,7 +196,7 @@ async def update_food_safety_alert(
|
||||
|
||||
set_clauses.append("updated_at = NOW()")
|
||||
set_clauses.append("updated_by = :updated_by")
|
||||
params["updated_by"] = UUID(current_user["sub"])
|
||||
params["updated_by"] = UUID(current_user["user_id"])
|
||||
|
||||
update_query = f"""
|
||||
UPDATE food_safety_alerts
|
||||
|
||||
@@ -14,6 +14,7 @@ from shared.auth.access_control import require_user_role
|
||||
from shared.routing import RouteBuilder
|
||||
from app.core.database import get_db
|
||||
from app.services.food_safety_service import FoodSafetyService
|
||||
from app.models import AuditLog
|
||||
from app.schemas.food_safety import (
|
||||
FoodSafetyComplianceCreate,
|
||||
FoodSafetyComplianceUpdate,
|
||||
@@ -50,7 +51,7 @@ async def create_compliance_record(
|
||||
compliance = await food_safety_service.create_compliance_record(
|
||||
db,
|
||||
compliance_data,
|
||||
user_id=UUID(current_user["sub"])
|
||||
user_id=UUID(current_user["user_id"])
|
||||
)
|
||||
|
||||
logger.info("Compliance record created",
|
||||
@@ -181,7 +182,7 @@ async def update_compliance_record(
|
||||
compliance_id,
|
||||
tenant_id,
|
||||
compliance_data,
|
||||
user_id=UUID(current_user["sub"])
|
||||
user_id=UUID(current_user["user_id"])
|
||||
)
|
||||
|
||||
if not compliance:
|
||||
@@ -268,7 +269,7 @@ async def archive_compliance_record(
|
||||
# Log audit event for archiving compliance record
|
||||
try:
|
||||
from shared.security import create_audit_logger, AuditSeverity, AuditAction
|
||||
audit_logger = create_audit_logger("inventory-service")
|
||||
audit_logger = create_audit_logger("inventory-service", AuditLog)
|
||||
await audit_logger.log_event(
|
||||
db_session=db,
|
||||
tenant_id=str(tenant_id),
|
||||
|
||||
@@ -56,7 +56,7 @@ async def acknowledge_alert(
|
||||
result = await db.execute(update_query, {
|
||||
"alert_id": alert_id,
|
||||
"tenant_id": tenant_id,
|
||||
"user_id": UUID(current_user["sub"]),
|
||||
"user_id": UUID(current_user["user_id"]),
|
||||
"notes": f"\nAcknowledged: {notes}" if notes else "\nAcknowledged"
|
||||
})
|
||||
|
||||
|
||||
@@ -11,6 +11,7 @@ from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.core.database import get_db
|
||||
from app.services.inventory_service import InventoryService
|
||||
from app.models import AuditLog
|
||||
from app.schemas.inventory import (
|
||||
IngredientCreate,
|
||||
IngredientUpdate,
|
||||
@@ -30,7 +31,7 @@ route_builder = RouteBuilder('inventory')
|
||||
router = APIRouter(tags=["ingredients"])
|
||||
|
||||
# Initialize audit logger
|
||||
audit_logger = create_audit_logger("inventory-service")
|
||||
audit_logger = create_audit_logger("inventory-service", AuditLog)
|
||||
|
||||
# Helper function to extract user ID from user object
|
||||
def get_current_user_id(current_user: dict = Depends(get_current_user_dep)) -> UUID:
|
||||
|
||||
@@ -51,7 +51,7 @@ async def log_temperature(
|
||||
temp_log = await food_safety_service.log_temperature(
|
||||
db,
|
||||
temp_data,
|
||||
user_id=UUID(current_user["sub"])
|
||||
user_id=UUID(current_user["user_id"])
|
||||
)
|
||||
|
||||
logger.info("Temperature logged",
|
||||
@@ -89,7 +89,7 @@ async def bulk_log_temperatures(
|
||||
temp_logs = await food_safety_service.bulk_log_temperatures(
|
||||
db,
|
||||
bulk_data.readings,
|
||||
user_id=UUID(current_user["sub"])
|
||||
user_id=UUID(current_user["user_id"])
|
||||
)
|
||||
|
||||
logger.info("Bulk temperature logging completed",
|
||||
|
||||
@@ -85,6 +85,22 @@ class SustainabilityService:
|
||||
# Get waste data from production and inventory
|
||||
waste_data = await self._get_waste_data(db, tenant_id, start_date, end_date)
|
||||
|
||||
# Check if there's sufficient data for meaningful calculations
|
||||
# Minimum: 50kg production to avoid false metrics on empty accounts
|
||||
total_production = waste_data['total_production_kg']
|
||||
has_sufficient_data = total_production >= 50.0
|
||||
|
||||
logger.info(
|
||||
"Checking data sufficiency for sustainability metrics",
|
||||
tenant_id=str(tenant_id),
|
||||
total_production=total_production,
|
||||
has_sufficient_data=has_sufficient_data
|
||||
)
|
||||
|
||||
# If insufficient data, return a special "collecting data" state
|
||||
if not has_sufficient_data:
|
||||
return self._get_insufficient_data_response(start_date, end_date, waste_data)
|
||||
|
||||
# Calculate environmental impact
|
||||
environmental_impact = self._calculate_environmental_impact(waste_data)
|
||||
|
||||
@@ -118,7 +134,8 @@ class SustainabilityService:
|
||||
'sdg_compliance': sdg_compliance,
|
||||
'avoided_waste': avoided_waste,
|
||||
'financial_impact': financial_impact,
|
||||
'grant_readiness': self._assess_grant_readiness(sdg_compliance)
|
||||
'grant_readiness': self._assess_grant_readiness(sdg_compliance),
|
||||
'data_sufficient': True
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
@@ -126,6 +143,138 @@ class SustainabilityService:
|
||||
tenant_id=str(tenant_id), error=str(e))
|
||||
raise
|
||||
|
||||
def _get_insufficient_data_response(
|
||||
self,
|
||||
start_date: datetime,
|
||||
end_date: datetime,
|
||||
waste_data: Dict[str, Any]
|
||||
) -> Dict[str, Any]:
|
||||
"""
|
||||
Return response for tenants with insufficient data
|
||||
|
||||
This prevents showing misleading "100% compliant" status for empty accounts
|
||||
"""
|
||||
return {
|
||||
'period': {
|
||||
'start_date': start_date.isoformat(),
|
||||
'end_date': end_date.isoformat(),
|
||||
'days': (end_date - start_date).days
|
||||
},
|
||||
'waste_metrics': {
|
||||
'total_waste_kg': 0.0,
|
||||
'production_waste_kg': 0.0,
|
||||
'expired_waste_kg': 0.0,
|
||||
'waste_percentage': 0.0,
|
||||
'waste_by_reason': {}
|
||||
},
|
||||
'environmental_impact': {
|
||||
'co2_emissions': {
|
||||
'kg': 0.0,
|
||||
'tons': 0.0,
|
||||
'trees_to_offset': 0.0
|
||||
},
|
||||
'water_footprint': {
|
||||
'liters': 0.0,
|
||||
'cubic_meters': 0.0
|
||||
},
|
||||
'land_use': {
|
||||
'square_meters': 0.0,
|
||||
'hectares': 0.0
|
||||
},
|
||||
'human_equivalents': {
|
||||
'car_km_equivalent': 0.0,
|
||||
'smartphone_charges': 0.0,
|
||||
'showers_equivalent': 0.0,
|
||||
'trees_planted': 0.0
|
||||
}
|
||||
},
|
||||
'sdg_compliance': {
|
||||
'sdg_12_3': {
|
||||
'baseline_waste_percentage': 0.0,
|
||||
'current_waste_percentage': 0.0,
|
||||
'reduction_achieved': 0.0,
|
||||
'target_reduction': 50.0,
|
||||
'progress_to_target': 0.0,
|
||||
'status': 'insufficient_data',
|
||||
'status_label': 'Collecting Baseline Data',
|
||||
'target_waste_percentage': 0.0
|
||||
},
|
||||
'baseline_period': 'not_available',
|
||||
'certification_ready': False,
|
||||
'improvement_areas': ['start_production_tracking']
|
||||
},
|
||||
'avoided_waste': {
|
||||
'waste_avoided_kg': 0.0,
|
||||
'ai_assisted_batches': 0,
|
||||
'environmental_impact_avoided': {
|
||||
'co2_kg': 0.0,
|
||||
'water_liters': 0.0
|
||||
},
|
||||
'methodology': 'insufficient_data'
|
||||
},
|
||||
'financial_impact': {
|
||||
'waste_cost_eur': 0.0,
|
||||
'cost_per_kg': 3.50,
|
||||
'potential_monthly_savings': 0.0,
|
||||
'annual_projection': 0.0
|
||||
},
|
||||
'grant_readiness': {
|
||||
'overall_readiness_percentage': 0.0,
|
||||
'grant_programs': {
|
||||
'life_circular_economy': {
|
||||
'eligible': False,
|
||||
'confidence': 'low',
|
||||
'requirements_met': False,
|
||||
'funding_eur': 73_000_000,
|
||||
'deadline': '2025-09-23',
|
||||
'program_type': 'grant'
|
||||
},
|
||||
'horizon_europe_cluster_6': {
|
||||
'eligible': False,
|
||||
'confidence': 'low',
|
||||
'requirements_met': False,
|
||||
'funding_eur': 880_000_000,
|
||||
'deadline': 'rolling_2025',
|
||||
'program_type': 'grant'
|
||||
},
|
||||
'fedima_sustainability_grant': {
|
||||
'eligible': False,
|
||||
'confidence': 'low',
|
||||
'requirements_met': False,
|
||||
'funding_eur': 20_000,
|
||||
'deadline': '2025-06-30',
|
||||
'program_type': 'grant',
|
||||
'sector_specific': 'bakery'
|
||||
},
|
||||
'eit_food_retail': {
|
||||
'eligible': False,
|
||||
'confidence': 'low',
|
||||
'requirements_met': False,
|
||||
'funding_eur': 45_000,
|
||||
'deadline': 'rolling',
|
||||
'program_type': 'grant',
|
||||
'sector_specific': 'retail'
|
||||
},
|
||||
'un_sdg_certified': {
|
||||
'eligible': False,
|
||||
'confidence': 'low',
|
||||
'requirements_met': False,
|
||||
'funding_eur': 0,
|
||||
'deadline': 'ongoing',
|
||||
'program_type': 'certification'
|
||||
}
|
||||
},
|
||||
'recommended_applications': [],
|
||||
'spain_compliance': {
|
||||
'law_1_2025': False,
|
||||
'circular_economy_strategy': False
|
||||
}
|
||||
},
|
||||
'data_sufficient': False,
|
||||
'minimum_production_required_kg': 50.0,
|
||||
'current_production_kg': waste_data['total_production_kg']
|
||||
}
|
||||
|
||||
async def _get_waste_data(
|
||||
self,
|
||||
db: AsyncSession,
|
||||
@@ -306,79 +455,104 @@ class SustainabilityService:
|
||||
"""
|
||||
Calculate compliance with UN SDG 12.3
|
||||
Target: Halve per capita global food waste by 2030
|
||||
|
||||
IMPORTANT: This method assumes sufficient data validation was done upstream.
|
||||
It should only be called when waste_data has meaningful production volumes.
|
||||
"""
|
||||
try:
|
||||
# Get baseline (first 90 days of operation or industry average)
|
||||
# Get baseline (first 90 days of operation)
|
||||
baseline = await self._get_baseline_waste(db, tenant_id)
|
||||
|
||||
current_waste_percentage = waste_data['waste_percentage']
|
||||
# Ensure baseline is at least the industry average if not available
|
||||
baseline_percentage = baseline.get('waste_percentage', EnvironmentalConstants.EU_BAKERY_BASELINE_WASTE * 100)
|
||||
|
||||
# If baseline is too low (less than 1%), use industry average to prevent calculation errors
|
||||
if baseline_percentage < 1.0:
|
||||
baseline_percentage = EnvironmentalConstants.EU_BAKERY_BASELINE_WASTE * 100
|
||||
total_production = waste_data['total_production_kg']
|
||||
|
||||
# Calculate reduction from baseline
|
||||
# If current waste is higher than baseline, show negative reduction (worse than baseline)
|
||||
# If current waste is lower than baseline, show positive reduction (better than baseline)
|
||||
if baseline_percentage > 0:
|
||||
reduction_percentage = ((baseline_percentage - current_waste_percentage) / baseline_percentage) * 100
|
||||
else:
|
||||
# Check if we have a real baseline from production history
|
||||
has_real_baseline = baseline.get('data_available', False)
|
||||
baseline_percentage = baseline.get('waste_percentage', 0.0)
|
||||
|
||||
# If no real baseline AND insufficient current production, we can't make comparisons
|
||||
if not has_real_baseline and total_production < 50:
|
||||
logger.warning(
|
||||
"Cannot calculate SDG compliance without baseline or sufficient production",
|
||||
tenant_id=str(tenant_id),
|
||||
total_production=total_production
|
||||
)
|
||||
return self._get_insufficient_sdg_data()
|
||||
|
||||
# If we have no real baseline but have current production, use it as baseline
|
||||
if not has_real_baseline:
|
||||
logger.info(
|
||||
"Using current period as baseline (no historical data available)",
|
||||
tenant_id=str(tenant_id),
|
||||
current_waste_percentage=current_waste_percentage
|
||||
)
|
||||
baseline_percentage = current_waste_percentage
|
||||
# Set reduction to 0 since we're establishing baseline
|
||||
reduction_percentage = 0
|
||||
|
||||
# Calculate progress toward 50% reduction target
|
||||
# The target is to achieve 50% reduction from baseline
|
||||
# So if baseline is 25%, target is to reach 12.5% (25% * 0.5)
|
||||
target_reduction_percentage = 50.0
|
||||
target_waste_percentage = baseline_percentage * (1 - (target_reduction_percentage / 100))
|
||||
|
||||
# Calculate progress: how much of the 50% target has been achieved
|
||||
# If we've reduced from 25% to 19.28%, we've achieved (25-19.28)/(25-12.5) = 5.72/12.5 = 45.8% of target
|
||||
if baseline_percentage > target_waste_percentage:
|
||||
max_possible_reduction = baseline_percentage - target_waste_percentage
|
||||
actual_reduction = baseline_percentage - current_waste_percentage
|
||||
progress_to_target = (actual_reduction / max_possible_reduction) * 100 if max_possible_reduction > 0 else 0
|
||||
else:
|
||||
# If current is already better than target
|
||||
progress_to_target = 100.0 if current_waste_percentage <= target_waste_percentage else 0.0
|
||||
|
||||
# Ensure progress doesn't exceed 100%
|
||||
progress_to_target = min(progress_to_target, 100.0)
|
||||
|
||||
# Status assessment based on actual reduction achieved
|
||||
if reduction_percentage >= 50:
|
||||
status = 'sdg_compliant'
|
||||
status_label = 'SDG 12.3 Compliant'
|
||||
elif reduction_percentage >= 30:
|
||||
status = 'on_track'
|
||||
status_label = 'On Track to Compliance'
|
||||
elif reduction_percentage >= 10:
|
||||
status = 'progressing'
|
||||
status_label = 'Making Progress'
|
||||
elif reduction_percentage > 0:
|
||||
status = 'improving'
|
||||
status_label = 'Improving'
|
||||
elif reduction_percentage < 0:
|
||||
status = 'baseline'
|
||||
status_label = 'Above Baseline'
|
||||
else:
|
||||
progress_to_target = 0
|
||||
status = 'baseline'
|
||||
status_label = 'Establishing Baseline'
|
||||
else:
|
||||
# We have a real baseline - calculate actual reduction
|
||||
# If current waste is higher than baseline, show negative reduction (worse than baseline)
|
||||
# If current waste is lower than baseline, show positive reduction (better than baseline)
|
||||
if baseline_percentage > 0:
|
||||
reduction_percentage = ((baseline_percentage - current_waste_percentage) / baseline_percentage) * 100
|
||||
else:
|
||||
reduction_percentage = 0
|
||||
|
||||
# Calculate progress toward 50% reduction target
|
||||
# The target is to achieve 50% reduction from baseline
|
||||
# So if baseline is 25%, target is to reach 12.5% (25% * 0.5)
|
||||
target_reduction_percentage = 50.0
|
||||
target_waste_percentage = baseline_percentage * (1 - (target_reduction_percentage / 100))
|
||||
|
||||
# Calculate progress: how much of the 50% target has been achieved
|
||||
# If we've reduced from 25% to 19.28%, we've achieved (25-19.28)/(25-12.5) = 5.72/12.5 = 45.8% of target
|
||||
if baseline_percentage > target_waste_percentage:
|
||||
max_possible_reduction = baseline_percentage - target_waste_percentage
|
||||
actual_reduction = baseline_percentage - current_waste_percentage
|
||||
progress_to_target = (actual_reduction / max_possible_reduction) * 100 if max_possible_reduction > 0 else 0
|
||||
else:
|
||||
# If current is already better than target
|
||||
progress_to_target = 100.0 if current_waste_percentage <= target_waste_percentage else 0.0
|
||||
|
||||
# Ensure progress doesn't exceed 100%
|
||||
progress_to_target = min(progress_to_target, 100.0)
|
||||
|
||||
# Status assessment based on actual reduction achieved
|
||||
if reduction_percentage >= 50:
|
||||
status = 'sdg_compliant'
|
||||
status_label = 'SDG 12.3 Compliant'
|
||||
elif reduction_percentage >= 30:
|
||||
status = 'on_track'
|
||||
status_label = 'On Track to Compliance'
|
||||
elif reduction_percentage >= 10:
|
||||
status = 'progressing'
|
||||
status_label = 'Making Progress'
|
||||
elif reduction_percentage > 0:
|
||||
status = 'improving'
|
||||
status_label = 'Improving'
|
||||
elif reduction_percentage < 0:
|
||||
status = 'above_baseline'
|
||||
status_label = 'Above Baseline'
|
||||
else:
|
||||
status = 'baseline'
|
||||
status_label = 'At Baseline'
|
||||
|
||||
return {
|
||||
'sdg_12_3': {
|
||||
'baseline_waste_percentage': round(baseline_percentage, 2),
|
||||
'current_waste_percentage': round(current_waste_percentage, 2),
|
||||
'reduction_achieved': round(reduction_percentage, 2),
|
||||
'target_reduction': target_reduction_percentage,
|
||||
'target_reduction': 50.0,
|
||||
'progress_to_target': round(max(progress_to_target, 0), 1), # Ensure non-negative
|
||||
'status': status,
|
||||
'status_label': status_label,
|
||||
'target_waste_percentage': round(target_waste_percentage, 2)
|
||||
'target_waste_percentage': round(baseline_percentage * 0.5, 2) if baseline_percentage > 0 else 0.0
|
||||
},
|
||||
'baseline_period': baseline.get('period', 'industry_average'),
|
||||
'certification_ready': reduction_percentage >= 50,
|
||||
'baseline_period': baseline.get('period', 'current_period'),
|
||||
'certification_ready': reduction_percentage >= 50 if has_real_baseline else False,
|
||||
'improvement_areas': self._identify_improvement_areas(waste_data)
|
||||
}
|
||||
|
||||
@@ -386,6 +560,24 @@ class SustainabilityService:
|
||||
logger.error("Failed to calculate SDG compliance", error=str(e))
|
||||
raise
|
||||
|
||||
def _get_insufficient_sdg_data(self) -> Dict[str, Any]:
|
||||
"""Return SDG compliance structure for insufficient data case"""
|
||||
return {
|
||||
'sdg_12_3': {
|
||||
'baseline_waste_percentage': 0.0,
|
||||
'current_waste_percentage': 0.0,
|
||||
'reduction_achieved': 0.0,
|
||||
'target_reduction': 50.0,
|
||||
'progress_to_target': 0.0,
|
||||
'status': 'insufficient_data',
|
||||
'status_label': 'Collecting Baseline Data',
|
||||
'target_waste_percentage': 0.0
|
||||
},
|
||||
'baseline_period': 'not_available',
|
||||
'certification_ready': False,
|
||||
'improvement_areas': ['start_production_tracking']
|
||||
}
|
||||
|
||||
async def _get_baseline_waste(
|
||||
self,
|
||||
db: AsyncSession,
|
||||
@@ -482,12 +674,24 @@ class SustainabilityService:
|
||||
return {
|
||||
'waste_avoided_kg': 0,
|
||||
'ai_assisted_batches': 0,
|
||||
'note': 'Insufficient data for avoided waste calculation'
|
||||
'environmental_impact_avoided': {
|
||||
'co2_kg': 0,
|
||||
'water_liters': 0
|
||||
},
|
||||
'methodology': 'insufficient_data'
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to calculate avoided waste", error=str(e))
|
||||
return {'waste_avoided_kg': 0, 'error': str(e)}
|
||||
return {
|
||||
'waste_avoided_kg': 0,
|
||||
'ai_assisted_batches': 0,
|
||||
'environmental_impact_avoided': {
|
||||
'co2_kg': 0,
|
||||
'water_liters': 0
|
||||
},
|
||||
'methodology': 'error_occurred'
|
||||
}
|
||||
|
||||
def _calculate_financial_impact(self, waste_data: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""Calculate financial impact of food waste"""
|
||||
|
||||
@@ -17,6 +17,7 @@ from app.schemas.notifications import (
|
||||
)
|
||||
from app.services.notification_service import EnhancedNotificationService
|
||||
from app.models.notifications import NotificationType as ModelNotificationType
|
||||
from app.models import AuditLog
|
||||
from shared.auth.decorators import get_current_user_dep, get_current_user
|
||||
from shared.auth.access_control import require_user_role, admin_role_required
|
||||
from shared.routing.route_builder import RouteBuilder
|
||||
@@ -25,7 +26,7 @@ from shared.monitoring.metrics import track_endpoint_metrics
|
||||
from shared.security import create_audit_logger, AuditSeverity, AuditAction
|
||||
|
||||
logger = structlog.get_logger()
|
||||
audit_logger = create_audit_logger("notification-service")
|
||||
audit_logger = create_audit_logger("notification-service", AuditLog)
|
||||
router = APIRouter()
|
||||
route_builder = RouteBuilder("notification")
|
||||
|
||||
|
||||
@@ -16,6 +16,7 @@ from shared.routing import RouteBuilder
|
||||
from shared.security import create_audit_logger, AuditSeverity, AuditAction
|
||||
from app.core.database import get_db
|
||||
from app.services.orders_service import OrdersService
|
||||
from app.models import AuditLog
|
||||
from app.schemas.order_schemas import (
|
||||
CustomerCreate,
|
||||
CustomerUpdate,
|
||||
@@ -23,7 +24,7 @@ from app.schemas.order_schemas import (
|
||||
)
|
||||
|
||||
logger = structlog.get_logger()
|
||||
audit_logger = create_audit_logger("orders-service")
|
||||
audit_logger = create_audit_logger("orders-service", AuditLog)
|
||||
|
||||
# Create route builder for consistent URL structure
|
||||
route_builder = RouteBuilder('orders')
|
||||
|
||||
@@ -154,7 +154,7 @@ async def update_order_status(
|
||||
order_id,
|
||||
tenant_id,
|
||||
new_status,
|
||||
user_id=UUID(current_user["sub"]),
|
||||
user_id=UUID(current_user["user_id"]),
|
||||
reason=reason
|
||||
)
|
||||
|
||||
|
||||
@@ -17,6 +17,7 @@ from shared.routing import RouteBuilder
|
||||
from shared.security import create_audit_logger, AuditSeverity, AuditAction
|
||||
from app.core.database import get_db
|
||||
from app.services.orders_service import OrdersService
|
||||
from app.models import AuditLog
|
||||
from app.schemas.order_schemas import (
|
||||
OrderCreate,
|
||||
OrderUpdate,
|
||||
@@ -24,7 +25,7 @@ from app.schemas.order_schemas import (
|
||||
)
|
||||
|
||||
logger = structlog.get_logger()
|
||||
audit_logger = create_audit_logger("orders-service")
|
||||
audit_logger = create_audit_logger("orders-service", AuditLog)
|
||||
|
||||
# Create route builder for consistent URL structure
|
||||
route_builder = RouteBuilder('orders')
|
||||
@@ -217,7 +218,7 @@ async def update_order(
|
||||
db,
|
||||
db_obj=order,
|
||||
obj_in=order_data.dict(exclude_unset=True),
|
||||
updated_by=UUID(current_user["sub"])
|
||||
updated_by=UUID(current_user["user_id"])
|
||||
)
|
||||
|
||||
# Commit the transaction to persist changes
|
||||
|
||||
@@ -10,6 +10,7 @@ from uuid import UUID
|
||||
import structlog
|
||||
|
||||
from shared.config.base import BaseServiceSettings
|
||||
from shared.utils.tenant_settings_client import TenantSettingsClient
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
@@ -18,10 +19,16 @@ class ApprovalRulesService:
|
||||
"""
|
||||
Service for evaluating purchase orders against approval rules
|
||||
Implements smart auto-approval logic based on multiple criteria
|
||||
Uses tenant-specific settings from the database instead of system-level config
|
||||
"""
|
||||
|
||||
def __init__(self, config: BaseServiceSettings):
|
||||
def __init__(self, config: BaseServiceSettings, tenant_id: UUID):
|
||||
self.config = config
|
||||
self.tenant_id = tenant_id
|
||||
|
||||
# Initialize tenant settings client
|
||||
tenant_service_url = getattr(config, 'TENANT_SERVICE_URL', 'http://tenant-service:8000')
|
||||
self.tenant_settings_client = TenantSettingsClient(tenant_service_url=tenant_service_url)
|
||||
|
||||
async def evaluate_po_for_auto_approval(
|
||||
self,
|
||||
@@ -30,39 +37,60 @@ class ApprovalRulesService:
|
||||
requirements_data: Optional[List[Dict[str, Any]]] = None
|
||||
) -> Tuple[bool, List[str]]:
|
||||
"""
|
||||
Evaluate if a PO should be auto-approved
|
||||
Evaluate if a PO should be auto-approved using tenant-specific settings
|
||||
|
||||
Returns:
|
||||
Tuple of (should_auto_approve, reasons)
|
||||
"""
|
||||
if not self.config.AUTO_APPROVE_ENABLED:
|
||||
return False, ["Auto-approval is disabled in configuration"]
|
||||
# Fetch tenant-specific procurement settings
|
||||
try:
|
||||
tenant_settings = await self.tenant_settings_client.get_procurement_settings(self.tenant_id)
|
||||
except Exception as e:
|
||||
logger.error("Failed to fetch tenant settings, using safe defaults",
|
||||
tenant_id=str(self.tenant_id),
|
||||
error=str(e))
|
||||
# Use safe defaults if settings unavailable
|
||||
tenant_settings = {
|
||||
'auto_approve_enabled': False,
|
||||
'auto_approve_threshold_eur': 500.0,
|
||||
'auto_approve_min_supplier_score': 0.80,
|
||||
'require_approval_new_suppliers': True,
|
||||
'require_approval_critical_items': True
|
||||
}
|
||||
|
||||
# Check if auto-approval is enabled for this tenant
|
||||
if not tenant_settings.get('auto_approve_enabled', True):
|
||||
return False, ["Auto-approval is disabled in tenant settings"]
|
||||
|
||||
reasons = []
|
||||
should_approve = True
|
||||
|
||||
# Rule 1: Amount threshold check
|
||||
total_amount = self._calculate_po_total(po_data)
|
||||
if total_amount > self.config.AUTO_APPROVE_THRESHOLD_EUR:
|
||||
threshold = Decimal(str(tenant_settings.get('auto_approve_threshold_eur', 500.0)))
|
||||
|
||||
if total_amount > threshold:
|
||||
should_approve = False
|
||||
reasons.append(
|
||||
f"PO amount €{total_amount:.2f} exceeds threshold €{self.config.AUTO_APPROVE_THRESHOLD_EUR:.2f}"
|
||||
f"PO amount €{total_amount:.2f} exceeds threshold €{threshold:.2f}"
|
||||
)
|
||||
else:
|
||||
reasons.append(
|
||||
f"PO amount €{total_amount:.2f} within threshold €{self.config.AUTO_APPROVE_THRESHOLD_EUR:.2f}"
|
||||
f"PO amount €{total_amount:.2f} within threshold €{threshold:.2f}"
|
||||
)
|
||||
|
||||
# Rule 2: Supplier trust score check
|
||||
if supplier_data and self.config.AUTO_APPROVE_TRUSTED_SUPPLIERS:
|
||||
min_supplier_score = tenant_settings.get('auto_approve_min_supplier_score', 0.80)
|
||||
|
||||
if supplier_data:
|
||||
supplier_score = supplier_data.get('trust_score', 0.0)
|
||||
is_preferred = supplier_data.get('is_preferred_supplier', False)
|
||||
auto_approve_enabled = supplier_data.get('auto_approve_enabled', False)
|
||||
|
||||
if supplier_score < self.config.AUTO_APPROVE_MIN_SUPPLIER_SCORE:
|
||||
if supplier_score < min_supplier_score:
|
||||
should_approve = False
|
||||
reasons.append(
|
||||
f"Supplier trust score {supplier_score:.2f} below minimum {self.config.AUTO_APPROVE_MIN_SUPPLIER_SCORE:.2f}"
|
||||
f"Supplier trust score {supplier_score:.2f} below minimum {min_supplier_score:.2f}"
|
||||
)
|
||||
else:
|
||||
reasons.append(f"Supplier trust score {supplier_score:.2f} meets minimum requirements")
|
||||
@@ -84,7 +112,9 @@ class ApprovalRulesService:
|
||||
reasons.append("No supplier data available")
|
||||
|
||||
# Rule 3: New supplier check
|
||||
if supplier_data and self.config.REQUIRE_APPROVAL_NEW_SUPPLIERS:
|
||||
require_approval_new_suppliers = tenant_settings.get('require_approval_new_suppliers', True)
|
||||
|
||||
if supplier_data and require_approval_new_suppliers:
|
||||
total_pos = supplier_data.get('total_pos_count', 0)
|
||||
if total_pos < 5:
|
||||
should_approve = False
|
||||
@@ -93,7 +123,9 @@ class ApprovalRulesService:
|
||||
reasons.append(f"Established supplier with {total_pos} previous orders")
|
||||
|
||||
# Rule 4: Critical/urgent items check
|
||||
if requirements_data and self.config.REQUIRE_APPROVAL_CRITICAL_ITEMS:
|
||||
require_approval_critical_items = tenant_settings.get('require_approval_critical_items', True)
|
||||
|
||||
if requirements_data and require_approval_critical_items:
|
||||
critical_count = sum(
|
||||
1 for req in requirements_data
|
||||
if req.get('priority') in ['critical', 'urgent', 'CRITICAL', 'URGENT']
|
||||
|
||||
@@ -502,7 +502,7 @@ class ProcurementService:
|
||||
|
||||
# Import approval rules service
|
||||
from app.services.approval_rules_service import ApprovalRulesService
|
||||
approval_service = ApprovalRulesService(self.config)
|
||||
approval_service = ApprovalRulesService(self.config, tenant_id)
|
||||
|
||||
# Group requirements by supplier
|
||||
supplier_requirements = {}
|
||||
|
||||
@@ -15,10 +15,11 @@ from shared.routing import RouteBuilder
|
||||
from shared.security import create_audit_logger, AuditSeverity, AuditAction
|
||||
from app.services.pos_config_service import POSConfigurationService
|
||||
from app.schemas.pos_config import POSConfigurationListResponse
|
||||
from app.models import AuditLog
|
||||
|
||||
router = APIRouter()
|
||||
logger = structlog.get_logger()
|
||||
audit_logger = create_audit_logger("pos-service")
|
||||
audit_logger = create_audit_logger("pos-service", AuditLog)
|
||||
route_builder = RouteBuilder('pos')
|
||||
|
||||
|
||||
|
||||
@@ -14,11 +14,13 @@ from shared.routing import RouteBuilder
|
||||
from shared.security import create_audit_logger, AuditSeverity, AuditAction
|
||||
from app.core.database import get_db
|
||||
from app.services.production_service import ProductionService
|
||||
from app.models import AuditLog
|
||||
from app.schemas.equipment import (
|
||||
EquipmentCreate,
|
||||
EquipmentUpdate,
|
||||
EquipmentResponse,
|
||||
EquipmentListResponse
|
||||
EquipmentListResponse,
|
||||
EquipmentDeletionSummary
|
||||
)
|
||||
from app.models.production import EquipmentStatus, EquipmentType
|
||||
from app.core.config import settings
|
||||
@@ -27,8 +29,8 @@ logger = structlog.get_logger()
|
||||
route_builder = RouteBuilder('production')
|
||||
router = APIRouter(tags=["production-equipment"])
|
||||
|
||||
# Initialize audit logger
|
||||
audit_logger = create_audit_logger("production-service")
|
||||
# Initialize audit logger with the production service's AuditLog model
|
||||
audit_logger = create_audit_logger("production-service", AuditLog)
|
||||
|
||||
|
||||
def get_production_service() -> ProductionService:
|
||||
@@ -80,7 +82,8 @@ async def create_equipment(
|
||||
equipment_data: EquipmentCreate,
|
||||
tenant_id: UUID = Path(...),
|
||||
current_user: dict = Depends(get_current_user_dep),
|
||||
production_service: ProductionService = Depends(get_production_service)
|
||||
production_service: ProductionService = Depends(get_production_service),
|
||||
db = Depends(get_db)
|
||||
):
|
||||
"""Create a new equipment item"""
|
||||
try:
|
||||
@@ -89,15 +92,16 @@ async def create_equipment(
|
||||
logger.info("Created equipment",
|
||||
equipment_id=str(equipment.id), tenant_id=str(tenant_id))
|
||||
|
||||
# Audit log
|
||||
await audit_logger.log(
|
||||
action=AuditAction.CREATE,
|
||||
# Audit log the equipment creation
|
||||
await audit_logger.log_event(
|
||||
db_session=db,
|
||||
tenant_id=str(tenant_id),
|
||||
user_id=current_user.get('user_id'),
|
||||
action=AuditAction.CREATE.value,
|
||||
resource_type="equipment",
|
||||
resource_id=str(equipment.id),
|
||||
user_id=current_user.get('user_id'),
|
||||
tenant_id=str(tenant_id),
|
||||
severity=AuditSeverity.INFO,
|
||||
details={"equipment_name": equipment.name, "equipment_type": equipment.type.value}
|
||||
severity=AuditSeverity.INFO.value,
|
||||
audit_metadata={"equipment_name": equipment.name, "equipment_type": equipment.type.value}
|
||||
)
|
||||
|
||||
return EquipmentResponse.model_validate(equipment)
|
||||
@@ -152,7 +156,8 @@ async def update_equipment(
|
||||
tenant_id: UUID = Path(...),
|
||||
equipment_id: UUID = Path(...),
|
||||
current_user: dict = Depends(get_current_user_dep),
|
||||
production_service: ProductionService = Depends(get_production_service)
|
||||
production_service: ProductionService = Depends(get_production_service),
|
||||
db = Depends(get_db)
|
||||
):
|
||||
"""Update an equipment item"""
|
||||
try:
|
||||
@@ -164,15 +169,16 @@ async def update_equipment(
|
||||
logger.info("Updated equipment",
|
||||
equipment_id=str(equipment_id), tenant_id=str(tenant_id))
|
||||
|
||||
# Audit log
|
||||
await audit_logger.log(
|
||||
action=AuditAction.UPDATE,
|
||||
# Audit log the equipment update
|
||||
await audit_logger.log_event(
|
||||
db_session=db,
|
||||
tenant_id=str(tenant_id),
|
||||
user_id=current_user.get('user_id'),
|
||||
action=AuditAction.UPDATE.value,
|
||||
resource_type="equipment",
|
||||
resource_id=str(equipment_id),
|
||||
user_id=current_user.get('user_id'),
|
||||
tenant_id=str(tenant_id),
|
||||
severity=AuditSeverity.INFO,
|
||||
details={"updates": equipment_data.model_dump(exclude_unset=True)}
|
||||
severity=AuditSeverity.INFO.value,
|
||||
audit_metadata={"updates": equipment_data.model_dump(exclude_unset=True)}
|
||||
)
|
||||
|
||||
return EquipmentResponse.model_validate(equipment)
|
||||
@@ -189,37 +195,80 @@ async def update_equipment(
|
||||
raise HTTPException(status_code=500, detail="Failed to update equipment")
|
||||
|
||||
|
||||
@router.get(
|
||||
route_builder.build_base_route("equipment/{equipment_id}/deletion-summary"),
|
||||
response_model=EquipmentDeletionSummary
|
||||
)
|
||||
async def get_equipment_deletion_summary(
|
||||
tenant_id: UUID = Path(...),
|
||||
equipment_id: UUID = Path(...),
|
||||
current_user: dict = Depends(get_current_user_dep),
|
||||
production_service: ProductionService = Depends(get_production_service)
|
||||
):
|
||||
"""Get deletion summary for equipment (dependency check)"""
|
||||
try:
|
||||
summary = await production_service.get_equipment_deletion_summary(tenant_id, equipment_id)
|
||||
|
||||
logger.info("Retrieved equipment deletion summary",
|
||||
equipment_id=str(equipment_id), tenant_id=str(tenant_id))
|
||||
|
||||
return EquipmentDeletionSummary(**summary)
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Error getting equipment deletion summary",
|
||||
error=str(e), equipment_id=str(equipment_id), tenant_id=str(tenant_id))
|
||||
raise HTTPException(status_code=500, detail="Failed to get deletion summary")
|
||||
|
||||
|
||||
@router.delete(
|
||||
route_builder.build_base_route("equipment/{equipment_id}")
|
||||
)
|
||||
async def delete_equipment(
|
||||
tenant_id: UUID = Path(...),
|
||||
equipment_id: UUID = Path(...),
|
||||
permanent: bool = Query(False, description="Permanent delete (hard delete) if true"),
|
||||
current_user: dict = Depends(get_current_user_dep),
|
||||
production_service: ProductionService = Depends(get_production_service)
|
||||
production_service: ProductionService = Depends(get_production_service),
|
||||
db = Depends(get_db)
|
||||
):
|
||||
"""Delete (soft delete) an equipment item"""
|
||||
"""Delete an equipment item. Use permanent=true for hard delete (requires admin role)"""
|
||||
try:
|
||||
success = await production_service.delete_equipment(tenant_id, equipment_id)
|
||||
# Hard delete requires admin role
|
||||
if permanent:
|
||||
user_role = current_user.get('role', '').lower()
|
||||
if user_role not in ['admin', 'owner']:
|
||||
raise HTTPException(
|
||||
status_code=403,
|
||||
detail="Hard delete requires admin or owner role"
|
||||
)
|
||||
|
||||
success = await production_service.hard_delete_equipment(tenant_id, equipment_id)
|
||||
delete_type = "hard_delete"
|
||||
severity = AuditSeverity.CRITICAL.value
|
||||
else:
|
||||
success = await production_service.delete_equipment(tenant_id, equipment_id)
|
||||
delete_type = "soft_delete"
|
||||
severity = AuditSeverity.WARNING.value
|
||||
|
||||
if not success:
|
||||
raise HTTPException(status_code=404, detail="Equipment not found")
|
||||
|
||||
logger.info("Deleted equipment",
|
||||
logger.info(f"{'Hard' if permanent else 'Soft'} deleted equipment",
|
||||
equipment_id=str(equipment_id), tenant_id=str(tenant_id))
|
||||
|
||||
# Audit log
|
||||
await audit_logger.log(
|
||||
action=AuditAction.DELETE,
|
||||
# Audit log the equipment deletion
|
||||
await audit_logger.log_event(
|
||||
db_session=db,
|
||||
tenant_id=str(tenant_id),
|
||||
user_id=current_user.get('user_id'),
|
||||
action=AuditAction.DELETE.value,
|
||||
resource_type="equipment",
|
||||
resource_id=str(equipment_id),
|
||||
user_id=current_user.get('user_id'),
|
||||
tenant_id=str(tenant_id),
|
||||
severity=AuditSeverity.WARNING,
|
||||
details={"action": "soft_delete"}
|
||||
severity=severity,
|
||||
audit_metadata={"action": delete_type, "permanent": permanent}
|
||||
)
|
||||
|
||||
return {"message": "Equipment deleted successfully"}
|
||||
return {"message": f"Equipment {'permanently deleted' if permanent else 'deleted'} successfully"}
|
||||
|
||||
except HTTPException:
|
||||
raise
|
||||
|
||||
@@ -15,6 +15,7 @@ from shared.routing import RouteBuilder
|
||||
from shared.security import create_audit_logger, AuditSeverity, AuditAction
|
||||
from app.core.database import get_db
|
||||
from app.services.production_service import ProductionService
|
||||
from app.models import AuditLog
|
||||
from app.schemas.production import (
|
||||
ProductionBatchCreate,
|
||||
ProductionBatchUpdate,
|
||||
@@ -29,8 +30,8 @@ logger = structlog.get_logger()
|
||||
route_builder = RouteBuilder('production')
|
||||
router = APIRouter(tags=["production-batches"])
|
||||
|
||||
# Initialize audit logger
|
||||
audit_logger = create_audit_logger("production-service")
|
||||
# Initialize audit logger with the production service's AuditLog model
|
||||
audit_logger = create_audit_logger("production-service", AuditLog)
|
||||
|
||||
|
||||
def get_production_service() -> ProductionService:
|
||||
|
||||
@@ -15,6 +15,7 @@ from shared.routing import RouteBuilder
|
||||
from shared.security import create_audit_logger, AuditSeverity, AuditAction
|
||||
from app.core.database import get_db
|
||||
from app.services.production_service import ProductionService
|
||||
from app.models import AuditLog
|
||||
from app.schemas.production import (
|
||||
ProductionScheduleCreate,
|
||||
ProductionScheduleUpdate,
|
||||
@@ -26,8 +27,8 @@ logger = structlog.get_logger()
|
||||
route_builder = RouteBuilder('production')
|
||||
router = APIRouter(tags=["production-schedules"])
|
||||
|
||||
# Initialize audit logger
|
||||
audit_logger = create_audit_logger("production-service")
|
||||
# Initialize audit logger with the production service's AuditLog model
|
||||
audit_logger = create_audit_logger("production-service", AuditLog)
|
||||
|
||||
|
||||
def get_production_service() -> ProductionService:
|
||||
|
||||
@@ -102,7 +102,7 @@ async def create_quality_template(
|
||||
|
||||
# Add created_by from current user
|
||||
template_dict = template_data.dict()
|
||||
template_dict['created_by'] = UUID(current_user["sub"])
|
||||
template_dict['created_by'] = UUID(current_user["user_id"])
|
||||
template_create = QualityCheckTemplateCreate(**template_dict)
|
||||
|
||||
# Create template via service (handles validation and business rules)
|
||||
@@ -111,6 +111,9 @@ async def create_quality_template(
|
||||
template_data=template_create
|
||||
)
|
||||
|
||||
# Commit the transaction to persist changes
|
||||
await db.commit()
|
||||
|
||||
logger.info("Created quality template",
|
||||
template_id=str(template.id),
|
||||
template_name=template.name,
|
||||
@@ -202,6 +205,9 @@ async def update_quality_template(
|
||||
detail="Quality template not found"
|
||||
)
|
||||
|
||||
# Commit the transaction to persist changes
|
||||
await db.commit()
|
||||
|
||||
logger.info("Updated quality template",
|
||||
template_id=str(template_id),
|
||||
tenant_id=str(tenant_id))
|
||||
@@ -259,6 +265,9 @@ async def delete_quality_template(
|
||||
detail="Quality template not found"
|
||||
)
|
||||
|
||||
# Commit the transaction to persist changes
|
||||
await db.commit()
|
||||
|
||||
logger.info("Deleted quality template",
|
||||
template_id=str(template_id),
|
||||
tenant_id=str(tenant_id))
|
||||
|
||||
@@ -150,3 +150,72 @@ class EquipmentRepository(ProductionBaseRepository):
|
||||
except Exception as e:
|
||||
logger.error("Error deleting equipment", error=str(e), equipment_id=str(equipment_id))
|
||||
raise
|
||||
|
||||
async def hard_delete_equipment(self, equipment_id: UUID) -> bool:
|
||||
"""Permanently delete equipment from database"""
|
||||
try:
|
||||
equipment = await self.get(equipment_id)
|
||||
if not equipment:
|
||||
return False
|
||||
|
||||
await self.session.delete(equipment)
|
||||
await self.session.flush()
|
||||
return True
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Error hard deleting equipment", error=str(e), equipment_id=str(equipment_id))
|
||||
raise
|
||||
|
||||
async def get_equipment_deletion_summary(self, tenant_id: UUID, equipment_id: UUID) -> Dict[str, Any]:
|
||||
"""Get summary of what will be affected by deleting equipment"""
|
||||
try:
|
||||
equipment = await self.get_equipment_by_id(tenant_id, equipment_id)
|
||||
if not equipment:
|
||||
return {
|
||||
"can_delete": False,
|
||||
"warnings": ["Equipment not found"],
|
||||
"production_batches_count": 0,
|
||||
"maintenance_records_count": 0,
|
||||
"temperature_logs_count": 0
|
||||
}
|
||||
|
||||
# Check for related production batches
|
||||
from app.models.production import ProductionBatch
|
||||
batch_query = select(func.count(ProductionBatch.id)).filter(
|
||||
and_(
|
||||
ProductionBatch.tenant_id == tenant_id,
|
||||
ProductionBatch.equipment_id == equipment_id
|
||||
)
|
||||
)
|
||||
batch_result = await self.session.execute(batch_query)
|
||||
batches_count = batch_result.scalar() or 0
|
||||
|
||||
# For now, we'll use placeholder counts for maintenance and temperature logs
|
||||
# These would need to be implemented based on your actual models
|
||||
maintenance_count = 0
|
||||
temperature_logs_count = 0
|
||||
|
||||
warnings = []
|
||||
if batches_count > 0:
|
||||
warnings.append(f"{batches_count} production batch(es) are using this equipment")
|
||||
|
||||
# Equipment can be deleted even with dependencies, but warn the user
|
||||
can_delete = True
|
||||
|
||||
return {
|
||||
"can_delete": can_delete,
|
||||
"warnings": warnings,
|
||||
"production_batches_count": batches_count,
|
||||
"maintenance_records_count": maintenance_count,
|
||||
"temperature_logs_count": temperature_logs_count,
|
||||
"equipment_name": equipment.name,
|
||||
"equipment_type": equipment.type.value,
|
||||
"equipment_location": equipment.location
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Error getting equipment deletion summary",
|
||||
error=str(e),
|
||||
equipment_id=str(equipment_id),
|
||||
tenant_id=str(tenant_id))
|
||||
raise
|
||||
|
||||
@@ -132,6 +132,16 @@ class QualityTemplateRepository(ProductionBaseRepository):
|
||||
existing = await self.get_by_filters(and_(*filters))
|
||||
return existing is not None
|
||||
|
||||
async def get_by_filters(self, *filters):
|
||||
"""Get a single record by filters"""
|
||||
try:
|
||||
query = select(self.model).where(and_(*filters))
|
||||
result = await self.session.execute(query)
|
||||
return result.scalar_one_or_none()
|
||||
except Exception as e:
|
||||
logger.error("Error getting record by filters", error=str(e), filters=str(filters))
|
||||
raise
|
||||
|
||||
async def get_templates_by_ids(
|
||||
self,
|
||||
tenant_id: str,
|
||||
@@ -149,4 +159,4 @@ class QualityTemplateRepository(ProductionBaseRepository):
|
||||
QualityCheckTemplate.is_required.desc(),
|
||||
QualityCheckTemplate.weight.desc()
|
||||
]
|
||||
)
|
||||
)
|
||||
|
||||
@@ -169,3 +169,30 @@ class EquipmentListResponse(BaseModel):
|
||||
}
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
class EquipmentDeletionSummary(BaseModel):
|
||||
"""Schema for equipment deletion summary"""
|
||||
can_delete: bool = Field(..., description="Whether the equipment can be deleted")
|
||||
warnings: List[str] = Field(default_factory=list, description="List of warnings about deletion")
|
||||
production_batches_count: int = Field(default=0, description="Number of production batches using this equipment")
|
||||
maintenance_records_count: int = Field(default=0, description="Number of maintenance records")
|
||||
temperature_logs_count: int = Field(default=0, description="Number of temperature logs")
|
||||
equipment_name: Optional[str] = Field(None, description="Equipment name")
|
||||
equipment_type: Optional[str] = Field(None, description="Equipment type")
|
||||
equipment_location: Optional[str] = Field(None, description="Equipment location")
|
||||
|
||||
model_config = ConfigDict(
|
||||
json_schema_extra={
|
||||
"example": {
|
||||
"can_delete": True,
|
||||
"warnings": ["3 production batch(es) are using this equipment"],
|
||||
"production_batches_count": 3,
|
||||
"maintenance_records_count": 5,
|
||||
"temperature_logs_count": 120,
|
||||
"equipment_name": "Horno Principal #1",
|
||||
"equipment_type": "oven",
|
||||
"equipment_location": "Área de Horneado"
|
||||
}
|
||||
}
|
||||
)
|
||||
|
||||
@@ -1501,6 +1501,9 @@ class ProductionService:
|
||||
# Create equipment
|
||||
equipment = await equipment_repo.create_equipment(equipment_dict)
|
||||
|
||||
# Commit the transaction to persist changes
|
||||
await session.commit()
|
||||
|
||||
logger.info("Created equipment",
|
||||
equipment_id=str(equipment.id), tenant_id=str(tenant_id))
|
||||
|
||||
@@ -1529,6 +1532,9 @@ class ProductionService:
|
||||
equipment_update.model_dump(exclude_none=True)
|
||||
)
|
||||
|
||||
# Commit the transaction to persist changes
|
||||
await session.commit()
|
||||
|
||||
logger.info("Updated equipment",
|
||||
equipment_id=str(equipment_id), tenant_id=str(tenant_id))
|
||||
|
||||
@@ -1554,6 +1560,9 @@ class ProductionService:
|
||||
# Soft delete equipment
|
||||
success = await equipment_repo.delete_equipment(equipment_id)
|
||||
|
||||
# Commit the transaction to persist changes
|
||||
await session.commit()
|
||||
|
||||
logger.info("Deleted equipment",
|
||||
equipment_id=str(equipment_id), tenant_id=str(tenant_id))
|
||||
|
||||
@@ -1564,6 +1573,60 @@ class ProductionService:
|
||||
error=str(e), equipment_id=str(equipment_id), tenant_id=str(tenant_id))
|
||||
raise
|
||||
|
||||
async def hard_delete_equipment(self, tenant_id: UUID, equipment_id: UUID) -> bool:
|
||||
"""Permanently delete an equipment item from database"""
|
||||
try:
|
||||
async with self.database_manager.get_session() as session:
|
||||
from app.repositories.equipment_repository import EquipmentRepository
|
||||
equipment_repo = EquipmentRepository(session)
|
||||
|
||||
# First verify equipment belongs to tenant
|
||||
equipment = await equipment_repo.get_equipment_by_id(tenant_id, equipment_id)
|
||||
if not equipment:
|
||||
return False
|
||||
|
||||
# Get deletion summary first for logging
|
||||
summary = await equipment_repo.get_equipment_deletion_summary(tenant_id, equipment_id)
|
||||
|
||||
# Hard delete equipment
|
||||
success = await equipment_repo.hard_delete_equipment(equipment_id)
|
||||
|
||||
# Commit the transaction to persist changes
|
||||
await session.commit()
|
||||
|
||||
logger.info("Hard deleted equipment",
|
||||
equipment_id=str(equipment_id),
|
||||
tenant_id=str(tenant_id),
|
||||
affected_batches=summary.get("production_batches_count", 0))
|
||||
|
||||
return success
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Error hard deleting equipment",
|
||||
error=str(e), equipment_id=str(equipment_id), tenant_id=str(tenant_id))
|
||||
raise
|
||||
|
||||
async def get_equipment_deletion_summary(self, tenant_id: UUID, equipment_id: UUID) -> Dict[str, Any]:
|
||||
"""Get deletion summary for an equipment item"""
|
||||
try:
|
||||
async with self.database_manager.get_session() as session:
|
||||
from app.repositories.equipment_repository import EquipmentRepository
|
||||
equipment_repo = EquipmentRepository(session)
|
||||
|
||||
summary = await equipment_repo.get_equipment_deletion_summary(tenant_id, equipment_id)
|
||||
|
||||
logger.info("Retrieved equipment deletion summary",
|
||||
equipment_id=str(equipment_id),
|
||||
tenant_id=str(tenant_id),
|
||||
can_delete=summary.get("can_delete", False))
|
||||
|
||||
return summary
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Error getting equipment deletion summary",
|
||||
error=str(e), equipment_id=str(equipment_id), tenant_id=str(tenant_id))
|
||||
raise
|
||||
|
||||
# ================================================================
|
||||
# SUSTAINABILITY / WASTE ANALYTICS
|
||||
# ================================================================
|
||||
|
||||
@@ -16,6 +16,7 @@ from ..schemas.recipes import (
|
||||
RecipeUpdate,
|
||||
RecipeResponse,
|
||||
)
|
||||
from ..models import AuditLog
|
||||
from shared.routing import RouteBuilder, RouteCategory
|
||||
from shared.auth.access_control import require_user_role
|
||||
from shared.auth.decorators import get_current_user_dep
|
||||
@@ -23,7 +24,7 @@ from shared.security import create_audit_logger, AuditSeverity, AuditAction
|
||||
|
||||
route_builder = RouteBuilder('recipes')
|
||||
logger = logging.getLogger(__name__)
|
||||
audit_logger = create_audit_logger("recipes-service")
|
||||
audit_logger = create_audit_logger("recipes-service", AuditLog)
|
||||
router = APIRouter(tags=["recipes"])
|
||||
|
||||
|
||||
|
||||
@@ -16,6 +16,7 @@ from app.schemas.sales import (
|
||||
SalesDataQuery
|
||||
)
|
||||
from app.services.sales_service import SalesService
|
||||
from app.models import AuditLog
|
||||
from shared.auth.decorators import get_current_user_dep
|
||||
from shared.auth.access_control import require_user_role
|
||||
from shared.routing import RouteBuilder
|
||||
@@ -26,7 +27,7 @@ router = APIRouter(tags=["sales-records"])
|
||||
logger = structlog.get_logger()
|
||||
|
||||
# Initialize audit logger
|
||||
audit_logger = create_audit_logger("sales-service")
|
||||
audit_logger = create_audit_logger("sales-service", AuditLog)
|
||||
|
||||
|
||||
def get_sales_service():
|
||||
|
||||
@@ -16,6 +16,7 @@ from app.schemas.suppliers import (
|
||||
PurchaseOrderSearchParams
|
||||
)
|
||||
from app.models.suppliers import PurchaseOrderStatus
|
||||
from app.models import AuditLog
|
||||
from shared.auth.decorators import get_current_user_dep
|
||||
from shared.routing import RouteBuilder
|
||||
from shared.auth.access_control import require_user_role
|
||||
@@ -27,7 +28,7 @@ route_builder = RouteBuilder('suppliers')
|
||||
|
||||
router = APIRouter(tags=["purchase-orders"])
|
||||
logger = structlog.get_logger()
|
||||
audit_logger = create_audit_logger("suppliers-service")
|
||||
audit_logger = create_audit_logger("suppliers-service", AuditLog)
|
||||
|
||||
|
||||
@router.post(route_builder.build_base_route("purchase-orders"), response_model=PurchaseOrderResponse)
|
||||
|
||||
@@ -22,6 +22,7 @@ from app.schemas.suppliers import (
|
||||
PurchaseOrderStatusUpdate, PurchaseOrderApproval, PurchaseOrderResponse, PurchaseOrderSummary
|
||||
)
|
||||
from app.models.suppliers import SupplierType
|
||||
from app.models import AuditLog
|
||||
from shared.auth.decorators import get_current_user_dep
|
||||
from shared.routing import RouteBuilder
|
||||
from shared.auth.access_control import require_user_role
|
||||
@@ -33,7 +34,7 @@ route_builder = RouteBuilder('suppliers')
|
||||
|
||||
router = APIRouter(tags=["supplier-operations"])
|
||||
logger = structlog.get_logger()
|
||||
audit_logger = create_audit_logger("suppliers-service")
|
||||
audit_logger = create_audit_logger("suppliers-service", AuditLog)
|
||||
|
||||
|
||||
# ===== Supplier Operations =====
|
||||
|
||||
@@ -13,9 +13,11 @@ from sqlalchemy import select
|
||||
from app.core.database import get_db
|
||||
from app.services.supplier_service import SupplierService
|
||||
from app.models.suppliers import SupplierPriceList
|
||||
from app.models import AuditLog
|
||||
from app.schemas.suppliers import (
|
||||
SupplierCreate, SupplierUpdate, SupplierResponse, SupplierSummary,
|
||||
SupplierSearchParams, SupplierDeletionSummary
|
||||
SupplierSearchParams, SupplierDeletionSummary,
|
||||
SupplierPriceListCreate, SupplierPriceListUpdate, SupplierPriceListResponse
|
||||
)
|
||||
from shared.auth.decorators import get_current_user_dep
|
||||
from shared.routing import RouteBuilder
|
||||
@@ -28,7 +30,7 @@ route_builder = RouteBuilder('suppliers')
|
||||
|
||||
router = APIRouter(tags=["suppliers"])
|
||||
logger = structlog.get_logger()
|
||||
audit_logger = create_audit_logger("suppliers-service")
|
||||
audit_logger = create_audit_logger("suppliers-service", AuditLog)
|
||||
|
||||
@router.post(route_builder.build_base_route(""), response_model=SupplierResponse)
|
||||
@require_user_role(['admin', 'owner', 'member'])
|
||||
@@ -359,4 +361,252 @@ async def get_supplier_products(
|
||||
raise HTTPException(
|
||||
status_code=500,
|
||||
detail="Failed to retrieve supplier products"
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
@router.get(
|
||||
route_builder.build_resource_action_route("", "supplier_id", "price-lists"),
|
||||
response_model=List[SupplierPriceListResponse]
|
||||
)
|
||||
async def get_supplier_price_lists(
|
||||
supplier_id: UUID = Path(..., description="Supplier ID"),
|
||||
tenant_id: str = Path(..., description="Tenant ID"),
|
||||
is_active: bool = Query(True, description="Filter by active price lists"),
|
||||
db: AsyncSession = Depends(get_db)
|
||||
):
|
||||
"""Get all price list items for a supplier"""
|
||||
try:
|
||||
service = SupplierService(db)
|
||||
price_lists = await service.get_supplier_price_lists(
|
||||
supplier_id=supplier_id,
|
||||
tenant_id=UUID(tenant_id),
|
||||
is_active=is_active
|
||||
)
|
||||
|
||||
logger.info(
|
||||
"Retrieved supplier price lists",
|
||||
supplier_id=str(supplier_id),
|
||||
count=len(price_lists)
|
||||
)
|
||||
|
||||
return [SupplierPriceListResponse.from_orm(pl) for pl in price_lists]
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
"Error getting supplier price lists",
|
||||
supplier_id=str(supplier_id),
|
||||
error=str(e)
|
||||
)
|
||||
raise HTTPException(
|
||||
status_code=500,
|
||||
detail="Failed to retrieve supplier price lists"
|
||||
)
|
||||
|
||||
|
||||
@router.get(
|
||||
route_builder.build_resource_action_route("", "supplier_id", "price-lists/{price_list_id}"),
|
||||
response_model=SupplierPriceListResponse
|
||||
)
|
||||
async def get_supplier_price_list(
|
||||
supplier_id: UUID = Path(..., description="Supplier ID"),
|
||||
price_list_id: UUID = Path(..., description="Price List ID"),
|
||||
tenant_id: str = Path(..., description="Tenant ID"),
|
||||
db: AsyncSession = Depends(get_db)
|
||||
):
|
||||
"""Get specific price list item for a supplier"""
|
||||
try:
|
||||
service = SupplierService(db)
|
||||
price_list = await service.get_supplier_price_list(
|
||||
price_list_id=price_list_id,
|
||||
tenant_id=UUID(tenant_id)
|
||||
)
|
||||
|
||||
if not price_list:
|
||||
raise HTTPException(status_code=404, detail="Price list item not found")
|
||||
|
||||
logger.info(
|
||||
"Retrieved supplier price list item",
|
||||
supplier_id=str(supplier_id),
|
||||
price_list_id=str(price_list_id)
|
||||
)
|
||||
|
||||
return SupplierPriceListResponse.from_orm(price_list)
|
||||
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
"Error getting supplier price list item",
|
||||
supplier_id=str(supplier_id),
|
||||
price_list_id=str(price_list_id),
|
||||
error=str(e)
|
||||
)
|
||||
raise HTTPException(
|
||||
status_code=500,
|
||||
detail="Failed to retrieve supplier price list item"
|
||||
)
|
||||
|
||||
|
||||
@router.post(
|
||||
route_builder.build_resource_action_route("", "supplier_id", "price-lists"),
|
||||
response_model=SupplierPriceListResponse
|
||||
)
|
||||
@require_user_role(['admin', 'owner', 'member'])
|
||||
async def create_supplier_price_list(
|
||||
supplier_id: UUID = Path(..., description="Supplier ID"),
|
||||
price_list_data: SupplierPriceListCreate = None,
|
||||
tenant_id: str = Path(..., description="Tenant ID"),
|
||||
current_user: Dict[str, Any] = Depends(get_current_user_dep),
|
||||
db: AsyncSession = Depends(get_db)
|
||||
):
|
||||
"""Create a new price list item for a supplier"""
|
||||
try:
|
||||
service = SupplierService(db)
|
||||
|
||||
# Verify supplier exists
|
||||
supplier = await service.get_supplier(supplier_id)
|
||||
if not supplier:
|
||||
raise HTTPException(status_code=404, detail="Supplier not found")
|
||||
|
||||
price_list = await service.create_supplier_price_list(
|
||||
supplier_id=supplier_id,
|
||||
price_list_data=price_list_data,
|
||||
tenant_id=UUID(tenant_id),
|
||||
created_by=UUID(current_user["user_id"])
|
||||
)
|
||||
|
||||
logger.info(
|
||||
"Created supplier price list item",
|
||||
supplier_id=str(supplier_id),
|
||||
price_list_id=str(price_list.id)
|
||||
)
|
||||
|
||||
return SupplierPriceListResponse.from_orm(price_list)
|
||||
|
||||
except ValueError as e:
|
||||
raise HTTPException(status_code=400, detail=str(e))
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
"Error creating supplier price list item",
|
||||
supplier_id=str(supplier_id),
|
||||
error=str(e)
|
||||
)
|
||||
raise HTTPException(
|
||||
status_code=500,
|
||||
detail="Failed to create supplier price list item"
|
||||
)
|
||||
|
||||
|
||||
@router.put(
|
||||
route_builder.build_resource_action_route("", "supplier_id", "price-lists/{price_list_id}"),
|
||||
response_model=SupplierPriceListResponse
|
||||
)
|
||||
@require_user_role(['admin', 'owner', 'member'])
|
||||
async def update_supplier_price_list(
|
||||
supplier_id: UUID = Path(..., description="Supplier ID"),
|
||||
price_list_id: UUID = Path(..., description="Price List ID"),
|
||||
price_list_data: SupplierPriceListUpdate = None,
|
||||
tenant_id: str = Path(..., description="Tenant ID"),
|
||||
current_user: Dict[str, Any] = Depends(get_current_user_dep),
|
||||
db: AsyncSession = Depends(get_db)
|
||||
):
|
||||
"""Update a price list item for a supplier"""
|
||||
try:
|
||||
service = SupplierService(db)
|
||||
|
||||
# Verify supplier and price list exist
|
||||
supplier = await service.get_supplier(supplier_id)
|
||||
if not supplier:
|
||||
raise HTTPException(status_code=404, detail="Supplier not found")
|
||||
|
||||
price_list = await service.get_supplier_price_list(
|
||||
price_list_id=price_list_id,
|
||||
tenant_id=UUID(tenant_id)
|
||||
)
|
||||
if not price_list:
|
||||
raise HTTPException(status_code=404, detail="Price list item not found")
|
||||
|
||||
updated_price_list = await service.update_supplier_price_list(
|
||||
price_list_id=price_list_id,
|
||||
price_list_data=price_list_data,
|
||||
tenant_id=UUID(tenant_id),
|
||||
updated_by=UUID(current_user["user_id"])
|
||||
)
|
||||
|
||||
logger.info(
|
||||
"Updated supplier price list item",
|
||||
supplier_id=str(supplier_id),
|
||||
price_list_id=str(price_list_id)
|
||||
)
|
||||
|
||||
return SupplierPriceListResponse.from_orm(updated_price_list)
|
||||
|
||||
except ValueError as e:
|
||||
raise HTTPException(status_code=400, detail=str(e))
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
"Error updating supplier price list item",
|
||||
supplier_id=str(supplier_id),
|
||||
price_list_id=str(price_list_id),
|
||||
error=str(e)
|
||||
)
|
||||
raise HTTPException(
|
||||
status_code=500,
|
||||
detail="Failed to update supplier price list item"
|
||||
)
|
||||
|
||||
|
||||
@router.delete(
|
||||
route_builder.build_resource_action_route("", "supplier_id", "price-lists/{price_list_id}")
|
||||
)
|
||||
@require_user_role(['admin', 'owner'])
|
||||
async def delete_supplier_price_list(
|
||||
supplier_id: UUID = Path(..., description="Supplier ID"),
|
||||
price_list_id: UUID = Path(..., description="Price List ID"),
|
||||
tenant_id: str = Path(..., description="Tenant ID"),
|
||||
current_user: Dict[str, Any] = Depends(get_current_user_dep),
|
||||
db: AsyncSession = Depends(get_db)
|
||||
):
|
||||
"""Delete a price list item for a supplier"""
|
||||
try:
|
||||
service = SupplierService(db)
|
||||
|
||||
# Verify supplier and price list exist
|
||||
supplier = await service.get_supplier(supplier_id)
|
||||
if not supplier:
|
||||
raise HTTPException(status_code=404, detail="Supplier not found")
|
||||
|
||||
price_list = await service.get_supplier_price_list(
|
||||
price_list_id=price_list_id,
|
||||
tenant_id=UUID(tenant_id)
|
||||
)
|
||||
if not price_list:
|
||||
raise HTTPException(status_code=404, detail="Price list item not found")
|
||||
|
||||
success = await service.delete_supplier_price_list(
|
||||
price_list_id=price_list_id,
|
||||
tenant_id=UUID(tenant_id)
|
||||
)
|
||||
|
||||
if not success:
|
||||
raise HTTPException(status_code=404, detail="Price list item not found")
|
||||
|
||||
logger.info(
|
||||
"Deleted supplier price list item",
|
||||
supplier_id=str(supplier_id),
|
||||
price_list_id=str(price_list_id)
|
||||
)
|
||||
|
||||
return {"message": "Price list item deleted successfully"}
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
"Error deleting supplier price list item",
|
||||
supplier_id=str(supplier_id),
|
||||
price_list_id=str(price_list_id),
|
||||
error=str(e)
|
||||
)
|
||||
raise HTTPException(
|
||||
status_code=500,
|
||||
detail="Failed to delete supplier price list item"
|
||||
)
|
||||
|
||||
@@ -348,3 +348,93 @@ class SupplierRepository(BaseRepository[Supplier]):
|
||||
"deleted_scorecards": scorecards_count,
|
||||
"deletion_timestamp": datetime.utcnow()
|
||||
}
|
||||
|
||||
async def get_supplier_price_lists(
|
||||
self,
|
||||
supplier_id: UUID,
|
||||
tenant_id: UUID,
|
||||
is_active: bool = True
|
||||
) -> List[Any]:
|
||||
"""Get all price list items for a supplier"""
|
||||
from app.models.suppliers import SupplierPriceList
|
||||
|
||||
stmt = select(SupplierPriceList).filter(
|
||||
and_(
|
||||
SupplierPriceList.supplier_id == supplier_id,
|
||||
SupplierPriceList.tenant_id == tenant_id
|
||||
)
|
||||
)
|
||||
|
||||
if is_active:
|
||||
stmt = stmt.filter(SupplierPriceList.is_active == True)
|
||||
|
||||
result = await self.db.execute(stmt)
|
||||
return result.scalars().all()
|
||||
|
||||
async def get_supplier_price_list(
|
||||
self,
|
||||
price_list_id: UUID,
|
||||
tenant_id: UUID
|
||||
) -> Optional[Any]:
|
||||
"""Get specific price list item"""
|
||||
from app.models.suppliers import SupplierPriceList
|
||||
|
||||
stmt = select(SupplierPriceList).filter(
|
||||
and_(
|
||||
SupplierPriceList.id == price_list_id,
|
||||
SupplierPriceList.tenant_id == tenant_id
|
||||
)
|
||||
)
|
||||
result = await self.db.execute(stmt)
|
||||
return result.scalar_one_or_none()
|
||||
|
||||
async def create_supplier_price_list(
|
||||
self,
|
||||
create_data: Dict[str, Any]
|
||||
) -> Any:
|
||||
"""Create a new price list item"""
|
||||
from app.models.suppliers import SupplierPriceList
|
||||
|
||||
price_list = SupplierPriceList(**create_data)
|
||||
self.db.add(price_list)
|
||||
await self.db.commit()
|
||||
await self.db.refresh(price_list)
|
||||
return price_list
|
||||
|
||||
async def update_supplier_price_list(
    self,
    price_list_id: UUID,
    update_data: Dict[str, Any],
    tenant_id: Optional[UUID] = None
) -> Any:
    """Update a price list item and return the refreshed row.

    Args:
        price_list_id: Primary key of the row to update.
        update_data: Column -> value mapping; keys that are not attributes
            of the model are silently ignored.
        tenant_id: Optional tenant scope. When provided, the lookup is
            restricted to that tenant so one tenant cannot modify another
            tenant's rows. The read helpers in this repository already
            scope by tenant; this keeps writes consistent with them while
            staying backward-compatible (default None preserves the old
            unscoped behavior).

    Raises:
        ValueError: If no matching row exists.
    """
    from app.models.suppliers import SupplierPriceList

    stmt = select(SupplierPriceList).filter(SupplierPriceList.id == price_list_id)
    if tenant_id is not None:
        stmt = stmt.filter(SupplierPriceList.tenant_id == tenant_id)
    result = await self.db.execute(stmt)
    price_list = result.scalar_one_or_none()

    if not price_list:
        raise ValueError("Price list item not found")

    # Apply only keys that map to real model attributes.
    for key, value in update_data.items():
        if hasattr(price_list, key):
            setattr(price_list, key, value)

    await self.db.commit()
    await self.db.refresh(price_list)
    return price_list
|
||||
|
||||
async def delete_supplier_price_list(
    self,
    price_list_id: UUID,
    tenant_id: Optional[UUID] = None
) -> bool:
    """Delete a price list item.

    Args:
        price_list_id: Primary key of the row to delete.
        tenant_id: Optional tenant scope; when provided the delete only
            affects rows owned by that tenant, matching the tenant scoping
            the read helpers use. Default None preserves the previous
            unscoped behavior for existing callers.

    Returns:
        True when a row was actually removed, False otherwise.
    """
    from app.models.suppliers import SupplierPriceList
    from sqlalchemy import delete

    stmt = delete(SupplierPriceList).filter(SupplierPriceList.id == price_list_id)
    if tenant_id is not None:
        stmt = stmt.filter(SupplierPriceList.tenant_id == tenant_id)
    result = await self.db.execute(stmt)

    await self.db.commit()
    return result.rowcount > 0
|
||||
|
||||
@@ -600,6 +600,80 @@ class DeliverySearchParams(BaseModel):
|
||||
offset: int = Field(default=0, ge=0)
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# SUPPLIER PRICE LIST SCHEMAS
|
||||
# ============================================================================
|
||||
|
||||
class SupplierPriceListCreate(BaseModel):
    """Schema for creating supplier price list items."""
    inventory_product_id: UUID
    product_code: Optional[str] = Field(None, max_length=100)
    unit_price: Decimal = Field(..., gt=0)
    unit_of_measure: str = Field(..., max_length=20)
    minimum_order_quantity: Optional[int] = Field(None, ge=1)
    price_per_unit: Decimal = Field(..., gt=0)
    # Tiered pricing payload, e.g. {"tiers": [{"quantity": 100, "price": 2.50}, ...]}
    # (field is a dict, not a bare list).
    tier_pricing: Optional[Dict[str, Any]] = None
    # Default to UTC now. The previous default (naive local-time
    # datetime.now()) was inconsistent with the utcnow() timestamps used
    # elsewhere in these services.
    effective_date: Optional[datetime] = Field(default_factory=datetime.utcnow)
    expiry_date: Optional[datetime] = None
    is_active: bool = True
    brand: Optional[str] = Field(None, max_length=100)
    packaging_size: Optional[str] = Field(None, max_length=50)
    origin_country: Optional[str] = Field(None, max_length=100)
    shelf_life_days: Optional[int] = None
    storage_requirements: Optional[str] = None
    quality_specs: Optional[Dict[str, Any]] = None
    allergens: Optional[Dict[str, Any]] = None
|
||||
|
||||
|
||||
class SupplierPriceListUpdate(BaseModel):
    """Schema for updating supplier price list items.

    All fields are optional; the service layer applies only the fields the
    caller actually supplied (via ``model_dump(exclude_unset=True)``).
    """
    unit_price: Optional[Decimal] = Field(None, gt=0)
    unit_of_measure: Optional[str] = Field(None, max_length=20)
    minimum_order_quantity: Optional[int] = Field(None, ge=1)
    # Tiered pricing payload; structure mirrors SupplierPriceListCreate.tier_pricing.
    tier_pricing: Optional[Dict[str, Any]] = None
    effective_date: Optional[datetime] = None
    expiry_date: Optional[datetime] = None
    is_active: Optional[bool] = None
    brand: Optional[str] = Field(None, max_length=100)
    packaging_size: Optional[str] = Field(None, max_length=50)
    origin_country: Optional[str] = Field(None, max_length=100)
    shelf_life_days: Optional[int] = None
    storage_requirements: Optional[str] = None
    quality_specs: Optional[Dict[str, Any]] = None
    allergens: Optional[Dict[str, Any]] = None
|
||||
|
||||
|
||||
class SupplierPriceListResponse(BaseModel):
    """Schema for supplier price list responses.

    ``from_attributes = True`` allows the model to be populated directly
    from an ORM row instance.
    """
    id: UUID
    tenant_id: UUID
    supplier_id: UUID
    inventory_product_id: UUID
    product_code: Optional[str] = None
    unit_price: Decimal
    unit_of_measure: str
    minimum_order_quantity: Optional[int] = None
    price_per_unit: Decimal
    # Tiered pricing payload; structure mirrors SupplierPriceListCreate.tier_pricing.
    tier_pricing: Optional[Dict[str, Any]] = None
    effective_date: datetime
    expiry_date: Optional[datetime] = None
    is_active: bool
    brand: Optional[str] = None
    packaging_size: Optional[str] = None
    origin_country: Optional[str] = None
    shelf_life_days: Optional[int] = None
    storage_requirements: Optional[str] = None
    quality_specs: Optional[Dict[str, Any]] = None
    allergens: Optional[Dict[str, Any]] = None
    created_at: datetime
    updated_at: datetime
    created_by: UUID
    updated_by: UUID

    class Config:
        from_attributes = True
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# STATISTICS AND REPORTING SCHEMAS
|
||||
# ============================================================================
|
||||
@@ -641,4 +715,4 @@ class DeliverySummaryStats(BaseModel):
|
||||
todays_deliveries: int
|
||||
this_week_deliveries: int
|
||||
overdue_deliveries: int
|
||||
in_transit_deliveries: int
|
||||
in_transit_deliveries: int
|
||||
|
||||
@@ -13,7 +13,8 @@ from app.repositories.supplier_repository import SupplierRepository
|
||||
from app.models.suppliers import Supplier, SupplierStatus, SupplierType
|
||||
from app.schemas.suppliers import (
|
||||
SupplierCreate, SupplierUpdate, SupplierResponse,
|
||||
SupplierSearchParams, SupplierStatistics
|
||||
SupplierSearchParams, SupplierStatistics,
|
||||
SupplierPriceListCreate, SupplierPriceListUpdate, SupplierPriceListResponse
|
||||
)
|
||||
from app.core.config import settings
|
||||
|
||||
@@ -378,3 +379,132 @@ class SupplierService:
|
||||
errors['minimum_order_amount'] = "Minimum order amount cannot be negative"
|
||||
|
||||
return errors
|
||||
|
||||
async def get_supplier_price_lists(
    self,
    supplier_id: UUID,
    tenant_id: UUID,
    is_active: bool = True
) -> List[Any]:
    """Return a supplier's price list rows, delegating to the repository."""
    logger.info(
        "Getting supplier price lists",
        supplier_id=str(supplier_id),
        tenant_id=str(tenant_id),
        is_active=is_active
    )
    rows = await self.repository.get_supplier_price_lists(
        supplier_id=supplier_id,
        tenant_id=tenant_id,
        is_active=is_active
    )
    return rows
|
||||
|
||||
async def get_supplier_price_list(
    self,
    price_list_id: UUID,
    tenant_id: UUID
) -> Optional[Any]:
    """Return one price list item (or None), delegating to the repository."""
    logger.info(
        "Getting supplier price list item",
        price_list_id=str(price_list_id),
        tenant_id=str(tenant_id)
    )
    item = await self.repository.get_supplier_price_list(
        price_list_id=price_list_id,
        tenant_id=tenant_id
    )
    return item
|
||||
|
||||
async def create_supplier_price_list(
    self,
    supplier_id: UUID,
    price_list_data: SupplierPriceListCreate,
    tenant_id: UUID,
    created_by: UUID
) -> Any:
    """Create a new price list item for a supplier.

    Stamps tenant/supplier ownership and audit columns onto the payload,
    then delegates persistence to the repository.
    """
    logger.info(
        "Creating supplier price list item",
        supplier_id=str(supplier_id),
        tenant_id=str(tenant_id)
    )

    payload = price_list_data.model_dump(exclude_unset=True)
    payload['tenant_id'] = tenant_id
    payload['supplier_id'] = supplier_id
    payload['created_by'] = created_by
    payload['updated_by'] = created_by

    # Fall back to unit_price when price_per_unit was not supplied.
    if payload.get('price_per_unit') is None:
        payload['price_per_unit'] = payload['unit_price']

    price_list = await self.repository.create_supplier_price_list(payload)

    logger.info(
        "Supplier price list item created successfully",
        price_list_id=str(price_list.id),
        supplier_id=str(supplier_id)
    )
    return price_list
|
||||
|
||||
async def update_supplier_price_list(
    self,
    price_list_id: UUID,
    price_list_data: SupplierPriceListUpdate,
    tenant_id: UUID,
    updated_by: UUID
) -> Any:
    """Update a price list item with only the fields the caller supplied.

    Args:
        price_list_id: Row to update.
        price_list_data: Partial update payload (unset fields are ignored).
        tenant_id: Tenant of the caller.
        updated_by: User performing the update (stored in audit column).

    NOTE(review): tenant_id is currently only logged; the repository update
    is keyed by price_list_id alone — confirm whether the write path should
    be tenant-scoped like the read path.

    Raises:
        ValueError: Propagated from the repository when the row is missing.
    """
    logger.info(
        "Updating supplier price list item",
        price_list_id=str(price_list_id),
        tenant_id=str(tenant_id)
    )

    update_data = price_list_data.model_dump(exclude_unset=True)
    update_data['updated_by'] = updated_by
    # Use UTC for consistency with the utcnow() timestamps used elsewhere
    # in these services (previously naive local time via datetime.now()).
    update_data['updated_at'] = datetime.utcnow()

    price_list = await self.repository.update_supplier_price_list(
        price_list_id=price_list_id,
        update_data=update_data
    )

    logger.info(
        "Supplier price list item updated successfully",
        price_list_id=str(price_list_id)
    )

    return price_list
|
||||
|
||||
async def delete_supplier_price_list(
    self,
    price_list_id: UUID,
    tenant_id: UUID
) -> bool:
    """Delete a price list item; returns True when a row was removed."""
    logger.info(
        "Deleting supplier price list item",
        price_list_id=str(price_list_id),
        tenant_id=str(tenant_id)
    )

    # NOTE(review): tenant_id is only used for logging here; the repository
    # delete is keyed by price_list_id alone — confirm whether it should be
    # tenant-scoped.
    deleted = await self.repository.delete_supplier_price_list(
        price_list_id=price_list_id
    )

    logger.info(
        "Supplier price list item deletion completed",
        price_list_id=str(price_list_id),
        success=deleted
    )
    return deleted
|
||||
|
||||
@@ -170,7 +170,6 @@ async def clone_demo_data(
|
||||
is_demo=True,
|
||||
is_demo_template=False,
|
||||
business_model=demo_account_type,
|
||||
subscription_tier="demo", # Special tier for demo sessions
|
||||
is_active=True,
|
||||
timezone="Europe/Madrid",
|
||||
owner_id=demo_owner_uuid # Required field - matches seed_demo_users.py
|
||||
@@ -179,6 +178,21 @@ async def clone_demo_data(
|
||||
db.add(tenant)
|
||||
await db.flush() # Flush to get the tenant ID
|
||||
|
||||
# Create demo subscription (enterprise tier for full access)
|
||||
from app.models.tenants import Subscription
|
||||
demo_subscription = Subscription(
|
||||
tenant_id=tenant.id,
|
||||
plan="enterprise", # Demo gets full access
|
||||
status="active",
|
||||
monthly_price=0.0, # Free for demo
|
||||
billing_cycle="monthly",
|
||||
max_users=-1, # Unlimited
|
||||
max_locations=-1,
|
||||
max_products=-1,
|
||||
features={}
|
||||
)
|
||||
db.add(demo_subscription)
|
||||
|
||||
# Create tenant member records for demo owner and staff
|
||||
from app.models.tenants import TenantMember
|
||||
import json
|
||||
|
||||
@@ -17,6 +17,7 @@ from app.schemas.tenants import (
|
||||
from app.services.tenant_service import EnhancedTenantService
|
||||
from app.services.subscription_limit_service import SubscriptionLimitService
|
||||
from app.services.payment_service import PaymentService
|
||||
from app.models import AuditLog
|
||||
from shared.auth.decorators import (
|
||||
get_current_user_dep,
|
||||
require_admin_role_dep
|
||||
@@ -33,7 +34,7 @@ router = APIRouter()
|
||||
route_builder = RouteBuilder("tenants")
|
||||
|
||||
# Initialize audit logger
|
||||
audit_logger = create_audit_logger("tenant-service")
|
||||
audit_logger = create_audit_logger("tenant-service", AuditLog)
|
||||
|
||||
# Global Redis client
|
||||
_redis_client = None
|
||||
@@ -555,6 +556,73 @@ async def get_tenant_statistics(
|
||||
# SUBSCRIPTION OPERATIONS
|
||||
# ============================================================================
|
||||
|
||||
@router.get("/api/v1/subscriptions/{tenant_id}/tier")
async def get_tenant_subscription_tier_fast(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    redis_client = Depends(get_tenant_redis_client)
):
    """
    Fast cached lookup for tenant subscription tier

    This endpoint is optimized for high-frequency access (e.g., from gateway middleware)
    with Redis caching (10-minute TTL). No authentication required for internal service calls.
    """
    try:
        from app.services.subscription_cache import get_subscription_cache_service

        cache = get_subscription_cache_service(redis_client)
        tier = await cache.get_tenant_tier_cached(str(tenant_id))
        return {"tenant_id": str(tenant_id), "tier": tier, "cached": True}
    except Exception as e:
        logger.error("Failed to get subscription tier",
                    tenant_id=str(tenant_id),
                    error=str(e))
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to get subscription tier"
        )
||||
|
||||
@router.get(route_builder.build_base_route("subscriptions/{tenant_id}/active", include_tenant_prefix=False))
async def get_tenant_active_subscription(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    redis_client = Depends(get_tenant_redis_client)
):
    """
    Get full active subscription with caching

    Returns complete subscription details with 10-minute Redis cache.
    """
    try:
        from app.services.subscription_cache import get_subscription_cache_service

        cache = get_subscription_cache_service(redis_client)
        subscription = await cache.get_tenant_subscription_cached(str(tenant_id))

        if subscription:
            return subscription

        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="No active subscription found"
        )
    except HTTPException:
        # Preserve deliberate HTTP errors (e.g. the 404 above).
        raise
    except Exception as e:
        logger.error("Failed to get active subscription",
                    tenant_id=str(tenant_id),
                    error=str(e))
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to get active subscription"
        )
||||
|
||||
@router.get(route_builder.build_base_route("subscriptions/{tenant_id}/limits", include_tenant_prefix=False))
|
||||
async def get_subscription_limits(
|
||||
tenant_id: UUID = Path(..., description="Tenant ID"),
|
||||
@@ -760,6 +828,25 @@ async def upgrade_subscription_plan(
|
||||
new_plan=new_plan,
|
||||
user_id=current_user["user_id"])
|
||||
|
||||
# Invalidate subscription cache to ensure immediate availability of new tier
|
||||
try:
|
||||
from app.services.subscription_cache import get_subscription_cache_service
|
||||
import shared.redis_utils
|
||||
from app.core.config import settings
|
||||
|
||||
redis_client = await shared.redis_utils.initialize_redis(settings.REDIS_URL)
|
||||
cache_service = get_subscription_cache_service(redis_client)
|
||||
await cache_service.invalidate_subscription_cache(str(tenant_id))
|
||||
|
||||
logger.info("Subscription cache invalidated after upgrade",
|
||||
tenant_id=str(tenant_id),
|
||||
new_plan=new_plan)
|
||||
except Exception as cache_error:
|
||||
logger.error("Failed to invalidate subscription cache after upgrade",
|
||||
tenant_id=str(tenant_id),
|
||||
error=str(cache_error))
|
||||
# Don't fail the upgrade if cache invalidation fails
|
||||
|
||||
return {
|
||||
"success": True,
|
||||
"message": f"Plan successfully upgraded to {new_plan}",
|
||||
|
||||
@@ -38,7 +38,6 @@ class Tenant(Base):
|
||||
|
||||
# Status
|
||||
is_active = Column(Boolean, default=True)
|
||||
subscription_tier = Column(String(50), default="starter")
|
||||
|
||||
# Demo account flags
|
||||
is_demo = Column(Boolean, default=False, index=True)
|
||||
@@ -63,9 +62,24 @@ class Tenant(Base):
|
||||
|
||||
# Relationships - only within tenant service
|
||||
members = relationship("TenantMember", back_populates="tenant", cascade="all, delete-orphan")
|
||||
|
||||
subscriptions = relationship("Subscription", back_populates="tenant", cascade="all, delete-orphan")
|
||||
|
||||
# REMOVED: users relationship - no cross-service SQLAlchemy relationships
|
||||
|
||||
|
||||
@property
def subscription_tier(self):
    """
    Get current subscription tier from active subscription

    Note: This is a computed property that requires subscription relationship to be loaded.
    For performance-critical operations, use the subscription cache service directly.
    """
    # First active subscription wins; default to "starter" when none exist.
    active_plan = next(
        (sub.plan for sub in self.subscriptions if sub.status == 'active'),
        None,
    )
    return active_plan if active_plan is not None else "starter"
|
||||
|
||||
def __repr__(self):
|
||||
return f"<Tenant(id={self.id}, name={self.name})>"
|
||||
|
||||
|
||||
@@ -13,6 +13,7 @@ import json
|
||||
from .base import TenantBaseRepository
|
||||
from app.models.tenants import Subscription
|
||||
from shared.database.exceptions import DatabaseError, ValidationError, DuplicateRecordError
|
||||
from shared.subscription.plans import SubscriptionPlanMetadata, QuotaLimits, PlanPricing
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
@@ -44,13 +45,30 @@ class SubscriptionRepository(TenantBaseRepository):
|
||||
if existing_subscription:
|
||||
raise DuplicateRecordError(f"Tenant already has an active subscription")
|
||||
|
||||
# Set default values based on plan
|
||||
plan_config = self._get_plan_configuration(subscription_data["plan"])
|
||||
|
||||
# Set defaults from plan config
|
||||
for key, value in plan_config.items():
|
||||
if key not in subscription_data:
|
||||
subscription_data[key] = value
|
||||
# Set default values based on plan from centralized configuration
|
||||
plan = subscription_data["plan"]
|
||||
plan_info = SubscriptionPlanMetadata.get_plan_info(plan)
|
||||
|
||||
# Set defaults from centralized plan configuration
|
||||
if "monthly_price" not in subscription_data:
|
||||
billing_cycle = subscription_data.get("billing_cycle", "monthly")
|
||||
subscription_data["monthly_price"] = float(
|
||||
PlanPricing.get_price(plan, billing_cycle)
|
||||
)
|
||||
|
||||
if "max_users" not in subscription_data:
|
||||
subscription_data["max_users"] = QuotaLimits.get_limit('MAX_USERS', plan) or -1
|
||||
|
||||
if "max_locations" not in subscription_data:
|
||||
subscription_data["max_locations"] = QuotaLimits.get_limit('MAX_LOCATIONS', plan) or -1
|
||||
|
||||
if "max_products" not in subscription_data:
|
||||
subscription_data["max_products"] = QuotaLimits.get_limit('MAX_PRODUCTS', plan) or -1
|
||||
|
||||
if "features" not in subscription_data:
|
||||
subscription_data["features"] = {
|
||||
feature: True for feature in plan_info.get("features", [])
|
||||
}
|
||||
|
||||
# Set default subscription values
|
||||
if "status" not in subscription_data:
|
||||
@@ -129,37 +147,47 @@ class SubscriptionRepository(TenantBaseRepository):
|
||||
async def update_subscription_plan(
|
||||
self,
|
||||
subscription_id: str,
|
||||
new_plan: str
|
||||
new_plan: str,
|
||||
billing_cycle: str = "monthly"
|
||||
) -> Optional[Subscription]:
|
||||
"""Update subscription plan and pricing"""
|
||||
"""Update subscription plan and pricing using centralized configuration"""
|
||||
try:
|
||||
valid_plans = ["starter", "professional", "enterprise"]
|
||||
if new_plan not in valid_plans:
|
||||
raise ValidationError(f"Invalid plan. Must be one of: {valid_plans}")
|
||||
|
||||
# Get new plan configuration
|
||||
plan_config = self._get_plan_configuration(new_plan)
|
||||
|
||||
|
||||
# Get current subscription to find tenant_id for cache invalidation
|
||||
subscription = await self.get_by_id(subscription_id)
|
||||
if not subscription:
|
||||
raise ValidationError(f"Subscription {subscription_id} not found")
|
||||
|
||||
# Get new plan configuration from centralized source
|
||||
plan_info = SubscriptionPlanMetadata.get_plan_info(new_plan)
|
||||
|
||||
# Update subscription with new plan details
|
||||
update_data = {
|
||||
"plan": new_plan,
|
||||
"monthly_price": plan_config["monthly_price"],
|
||||
"max_users": plan_config["max_users"],
|
||||
"max_locations": plan_config["max_locations"],
|
||||
"max_products": plan_config["max_products"],
|
||||
"features": plan_config.get("features", {}),
|
||||
"monthly_price": float(PlanPricing.get_price(new_plan, billing_cycle)),
|
||||
"billing_cycle": billing_cycle,
|
||||
"max_users": QuotaLimits.get_limit('MAX_USERS', new_plan) or -1,
|
||||
"max_locations": QuotaLimits.get_limit('MAX_LOCATIONS', new_plan) or -1,
|
||||
"max_products": QuotaLimits.get_limit('MAX_PRODUCTS', new_plan) or -1,
|
||||
"features": {feature: True for feature in plan_info.get("features", [])},
|
||||
"updated_at": datetime.utcnow()
|
||||
}
|
||||
|
||||
|
||||
updated_subscription = await self.update(subscription_id, update_data)
|
||||
|
||||
|
||||
# Invalidate cache
|
||||
await self._invalidate_cache(str(subscription.tenant_id))
|
||||
|
||||
logger.info("Subscription plan updated",
|
||||
subscription_id=subscription_id,
|
||||
new_plan=new_plan,
|
||||
new_price=plan_config["monthly_price"])
|
||||
|
||||
new_price=update_data["monthly_price"])
|
||||
|
||||
return updated_subscription
|
||||
|
||||
|
||||
except ValidationError:
|
||||
raise
|
||||
except Exception as e:
|
||||
@@ -176,19 +204,27 @@ class SubscriptionRepository(TenantBaseRepository):
|
||||
) -> Optional[Subscription]:
|
||||
"""Cancel a subscription"""
|
||||
try:
|
||||
# Get subscription to find tenant_id for cache invalidation
|
||||
subscription = await self.get_by_id(subscription_id)
|
||||
if not subscription:
|
||||
raise ValidationError(f"Subscription {subscription_id} not found")
|
||||
|
||||
update_data = {
|
||||
"status": "cancelled",
|
||||
"updated_at": datetime.utcnow()
|
||||
}
|
||||
|
||||
|
||||
updated_subscription = await self.update(subscription_id, update_data)
|
||||
|
||||
|
||||
# Invalidate cache
|
||||
await self._invalidate_cache(str(subscription.tenant_id))
|
||||
|
||||
logger.info("Subscription cancelled",
|
||||
subscription_id=subscription_id,
|
||||
reason=reason)
|
||||
|
||||
|
||||
return updated_subscription
|
||||
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to cancel subscription",
|
||||
subscription_id=subscription_id,
|
||||
@@ -202,12 +238,20 @@ class SubscriptionRepository(TenantBaseRepository):
|
||||
) -> Optional[Subscription]:
|
||||
"""Suspend a subscription"""
|
||||
try:
|
||||
# Get subscription to find tenant_id for cache invalidation
|
||||
subscription = await self.get_by_id(subscription_id)
|
||||
if not subscription:
|
||||
raise ValidationError(f"Subscription {subscription_id} not found")
|
||||
|
||||
update_data = {
|
||||
"status": "suspended",
|
||||
"updated_at": datetime.utcnow()
|
||||
}
|
||||
|
||||
updated_subscription = await self.update(subscription_id, update_data)
|
||||
|
||||
# Invalidate cache
|
||||
await self._invalidate_cache(str(subscription.tenant_id))
|
||||
|
||||
logger.info("Subscription suspended",
|
||||
subscription_id=subscription_id,
|
||||
@@ -227,23 +271,31 @@ class SubscriptionRepository(TenantBaseRepository):
|
||||
) -> Optional[Subscription]:
|
||||
"""Reactivate a cancelled or suspended subscription"""
|
||||
try:
|
||||
# Get subscription to find tenant_id for cache invalidation
|
||||
subscription = await self.get_by_id(subscription_id)
|
||||
if not subscription:
|
||||
raise ValidationError(f"Subscription {subscription_id} not found")
|
||||
|
||||
# Reset billing date when reactivating
|
||||
next_billing_date = datetime.utcnow() + timedelta(days=30)
|
||||
|
||||
|
||||
update_data = {
|
||||
"status": "active",
|
||||
"next_billing_date": next_billing_date,
|
||||
"updated_at": datetime.utcnow()
|
||||
}
|
||||
|
||||
|
||||
updated_subscription = await self.update(subscription_id, update_data)
|
||||
|
||||
|
||||
# Invalidate cache
|
||||
await self._invalidate_cache(str(subscription.tenant_id))
|
||||
|
||||
logger.info("Subscription reactivated",
|
||||
subscription_id=subscription_id,
|
||||
next_billing_date=next_billing_date)
|
||||
|
||||
|
||||
return updated_subscription
|
||||
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to reactivate subscription",
|
||||
subscription_id=subscription_id,
|
||||
@@ -394,63 +446,23 @@ class SubscriptionRepository(TenantBaseRepository):
|
||||
logger.error("Failed to cleanup old subscriptions",
|
||||
error=str(e))
|
||||
raise DatabaseError(f"Cleanup failed: {str(e)}")
|
||||
|
||||
def _get_plan_configuration(self, plan: str) -> Dict[str, Any]:
    """Get configuration for a subscription plan.

    Returns the hard-coded pricing/limits/features bundle for the given
    plan name. Unknown plan names fall back to the "starter" bundle.
    """
    plan_configs = {
        "starter": {
            "monthly_price": 49.0,
            "max_users": 5,  # Reasonable for small bakeries
            "max_locations": 1,
            "max_products": 50,
            "features": {
                "inventory_management": "basic",
                "demand_prediction": "basic",
                "production_reports": "basic",
                "analytics": "basic",
                "support": "email",
                "trial_days": 14,
                "locations": "1_location",
                "ai_model_configuration": "basic"  # Added AI model configuration for all tiers
            }
        },
        "professional": {
            "monthly_price": 129.0,
            "max_users": 15,  # Good for growing bakeries
            "max_locations": 2,
            "max_products": -1,  # Unlimited products
            "features": {
                "inventory_management": "advanced",
                "demand_prediction": "ai_92_percent",
                "production_management": "complete",
                "pos_integrated": True,
                "logistics": "basic",
                "analytics": "advanced",
                "support": "priority_24_7",
                "trial_days": 14,
                "locations": "1_2_locations",
                "ai_model_configuration": "advanced"  # Enhanced AI model configuration for Professional
            }
        },
        "enterprise": {
            "monthly_price": 399.0,
            "max_users": -1,  # Unlimited users
            "max_locations": -1,  # Unlimited locations
            "max_products": -1,  # Unlimited products
            "features": {
                "inventory_management": "multi_location",
                "demand_prediction": "ai_personalized",
                "production_optimization": "capacity",
                "erp_integration": True,
                "logistics": "advanced",
                "analytics": "predictive",
                "api_access": "personalized",
                "account_manager": True,
                "demo": "personalized",
                "locations": "unlimited_obradores",
                "ai_model_configuration": "enterprise"  # Full AI model configuration for Enterprise
            }
        }
    }

    return plan_configs.get(plan, plan_configs["starter"])
|
||||
|
||||
async def _invalidate_cache(self, tenant_id: str) -> None:
    """
    Invalidate subscription cache for a tenant

    Args:
        tenant_id: Tenant ID
    """
    try:
        from app.services.subscription_cache import get_subscription_cache_service

        cache = get_subscription_cache_service()
        await cache.invalidate_subscription_cache(tenant_id)

        logger.debug("Invalidated subscription cache from repository",
                    tenant_id=tenant_id)
    except Exception as e:
        # Cache invalidation is best-effort; a stale entry expires on TTL.
        logger.warning("Failed to invalidate cache (non-critical)",
                      tenant_id=tenant_id, error=str(e))
|
||||
|
||||
@@ -53,8 +53,6 @@ class TenantRepository(TenantBaseRepository):
|
||||
tenant_data["city"] = "Madrid"
|
||||
if "is_active" not in tenant_data:
|
||||
tenant_data["is_active"] = True
|
||||
if "subscription_tier" not in tenant_data:
|
||||
tenant_data["subscription_tier"] = "basic"
|
||||
if "ml_model_trained" not in tenant_data:
|
||||
tenant_data["ml_model_trained"] = False
|
||||
|
||||
@@ -189,35 +187,6 @@ class TenantRepository(TenantBaseRepository):
|
||||
error=str(e))
|
||||
raise DatabaseError(f"Failed to update model status: {str(e)}")
|
||||
|
||||
async def update_subscription_tier(
    self,
    tenant_id: str,
    subscription_tier: str
) -> Optional[Tenant]:
    """Update tenant subscription tier"""
    valid_tiers = ["basic", "professional", "enterprise"]
    try:
        if subscription_tier not in valid_tiers:
            raise ValidationError(f"Invalid subscription tier. Must be one of: {valid_tiers}")

        updated_tenant = await self.update(tenant_id, {
            "subscription_tier": subscription_tier,
            "updated_at": datetime.utcnow()
        })

        logger.info("Tenant subscription tier updated",
                   tenant_id=tenant_id,
                   subscription_tier=subscription_tier)
        return updated_tenant
    except ValidationError:
        # Validation problems surface unchanged to the caller.
        raise
    except Exception as e:
        logger.error("Failed to update subscription tier",
                    tenant_id=tenant_id,
                    error=str(e))
        raise DatabaseError(f"Failed to update subscription: {str(e)}")
|
||||
|
||||
async def get_tenants_by_location(
|
||||
self,
|
||||
@@ -291,17 +260,21 @@ class TenantRepository(TenantBaseRepository):
|
||||
result = await self.session.execute(business_type_query)
|
||||
business_type_stats = {row.business_type: row.count for row in result.fetchall()}
|
||||
|
||||
# Get tenants by subscription tier
|
||||
# Get tenants by subscription tier - now from subscriptions table
|
||||
tier_query = text("""
|
||||
SELECT subscription_tier, COUNT(*) as count
|
||||
FROM tenants
|
||||
WHERE is_active = true
|
||||
GROUP BY subscription_tier
|
||||
SELECT s.plan as subscription_tier, COUNT(*) as count
|
||||
FROM tenants t
|
||||
LEFT JOIN subscriptions s ON t.id = s.tenant_id AND s.status = 'active'
|
||||
WHERE t.is_active = true
|
||||
GROUP BY s.plan
|
||||
ORDER BY count DESC
|
||||
""")
|
||||
|
||||
tier_result = await self.session.execute(tier_query)
|
||||
tier_stats = {row.subscription_tier: row.count for row in tier_result.fetchall()}
|
||||
tier_stats = {}
|
||||
for row in tier_result.fetchall():
|
||||
tier = row.subscription_tier if row.subscription_tier else "no_subscription"
|
||||
tier_stats[tier] = row.count
|
||||
|
||||
# Get model training statistics
|
||||
model_query = text("""
|
||||
|
||||
@@ -57,7 +57,7 @@ class BakeryRegistration(BaseModel):
|
||||
return v
|
||||
|
||||
class TenantResponse(BaseModel):
|
||||
"""Tenant response schema - FIXED VERSION"""
|
||||
"""Tenant response schema - Updated to use subscription relationship"""
|
||||
id: str # ✅ Keep as str for Pydantic validation
|
||||
name: str
|
||||
subdomain: Optional[str]
|
||||
@@ -68,12 +68,12 @@ class TenantResponse(BaseModel):
|
||||
postal_code: str
|
||||
phone: Optional[str]
|
||||
is_active: bool
|
||||
subscription_tier: str
|
||||
subscription_plan: Optional[str] = None # Populated from subscription relationship or service
|
||||
ml_model_trained: bool
|
||||
last_training_date: Optional[datetime]
|
||||
owner_id: str # ✅ Keep as str for Pydantic validation
|
||||
created_at: datetime
|
||||
|
||||
|
||||
# ✅ FIX: Add custom validator to convert UUID to string
|
||||
@field_validator('id', 'owner_id', mode='before')
|
||||
@classmethod
|
||||
@@ -82,7 +82,7 @@ class TenantResponse(BaseModel):
|
||||
if isinstance(v, UUID):
|
||||
return str(v)
|
||||
return v
|
||||
|
||||
|
||||
class Config:
|
||||
from_attributes = True
|
||||
|
||||
|
||||
258
services/tenant/app/services/subscription_cache.py
Normal file
258
services/tenant/app/services/subscription_cache.py
Normal file
@@ -0,0 +1,258 @@
|
||||
"""
|
||||
Subscription Cache Service
|
||||
Provides Redis-based caching for subscription data with 10-minute TTL
|
||||
"""
|
||||
|
||||
import structlog
|
||||
from typing import Optional, Dict, Any
|
||||
import json
|
||||
from datetime import datetime, timezone
|
||||
|
||||
from app.repositories import SubscriptionRepository
|
||||
from app.models.tenants import Subscription
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
# Cache TTL in seconds (10 minutes)
|
||||
SUBSCRIPTION_CACHE_TTL = 600
|
||||
|
||||
|
||||
class SubscriptionCacheService:
|
||||
"""Service for cached subscription lookups"""
|
||||
|
||||
def __init__(self, redis_client=None, database_manager=None):
    """Store optional Redis client and database manager dependencies.

    Both may be None: the database manager is created lazily by
    ensure_database_manager(), and lookups fall back to the database when
    Redis is unavailable.
    """
    self.redis = redis_client
    self.database_manager = database_manager
|
||||
async def ensure_database_manager(self):
    """Lazily create the database manager on first use (no-op afterwards)."""
    if self.database_manager is not None:
        return
    from app.core.config import settings
    from shared.database.base import create_database_manager
    self.database_manager = create_database_manager(settings.DATABASE_URL, "tenant-service")
|
||||
|
||||
async def get_tenant_tier_cached(self, tenant_id: str) -> str:
|
||||
"""
|
||||
Get tenant subscription tier with Redis caching
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant ID
|
||||
|
||||
Returns:
|
||||
Subscription tier (starter, professional, enterprise)
|
||||
"""
|
||||
try:
|
||||
# Ensure database manager is initialized
|
||||
await self.ensure_database_manager()
|
||||
|
||||
cache_key = f"subscription:tier:{tenant_id}"
|
||||
|
||||
# Try to get from cache
|
||||
if self.redis:
|
||||
try:
|
||||
cached_tier = await self.redis.get(cache_key)
|
||||
if cached_tier:
|
||||
logger.debug("Subscription tier cache hit", tenant_id=tenant_id, tier=cached_tier)
|
||||
return cached_tier.decode('utf-8') if isinstance(cached_tier, bytes) else cached_tier
|
||||
except Exception as e:
|
||||
logger.warning("Redis cache read failed, falling back to database",
|
||||
tenant_id=tenant_id, error=str(e))
|
||||
|
||||
# Cache miss or Redis unavailable - fetch from database
|
||||
logger.debug("Subscription tier cache miss", tenant_id=tenant_id)
|
||||
|
||||
async with self.database_manager.get_session() as db_session:
|
||||
subscription_repo = SubscriptionRepository(Subscription, db_session)
|
||||
subscription = await subscription_repo.get_active_subscription(tenant_id)
|
||||
|
||||
if not subscription:
|
||||
logger.warning("No active subscription found, returning starter tier",
|
||||
tenant_id=tenant_id)
|
||||
return "starter"
|
||||
|
||||
tier = subscription.plan
|
||||
|
||||
# Cache the result
|
||||
if self.redis:
|
||||
try:
|
||||
await self.redis.setex(cache_key, SUBSCRIPTION_CACHE_TTL, tier)
|
||||
logger.debug("Cached subscription tier", tenant_id=tenant_id, tier=tier)
|
||||
except Exception as e:
|
||||
logger.warning("Failed to cache subscription tier",
|
||||
tenant_id=tenant_id, error=str(e))
|
||||
|
||||
return tier
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to get subscription tier",
|
||||
tenant_id=tenant_id, error=str(e))
|
||||
return "starter" # Fallback to starter on error
|
||||
|
||||
async def get_tenant_subscription_cached(self, tenant_id: str) -> Optional[Dict[str, Any]]:
|
||||
"""
|
||||
Get full tenant subscription with Redis caching
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant ID
|
||||
|
||||
Returns:
|
||||
Subscription data as dictionary or None
|
||||
"""
|
||||
try:
|
||||
# Ensure database manager is initialized
|
||||
await self.ensure_database_manager()
|
||||
|
||||
cache_key = f"subscription:full:{tenant_id}"
|
||||
|
||||
# Try to get from cache
|
||||
if self.redis:
|
||||
try:
|
||||
cached_data = await self.redis.get(cache_key)
|
||||
if cached_data:
|
||||
logger.debug("Subscription cache hit", tenant_id=tenant_id)
|
||||
data = json.loads(cached_data.decode('utf-8') if isinstance(cached_data, bytes) else cached_data)
|
||||
return data
|
||||
except Exception as e:
|
||||
logger.warning("Redis cache read failed, falling back to database",
|
||||
tenant_id=tenant_id, error=str(e))
|
||||
|
||||
# Cache miss or Redis unavailable - fetch from database
|
||||
logger.debug("Subscription cache miss", tenant_id=tenant_id)
|
||||
|
||||
async with self.database_manager.get_session() as db_session:
|
||||
subscription_repo = SubscriptionRepository(Subscription, db_session)
|
||||
subscription = await subscription_repo.get_active_subscription(tenant_id)
|
||||
|
||||
if not subscription:
|
||||
logger.warning("No active subscription found", tenant_id=tenant_id)
|
||||
return None
|
||||
|
||||
# Convert to dictionary
|
||||
subscription_data = {
|
||||
"id": str(subscription.id),
|
||||
"tenant_id": str(subscription.tenant_id),
|
||||
"plan": subscription.plan,
|
||||
"status": subscription.status,
|
||||
"monthly_price": subscription.monthly_price,
|
||||
"billing_cycle": subscription.billing_cycle,
|
||||
"next_billing_date": subscription.next_billing_date.isoformat() if subscription.next_billing_date else None,
|
||||
"trial_ends_at": subscription.trial_ends_at.isoformat() if subscription.trial_ends_at else None,
|
||||
"cancelled_at": subscription.cancelled_at.isoformat() if subscription.cancelled_at else None,
|
||||
"cancellation_effective_date": subscription.cancellation_effective_date.isoformat() if subscription.cancellation_effective_date else None,
|
||||
"max_users": subscription.max_users,
|
||||
"max_locations": subscription.max_locations,
|
||||
"max_products": subscription.max_products,
|
||||
"features": subscription.features,
|
||||
"created_at": subscription.created_at.isoformat() if subscription.created_at else None,
|
||||
"updated_at": subscription.updated_at.isoformat() if subscription.updated_at else None
|
||||
}
|
||||
|
||||
# Cache the result
|
||||
if self.redis:
|
||||
try:
|
||||
await self.redis.setex(
|
||||
cache_key,
|
||||
SUBSCRIPTION_CACHE_TTL,
|
||||
json.dumps(subscription_data)
|
||||
)
|
||||
logger.debug("Cached subscription data", tenant_id=tenant_id)
|
||||
except Exception as e:
|
||||
logger.warning("Failed to cache subscription data",
|
||||
tenant_id=tenant_id, error=str(e))
|
||||
|
||||
return subscription_data
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to get subscription",
|
||||
tenant_id=tenant_id, error=str(e))
|
||||
return None
|
||||
|
||||
async def invalidate_subscription_cache(self, tenant_id: str) -> None:
|
||||
"""
|
||||
Invalidate subscription cache for a tenant
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant ID
|
||||
"""
|
||||
try:
|
||||
if not self.redis:
|
||||
logger.debug("Redis not available, skipping cache invalidation",
|
||||
tenant_id=tenant_id)
|
||||
return
|
||||
|
||||
tier_key = f"subscription:tier:{tenant_id}"
|
||||
full_key = f"subscription:full:{tenant_id}"
|
||||
|
||||
# Delete both cache keys
|
||||
await self.redis.delete(tier_key, full_key)
|
||||
|
||||
logger.info("Invalidated subscription cache",
|
||||
tenant_id=tenant_id)
|
||||
|
||||
except Exception as e:
|
||||
logger.warning("Failed to invalidate subscription cache",
|
||||
tenant_id=tenant_id, error=str(e))
|
||||
|
||||
async def warm_cache(self, tenant_id: str) -> None:
|
||||
"""
|
||||
Pre-warm the cache by loading subscription data
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant ID
|
||||
"""
|
||||
try:
|
||||
logger.debug("Warming subscription cache", tenant_id=tenant_id)
|
||||
|
||||
# Load both tier and full subscription to cache
|
||||
await self.get_tenant_tier_cached(tenant_id)
|
||||
await self.get_tenant_subscription_cached(tenant_id)
|
||||
|
||||
logger.info("Subscription cache warmed", tenant_id=tenant_id)
|
||||
|
||||
except Exception as e:
|
||||
logger.warning("Failed to warm subscription cache",
|
||||
tenant_id=tenant_id, error=str(e))
|
||||
|
||||
|
||||
# Singleton instance for easy access
|
||||
_cache_service_instance: Optional[SubscriptionCacheService] = None
|
||||
|
||||
|
||||
def get_subscription_cache_service(redis_client=None) -> SubscriptionCacheService:
    """
    Get or create subscription cache service singleton.

    Args:
        redis_client: Optional Redis client. When omitted on the first call,
            a client is initialized synchronously if no event loop is running;
            otherwise Redis stays unset and can be attached on a later call.

    Returns:
        SubscriptionCacheService instance (shared singleton)
    """
    global _cache_service_instance

    if _cache_service_instance is None:
        redis_client_instance = redis_client

        if redis_client_instance is None:
            from shared.redis_utils import initialize_redis
            from app.core.config import settings
            import asyncio

            try:
                asyncio.get_running_loop()
            except RuntimeError:
                # No event loop is running, so it is safe to drive the async
                # Redis initialization to completion right here.
                try:
                    redis_client_instance = asyncio.run(initialize_redis(settings.REDIS_URL))
                except Exception as e:
                    # Best-effort: the cache degrades gracefully without Redis,
                    # but don't swallow the failure silently.
                    logger.warning("Redis initialization for subscription cache failed",
                                   error=str(e))
            else:
                # An event loop is already running: asyncio.run() would raise.
                # Leave Redis unset; a later call can attach a client via the
                # elif branch below.
                pass

        _cache_service_instance = SubscriptionCacheService(redis_client=redis_client_instance)
    elif redis_client is not None and _cache_service_instance.redis is None:
        # Late-attach a Redis client to an already-created singleton.
        _cache_service_instance.redis = redis_client

    return _cache_service_instance
|
||||
@@ -251,26 +251,34 @@ class SubscriptionLimitService:
|
||||
return {"can_upgrade": False, "reason": f"Invalid plan: {new_plan}"}
|
||||
|
||||
# Check current usage against new plan limits
|
||||
from app.repositories.subscription_repository import SubscriptionRepository
|
||||
temp_repo = SubscriptionRepository(Subscription, db_session)
|
||||
new_plan_config = temp_repo._get_plan_configuration(new_plan)
|
||||
from shared.subscription.plans import SubscriptionPlanMetadata, PlanPricing
|
||||
new_plan_config = SubscriptionPlanMetadata.get_plan_info(new_plan)
|
||||
|
||||
# Get the max_users limit from the plan limits
|
||||
plan_limits = new_plan_config.get('limits', {})
|
||||
max_users_limit = plan_limits.get('users', 5) # Default to 5 if not specified
|
||||
# Convert "Unlimited" string to None for comparison
|
||||
if max_users_limit == "Unlimited":
|
||||
max_users_limit = None
|
||||
elif max_users_limit is None:
|
||||
max_users_limit = -1 # Use -1 to represent unlimited in the comparison
|
||||
|
||||
# Check if current usage fits new plan
|
||||
members = await self.member_repo.get_tenant_members(tenant_id, active_only=True)
|
||||
current_users = len(members)
|
||||
|
||||
if new_plan_config["max_users"] != -1 and current_users > new_plan_config["max_users"]:
|
||||
if max_users_limit is not None and max_users_limit != -1 and current_users > max_users_limit:
|
||||
return {
|
||||
"can_upgrade": False,
|
||||
"reason": f"Current usage ({current_users} users) exceeds {new_plan} plan limits ({new_plan_config['max_users']} users)"
|
||||
"reason": f"Current usage ({current_users} users) exceeds {new_plan} plan limits ({max_users_limit} users)"
|
||||
}
|
||||
|
||||
return {
|
||||
"can_upgrade": True,
|
||||
"current_plan": current_subscription.plan,
|
||||
"new_plan": new_plan,
|
||||
"price_change": new_plan_config["monthly_price"] - current_subscription.monthly_price,
|
||||
"new_features": new_plan_config.get("features", {}),
|
||||
"price_change": float(PlanPricing.get_price(new_plan)) - current_subscription.monthly_price,
|
||||
"new_features": new_plan_config.get("features", []),
|
||||
"reason": "Upgrade validation successful"
|
||||
}
|
||||
|
||||
|
||||
@@ -0,0 +1,103 @@
|
||||
"""remove subscription_tier from tenants
|
||||
|
||||
Revision ID: 20251028_remove_sub_tier
|
||||
Revises: 20251025_supplier_approval
|
||||
Create Date: 2025-10-28 12:00:00.000000
|
||||
|
||||
This migration removes the denormalized subscription_tier column from the tenants table.
|
||||
The subscription tier is now sourced exclusively from the subscriptions table (single source of truth).
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '20251028_remove_sub_tier'
|
||||
down_revision = '20251025_supplier_approval'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
    """
    Remove subscription_tier column from tenants table.

    Before dropping the column, every tenant is guaranteed an active
    subscription row, since tier lookups are served exclusively from the
    subscriptions table after this migration.
    """
    # Pre-flight check: Ensure all tenants have active subscriptions
    # This is important to avoid breaking the application
    connection = op.get_bind()

    # Check for tenants without subscriptions (no active subscription row)
    result = connection.execute(sa.text("""
        SELECT COUNT(*) as count
        FROM tenants t
        LEFT JOIN subscriptions s ON t.id = s.tenant_id AND s.status = 'active'
        WHERE s.id IS NULL
    """))

    orphaned_count = result.fetchone()[0]

    if orphaned_count > 0:
        # Create default subscriptions for orphaned tenants.
        # NOTE(review): the 49.0 price and 5/1/50 limits presumably mirror the
        # 'starter' plan configuration — confirm against shared plan metadata.
        # gen_random_uuid() and ::jsonb assume a PostgreSQL backend.
        connection.execute(sa.text("""
            INSERT INTO subscriptions (
                id, tenant_id, plan, status, monthly_price, billing_cycle,
                max_users, max_locations, max_products, features, created_at, updated_at
            )
            SELECT
                gen_random_uuid(),
                t.id,
                'starter',
                'active',
                49.0,
                'monthly',
                5,
                1,
                50,
                '{"inventory_management": true, "demand_prediction": true}'::jsonb,
                NOW(),
                NOW()
            FROM tenants t
            LEFT JOIN subscriptions s ON t.id = s.tenant_id AND s.status = 'active'
            WHERE s.id IS NULL
        """))

        print(f"Created default subscriptions for {orphaned_count} tenants without subscriptions")

    # Drop the subscription_tier column
    op.drop_column('tenants', 'subscription_tier')

    print("Successfully removed subscription_tier column from tenants table")
|
||||
|
||||
|
||||
def downgrade():
    """
    Re-add subscription_tier column and populate from subscriptions table.

    Note: This is for rollback purposes only. Going forward, always use subscriptions table.
    """
    # Add the column back (nullable first so existing rows don't violate it)
    op.add_column('tenants',
        sa.Column('subscription_tier', sa.String(length=50), nullable=True)
    )

    # Populate from subscriptions table (active subscription's plan wins)
    connection = op.get_bind()
    connection.execute(sa.text("""
        UPDATE tenants t
        SET subscription_tier = s.plan
        FROM subscriptions s
        WHERE t.id = s.tenant_id
        AND s.status = 'active'
    """))

    # Set default for any tenants without active subscriptions
    connection.execute(sa.text("""
        UPDATE tenants
        SET subscription_tier = 'starter'
        WHERE subscription_tier IS NULL
    """))

    # Make it non-nullable after population, matching the pre-migration schema
    op.alter_column('tenants', 'subscription_tier', nullable=False)

    print("Restored subscription_tier column (downgrade)")
|
||||
@@ -55,7 +55,6 @@ TENANTS_DATA = [
|
||||
"id": DEMO_TENANT_SAN_PABLO,
|
||||
"name": "Panadería San Pablo",
|
||||
"business_model": "san_pablo",
|
||||
"subscription_tier": "demo_template",
|
||||
"is_demo": False, # Template tenants are not marked as demo
|
||||
"is_demo_template": True, # They are templates for cloning
|
||||
"is_active": True,
|
||||
@@ -88,7 +87,6 @@ TENANTS_DATA = [
|
||||
"id": DEMO_TENANT_LA_ESPIGA,
|
||||
"name": "Panadería La Espiga - Obrador Central",
|
||||
"business_model": "la_espiga",
|
||||
"subscription_tier": "demo_template",
|
||||
"is_demo": False,
|
||||
"is_demo_template": True,
|
||||
"is_active": True,
|
||||
@@ -173,6 +171,41 @@ async def seed_tenants(db: AsyncSession) -> dict:
|
||||
db.add(tenant)
|
||||
created_count += 1
|
||||
|
||||
# Flush to get tenant IDs before creating subscriptions
|
||||
await db.flush()
|
||||
|
||||
# Create demo subscriptions for all tenants (enterprise tier for full demo access)
|
||||
from app.models.tenants import Subscription
|
||||
|
||||
for tenant_data in TENANTS_DATA:
|
||||
tenant_id = tenant_data["id"]
|
||||
|
||||
# Check if subscription already exists
|
||||
result = await db.execute(
|
||||
select(Subscription).where(Subscription.tenant_id == tenant_id)
|
||||
)
|
||||
existing_subscription = result.scalars().first()
|
||||
|
||||
if not existing_subscription:
|
||||
logger.info(
|
||||
"Creating demo subscription for tenant",
|
||||
tenant_id=str(tenant_id),
|
||||
plan="enterprise"
|
||||
)
|
||||
|
||||
subscription = Subscription(
|
||||
tenant_id=tenant_id,
|
||||
plan="enterprise", # Demo templates get full access
|
||||
status="active",
|
||||
monthly_price=0.0, # Free for demo
|
||||
billing_cycle="monthly",
|
||||
max_users=-1, # Unlimited
|
||||
max_locations=-1,
|
||||
max_products=-1,
|
||||
features={}
|
||||
)
|
||||
db.add(subscription)
|
||||
|
||||
# Commit all changes
|
||||
await db.commit()
|
||||
|
||||
|
||||
@@ -41,6 +41,7 @@ from app.services.training_events import (
|
||||
)
|
||||
from app.core.config import settings
|
||||
from app.core.database import get_db
|
||||
from app.models import AuditLog
|
||||
|
||||
logger = structlog.get_logger()
|
||||
route_builder = RouteBuilder('training')
|
||||
@@ -48,7 +49,7 @@ route_builder = RouteBuilder('training')
|
||||
router = APIRouter(tags=["training-operations"])
|
||||
|
||||
# Initialize audit logger
|
||||
audit_logger = create_audit_logger("training-service")
|
||||
audit_logger = create_audit_logger("training-service", AuditLog)
|
||||
|
||||
# Redis client for rate limiting
|
||||
_redis_client = None
|
||||
|
||||
@@ -263,10 +263,14 @@ class ModelRepository(TrainingBaseRepository):
|
||||
|
||||
average_mape = accuracy_row.average_mape if accuracy_row and accuracy_row.average_mape else 0
|
||||
total_models_with_metrics = accuracy_row.total_models_with_metrics if accuracy_row else 0
|
||||
|
||||
|
||||
# Convert MAPE to accuracy percentage (lower MAPE = higher accuracy)
|
||||
# Use 100 - MAPE as a simple conversion, but cap it at reasonable bounds
|
||||
average_accuracy = max(0, min(100, 100 - float(average_mape))) if average_mape > 0 else 0
|
||||
# Return None if no models have metrics (no data), rather than 0
|
||||
if total_models_with_metrics == 0:
|
||||
average_accuracy = None
|
||||
else:
|
||||
average_accuracy = max(0, min(100, 100 - float(average_mape))) if average_mape > 0 else 0
|
||||
|
||||
return {
|
||||
"total_models": total_models,
|
||||
|
||||
Reference in New Issue
Block a user