New enterprise feature

Urtzi Alfaro
2025-11-30 09:12:40 +01:00
parent f9d0eec6ec
commit 972db02f6d
176 changed files with 19741 additions and 1361 deletions


@@ -23,20 +23,18 @@ from app.models.recipes import (
RecipeStatus, ProductionStatus, MeasurementUnit, ProductionPriority
)
from app.core.config import settings
logger = structlog.get_logger()
router = APIRouter(prefix="/internal/demo", tags=["internal"])
# Internal API key for service-to-service auth
INTERNAL_API_KEY = os.getenv("INTERNAL_API_KEY", "dev-internal-key-change-in-production")
# Base demo tenant IDs
DEMO_TENANT_SAN_PABLO = "a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6"
DEMO_TENANT_LA_ESPIGA = "b2c3d4e5-f6a7-48b9-c0d1-e2f3a4b5c6d7"
DEMO_TENANT_PROFESSIONAL = "a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6"
def verify_internal_api_key(x_internal_api_key: Optional[str] = Header(None)):
"""Verify internal API key for service-to-service communication"""
- if x_internal_api_key != INTERNAL_API_KEY:
+ if x_internal_api_key != settings.INTERNAL_API_KEY:
logger.warning("Unauthorized internal API access attempted")
raise HTTPException(status_code=403, detail="Invalid internal API key")
return True
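
The key check above is a plain callable, which suggests it is attached to routes through FastAPI's dependency injection. A minimal, self-contained sketch of that wiring, assuming a hypothetical /clone path and an environment-variable stand-in for settings.INTERNAL_API_KEY:

import os
from typing import Optional
from fastapi import APIRouter, Depends, Header, HTTPException

# Stand-in for settings.INTERNAL_API_KEY; the real service reads it from its config object.
INTERNAL_API_KEY = os.getenv("INTERNAL_API_KEY", "dev-internal-key")

router = APIRouter(prefix="/internal/demo", tags=["internal"])

def verify_internal_api_key(x_internal_api_key: Optional[str] = Header(None)) -> bool:
    # FastAPI maps the X-Internal-Api-Key request header to this parameter.
    if x_internal_api_key != INTERNAL_API_KEY:
        raise HTTPException(status_code=403, detail="Invalid internal API key")
    return True

@router.post("/clone")  # hypothetical path; the real route name is not shown in this hunk
async def clone_demo_data(_: bool = Depends(verify_internal_api_key)):
    return {"status": "ok"}
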
@@ -115,6 +113,7 @@ async def clone_demo_data(
recipe_ingredient_map = {}
# Clone Recipes
logger.info("Starting to clone recipes", base_tenant=str(base_uuid))
result = await db.execute(
select(Recipe).where(Recipe.tenant_id == base_uuid)
)
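
The clone routine instruments each phase with structlog's key-value logging, as in logger.info("Starting to clone recipes", base_tenant=...). A minimal sketch of that logging pattern, assuming JSON output is wanted (the service's actual structlog configuration is not part of this diff):

import structlog

# Sketch configuration only; the real processor chain may differ.
structlog.configure(
    processors=[
        structlog.processors.TimeStamper(fmt="iso"),  # attach an ISO timestamp to each event
        structlog.processors.JSONRenderer(),          # emit one JSON object per log line
    ]
)

logger = structlog.get_logger()
# Keyword arguments become structured fields rather than interpolated text.
logger.info("Starting to clone recipes", base_tenant="a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6")
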
@@ -130,11 +129,23 @@ async def clone_demo_data(
new_recipe_id = uuid.uuid4()
recipe_id_map[recipe.id] = new_recipe_id
# Validate required fields before creating new recipe
if recipe.finished_product_id is None:
logger.warning(
"Recipe has null finished_product_id, skipping clone",
recipe_id=recipe.id,
recipe_name=recipe.name
)
continue # Skip recipes with null required field
# Generate a unique recipe code to avoid potential duplicates
recipe_code = f"REC-{uuid.uuid4().hex[:8].upper()}"
new_recipe = Recipe(
id=new_recipe_id,
tenant_id=virtual_uuid,
name=recipe.name,
recipe_code=f"REC-{uuid.uuid4().hex[:8].upper()}", # New unique code
recipe_code=recipe_code, # New unique code
version=recipe.version,
finished_product_id=recipe.finished_product_id, # Keep product reference
description=recipe.description,
@@ -175,13 +186,16 @@ async def clone_demo_data(
created_by=recipe.created_by,
updated_by=recipe.updated_by
)
# Add to session
db.add(new_recipe)
stats["recipes"] += 1
# Flush to get recipe IDs for foreign keys
logger.debug("Flushing recipe changes to get IDs")
await db.flush()
# Clone Recipe Ingredients
logger.info("Cloning recipe ingredients", recipe_ingredients_count=len(recipe_id_map))
for old_recipe_id, new_recipe_id in recipe_id_map.items():
result = await db.execute(
select(RecipeIngredient).where(RecipeIngredient.recipe_id == old_recipe_id)
@@ -217,9 +231,11 @@ async def clone_demo_data(
stats["recipe_ingredients"] += 1
# Flush to get recipe ingredient IDs
logger.debug("Flushing recipe ingredient changes to get IDs")
await db.flush()
# Clone Production Batches
logger.info("Starting to clone production batches", base_tenant=str(base_uuid))
result = await db.execute(
select(ProductionBatch).where(ProductionBatch.tenant_id == base_uuid)
)
@@ -237,8 +253,15 @@ async def clone_demo_data(
new_batch_id = uuid.uuid4()
batch_id_map[batch.id] = new_batch_id
- # Get the new recipe ID
- new_recipe_id = recipe_id_map.get(batch.recipe_id, batch.recipe_id)
+ # Get the new recipe ID (this might be None if the recipe was skipped due to null finished_product_id)
+ new_recipe_id = recipe_id_map.get(batch.recipe_id)
if new_recipe_id is None:
logger.warning(
"Skipping production batch with no corresponding recipe",
batch_id=batch.id,
original_recipe_id=batch.recipe_id
)
continue
# Adjust all date fields using the shared utility
adjusted_production_date = adjust_date_for_demo(
@@ -314,10 +337,16 @@ async def clone_demo_data(
stats["production_batches"] += 1
# Flush to get batch IDs
logger.debug("Flushing production batch changes to get IDs")
await db.flush()
# Clone Production Ingredient Consumption
logger.info("Cloning production ingredient consumption")
for old_batch_id, new_batch_id in batch_id_map.items():
# Batches that were skipped earlier were never added to batch_id_map, so iterating
# over batch_id_map.items() already excludes their consumption records here.
result = await db.execute(
select(ProductionIngredientConsumption).where(
ProductionIngredientConsumption.production_batch_id == old_batch_id
@@ -326,11 +355,17 @@ async def clone_demo_data(
consumptions = result.scalars().all()
for consumption in consumptions:
- # Get the new recipe ingredient ID
+ # Get the new recipe ingredient ID (skip if original ingredient's recipe was skipped)
new_recipe_ingredient_id = recipe_ingredient_map.get(
consumption.recipe_ingredient_id # no fallback default, so a skipped ingredient surfaces as None
)
if new_recipe_ingredient_id is None:
logger.warning(
"Skipping consumption with no corresponding recipe ingredient",
consumption_id=consumption.id,
original_recipe_ingredient_id=consumption.recipe_ingredient_id
)
continue
adjusted_consumption_time = adjust_date_for_demo(
consumption.consumption_time,
@@ -364,6 +399,7 @@ async def clone_demo_data(
stats["ingredient_consumptions"] += 1
# Commit all changes
logger.debug("Committing all cloned changes")
await db.commit()
total_records = sum(stats.values())
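
Stripped of the domain models, the cloning logic in this file follows a single pattern: give each cloned row a fresh UUID, record the old-ID to new-ID mapping, flush so new rows exist before dependents reference them, and skip any dependent whose parent was not cloned. A condensed sketch of that pattern with plain dictionaries (illustrative only; the real code operates on SQLAlchemy models through an AsyncSession):

import uuid

# Parents: some rows are invalid and must be skipped (cf. null finished_product_id above).
parents = [{"id": 1, "valid": True}, {"id": 2, "valid": False}]
children = [{"id": 10, "parent_id": 1}, {"id": 11, "parent_id": 2}]

parent_id_map = {}
for p in parents:
    if not p["valid"]:
        continue                      # skipped parents never enter the map
    parent_id_map[p["id"]] = uuid.uuid4()

cloned_children = []
for c in children:
    # .get() without a default returns None for skipped parents, so a stale
    # foreign key is detected instead of being silently copied across tenants.
    new_parent_id = parent_id_map.get(c["parent_id"])
    if new_parent_id is None:
        continue                      # propagate the skip to dependents
    cloned_children.append({"id": uuid.uuid4(), "parent_id": new_parent_id})

print(len(cloned_children))  # 1: only the child of the valid parent survives
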


@@ -5,18 +5,21 @@ Configuration management for Recipe Service
import os
from typing import Optional
from shared.config.base import BaseServiceSettings
- class Settings:
- """Recipe service configuration settings"""
- # Service identification
- SERVICE_NAME: str = "recipes"
- SERVICE_VERSION: str = "1.0.0"
- # API settings
- API_V1_PREFIX: str = "/api/v1"
+ class Settings(BaseServiceSettings):
+ """Recipe service configuration extending base configuration"""
+ # Override service-specific settings
+ SERVICE_NAME: str = "recipes-service"
+ VERSION: str = "1.0.0"
+ APP_NAME: str = "Recipe Service"
+ DESCRIPTION: str = "Recipe management and planning service"
+ # API Configuration
+ API_V1_STR: str = "/api/v1"
# Database configuration (secure approach - build from components)
@property
def DATABASE_URL(self) -> str:
@@ -34,12 +37,32 @@ class Settings:
name = os.getenv("RECIPES_DB_NAME", "recipes_db")
return f"postgresql+asyncpg://{user}:{password}@{host}:{port}/{name}"
- # Redis (if needed for caching)
- REDIS_URL: str = os.getenv("REDIS_URL", "redis://localhost:6379/0")
- # External service URLs
- GATEWAY_URL: str = os.getenv("GATEWAY_URL", "http://gateway-service:8000")
+ # Redis configuration - use a specific database number
+ REDIS_DB: int = 2
# Recipe-specific settings
MAX_RECIPE_INGREDIENTS: int = int(os.getenv("MAX_RECIPE_INGREDIENTS", "50"))
MAX_BATCH_SIZE_MULTIPLIER: float = float(os.getenv("MAX_BATCH_SIZE_MULTIPLIER", "10.0"))
DEFAULT_RECIPE_VERSION: str = "1.0"
# Production settings (integration with production service)
MAX_PRODUCTION_BATCHES_PER_DAY: int = int(os.getenv("MAX_PRODUCTION_BATCHES_PER_DAY", "100"))
PRODUCTION_SCHEDULE_DAYS_AHEAD: int = int(os.getenv("PRODUCTION_SCHEDULE_DAYS_AHEAD", "7"))
# Cost calculation settings
OVERHEAD_PERCENTAGE: float = float(os.getenv("OVERHEAD_PERCENTAGE", "15.0")) # Default 15% overhead
LABOR_COST_PER_HOUR: float = float(os.getenv("LABOR_COST_PER_HOUR", "25.0")) # Default €25/hour
# Quality control
MIN_QUALITY_SCORE: float = float(os.getenv("MIN_QUALITY_SCORE", "6.0")) # Minimum acceptable quality score
MAX_DEFECT_RATE: float = float(os.getenv("MAX_DEFECT_RATE", "5.0")) # Maximum 5% defect rate
# External service URLs (specific to recipes service)
PRODUCTION_SERVICE_URL: str = os.getenv(
"PRODUCTION_SERVICE_URL",
"http://production-service:8000"
)
INVENTORY_SERVICE_URL: str = os.getenv(
"INVENTORY_SERVICE_URL",
"http://inventory-service:8000"
@@ -48,48 +71,6 @@ class Settings:
"SALES_SERVICE_URL",
"http://sales-service:8000"
)
- # Authentication
- SECRET_KEY: str = os.getenv("SECRET_KEY", "your-secret-key-here")
- JWT_SECRET_KEY: str = os.getenv("JWT_SECRET_KEY", "your-super-secret-jwt-key-change-in-production-min-32-characters-long")
- ACCESS_TOKEN_EXPIRE_MINUTES: int = int(os.getenv("ACCESS_TOKEN_EXPIRE_MINUTES", "30"))
- # Logging
- LOG_LEVEL: str = os.getenv("LOG_LEVEL", "INFO")
- # Production configuration
- ENVIRONMENT: str = os.getenv("ENVIRONMENT", "development")
- DEBUG: bool = os.getenv("DEBUG", "False").lower() == "true"
- # CORS settings
- ALLOWED_ORIGINS: list = os.getenv("ALLOWED_ORIGINS", "http://localhost:3000").split(",")
- # Recipe-specific settings
- MAX_RECIPE_INGREDIENTS: int = int(os.getenv("MAX_RECIPE_INGREDIENTS", "50"))
- MAX_BATCH_SIZE_MULTIPLIER: float = float(os.getenv("MAX_BATCH_SIZE_MULTIPLIER", "10.0"))
- DEFAULT_RECIPE_VERSION: str = "1.0"
- # Production settings
- MAX_PRODUCTION_BATCHES_PER_DAY: int = int(os.getenv("MAX_PRODUCTION_BATCHES_PER_DAY", "100"))
- PRODUCTION_SCHEDULE_DAYS_AHEAD: int = int(os.getenv("PRODUCTION_SCHEDULE_DAYS_AHEAD", "7"))
- # Cost calculation settings
- OVERHEAD_PERCENTAGE: float = float(os.getenv("OVERHEAD_PERCENTAGE", "15.0")) # Default 15% overhead
- LABOR_COST_PER_HOUR: float = float(os.getenv("LABOR_COST_PER_HOUR", "25.0")) # Default €25/hour
- # Quality control
- MIN_QUALITY_SCORE: float = float(os.getenv("MIN_QUALITY_SCORE", "6.0")) # Minimum acceptable quality score
- MAX_DEFECT_RATE: float = float(os.getenv("MAX_DEFECT_RATE", "5.0")) # Maximum 5% defect rate
- # Messaging/Events (if using message queues)
- RABBITMQ_URL: Optional[str] = os.getenv("RABBITMQ_URL")
- KAFKA_BOOTSTRAP_SERVERS: Optional[str] = os.getenv("KAFKA_BOOTSTRAP_SERVERS")
- # Health check settings
- HEALTH_CHECK_TIMEOUT: int = int(os.getenv("HEALTH_CHECK_TIMEOUT", "30"))
- class Config:
- case_sensitive = True
# Global settings instance
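
The DATABASE_URL property above assembles the async DSN from individual environment variables rather than one opaque connection string. A standalone sketch of that approach (only RECIPES_DB_NAME appears in the hunk shown; the other variable names are assumptions and may differ in the real settings class):

import os

class DbSettingsSketch:
    # Build the asyncpg DSN from components so credentials can be injected separately.
    @property
    def DATABASE_URL(self) -> str:
        user = os.getenv("RECIPES_DB_USER", "recipes_user")      # assumed variable name
        password = os.getenv("RECIPES_DB_PASSWORD", "")          # assumed variable name
        host = os.getenv("RECIPES_DB_HOST", "localhost")         # assumed variable name
        port = os.getenv("RECIPES_DB_PORT", "5432")              # assumed variable name
        name = os.getenv("RECIPES_DB_NAME", "recipes_db")        # shown in the diff
        return f"postgresql+asyncpg://{user}:{password}@{host}:{port}/{name}"

print(DbSettingsSketch().DATABASE_URL)
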


@@ -55,9 +55,9 @@ class RecipesService(StandardFastAPIService):
service_name="recipes-service",
app_name="Recipe Management Service",
description="Comprehensive recipe management, production planning, and inventory consumption tracking for bakery operations",
- version=settings.SERVICE_VERSION,
+ version=settings.VERSION,
log_level=settings.LOG_LEVEL,
- cors_origins=settings.ALLOWED_ORIGINS,
+ cors_origins=settings.CORS_ORIGINS,
api_prefix="", # Empty because RouteBuilder already includes /api/v1
database_manager=db_manager,
expected_tables=recipes_expected_tables
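
StandardFastAPIService is shared project code that this commit does not show, so the renamed fields (settings.VERSION, settings.CORS_ORIGINS) can only be illustrated indirectly. A rough sketch of the plain-FastAPI wiring such a wrapper presumably performs with them, using standard CORSMiddleware (values hard-coded here purely for illustration):

from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware

VERSION = "1.0.0"                          # would come from settings.VERSION
APP_NAME = "Recipe Management Service"     # app_name passed above
CORS_ORIGINS = ["http://localhost:3000"]   # would come from settings.CORS_ORIGINS

app = FastAPI(title=APP_NAME, version=VERSION)
app.add_middleware(
    CORSMiddleware,
    allow_origins=CORS_ORIGINS,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)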