New enterprise feature
@@ -23,20 +23,18 @@ from app.models.recipes import (
    RecipeStatus, ProductionStatus, MeasurementUnit, ProductionPriority
)

from app.core.config import settings

logger = structlog.get_logger()

router = APIRouter(prefix="/internal/demo", tags=["internal"])

# Internal API key for service-to-service auth
INTERNAL_API_KEY = os.getenv("INTERNAL_API_KEY", "dev-internal-key-change-in-production")

# Base demo tenant IDs
DEMO_TENANT_SAN_PABLO = "a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6"
DEMO_TENANT_LA_ESPIGA = "b2c3d4e5-f6a7-48b9-c0d1-e2f3a4b5c6d7"
DEMO_TENANT_PROFESSIONAL = "a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6"


def verify_internal_api_key(x_internal_api_key: Optional[str] = Header(None)):
    """Verify internal API key for service-to-service communication"""
-    if x_internal_api_key != INTERNAL_API_KEY:
+    if x_internal_api_key != settings.INTERNAL_API_KEY:
        logger.warning("Unauthorized internal API access attempted")
        raise HTTPException(status_code=403, detail="Invalid internal API key")
    return True
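The check above is written as a plain callable so routes can require it through FastAPI's dependency injection. A minimal, self-contained sketch of that wiring, assuming nothing beyond FastAPI itself; the router name, route path, and helper name below are illustrative, not this file's actual endpoints:

from typing import Optional

from fastapi import APIRouter, Depends, Header, HTTPException

sketch_router = APIRouter(prefix="/internal/demo", tags=["internal"])


def require_internal_key(x_internal_api_key: Optional[str] = Header(None)) -> bool:
    # Hypothetical stand-in for verify_internal_api_key: reject any request
    # whose X-Internal-API-Key header does not match the configured secret.
    if x_internal_api_key != "dev-internal-key-change-in-production":
        raise HTTPException(status_code=403, detail="Invalid internal API key")
    return True


@sketch_router.post("/clone")
async def clone_demo(_: bool = Depends(require_internal_key)):
    # Only reached when the dependency above did not raise.
    return {"status": "accepted"}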
@@ -115,6 +113,7 @@ async def clone_demo_data(
    recipe_ingredient_map = {}

    # Clone Recipes
    logger.info("Starting to clone recipes", base_tenant=str(base_uuid))
    result = await db.execute(
        select(Recipe).where(Recipe.tenant_id == base_uuid)
    )
@@ -130,11 +129,23 @@ async def clone_demo_data(
        new_recipe_id = uuid.uuid4()
        recipe_id_map[recipe.id] = new_recipe_id

+        # Validate required fields before creating new recipe
+        if recipe.finished_product_id is None:
+            logger.warning(
+                "Recipe has null finished_product_id, skipping clone",
+                recipe_id=recipe.id,
+                recipe_name=recipe.name
+            )
+            continue  # Skip recipes with null required field
+
+        # Generate a unique recipe code to avoid potential duplicates
+        recipe_code = f"REC-{uuid.uuid4().hex[:8].upper()}"

        new_recipe = Recipe(
            id=new_recipe_id,
            tenant_id=virtual_uuid,
            name=recipe.name,
-            recipe_code=f"REC-{uuid.uuid4().hex[:8].upper()}",  # New unique code
+            recipe_code=recipe_code,  # New unique code
            version=recipe.version,
            finished_product_id=recipe.finished_product_id,  # Keep product reference
            description=recipe.description,
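Each clone gets a freshly generated recipe_code instead of reusing the source tenant's code, so a uniqueness constraint on the code cannot collide with rows that already exist. A small, runnable illustration of what that expression produces (pure standard library; the helper name is made up for the example):

import uuid


def new_recipe_code() -> str:
    # Eight hex characters from a random UUID, upper-cased, e.g. "REC-9F3A1C4B".
    # With 16**8 (about 4.3 billion) possible suffixes, collisions inside one
    # demo tenant are unlikely, though not impossible.
    return f"REC-{uuid.uuid4().hex[:8].upper()}"


print(new_recipe_code())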
@@ -175,13 +186,16 @@ async def clone_demo_data(
            created_by=recipe.created_by,
            updated_by=recipe.updated_by
        )
        # Add to session
        db.add(new_recipe)
        stats["recipes"] += 1

    # Flush to get recipe IDs for foreign keys
    logger.debug("Flushing recipe changes to get IDs")
    await db.flush()

    # Clone Recipe Ingredients
    logger.info("Cloning recipe ingredients", recipe_ingredients_count=len(recipe_id_map))
    for old_recipe_id, new_recipe_id in recipe_id_map.items():
        result = await db.execute(
            select(RecipeIngredient).where(RecipeIngredient.recipe_id == old_recipe_id)
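The flush/commit split used throughout this endpoint is the usual SQLAlchemy pattern: flush() sends the pending INSERTs inside the still-open transaction so later rows can reference the newly created recipe IDs, while the single commit() at the very end keeps the whole clone atomic. A self-contained sketch of that pattern with throwaway Parent/Child models; it assumes SQLAlchemy 2.x and the aiosqlite driver, neither of which is asserted by the diff itself:

import asyncio
import uuid

from sqlalchemy import ForeignKey, String
from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column


class Base(DeclarativeBase):
    pass


class Parent(Base):
    __tablename__ = "parent"
    id: Mapped[str] = mapped_column(String, primary_key=True)


class Child(Base):
    __tablename__ = "child"
    id: Mapped[str] = mapped_column(String, primary_key=True)
    parent_id: Mapped[str] = mapped_column(ForeignKey("parent.id"))


async def main() -> None:
    engine = create_async_engine("sqlite+aiosqlite:///:memory:")
    async with engine.begin() as conn:
        await conn.run_sync(Base.metadata.create_all)

    async with AsyncSession(engine) as session:
        parent = Parent(id=str(uuid.uuid4()))
        session.add(parent)
        # flush() emits the INSERT inside the open transaction: the parent row
        # now exists for foreign-key purposes, but nothing is committed yet.
        await session.flush()

        session.add(Child(id=str(uuid.uuid4()), parent_id=parent.id))
        # A single commit makes the whole operation atomic: either every
        # cloned row lands, or none do.
        await session.commit()


asyncio.run(main())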
@@ -217,9 +231,11 @@ async def clone_demo_data(
            stats["recipe_ingredients"] += 1

    # Flush to get recipe ingredient IDs
    logger.debug("Flushing recipe ingredient changes to get IDs")
    await db.flush()

    # Clone Production Batches
    logger.info("Starting to clone production batches", base_tenant=str(base_uuid))
    result = await db.execute(
        select(ProductionBatch).where(ProductionBatch.tenant_id == base_uuid)
    )
@@ -237,8 +253,15 @@ async def clone_demo_data(
        new_batch_id = uuid.uuid4()
        batch_id_map[batch.id] = new_batch_id

-        # Get the new recipe ID
-        new_recipe_id = recipe_id_map.get(batch.recipe_id, batch.recipe_id)
+        # Get the new recipe ID (this might be None if the recipe was skipped due to null finished_product_id)
+        new_recipe_id = recipe_id_map.get(batch.recipe_id)
+        if new_recipe_id is None:
+            logger.warning(
+                "Skipping production batch with no corresponding recipe",
+                batch_id=batch.id,
+                original_recipe_id=batch.recipe_id
+            )
+            continue

        # Adjust all date fields using the shared utility
        adjusted_production_date = adjust_date_for_demo(
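The remapping change above is the heart of the fix: looking up the old recipe ID without a fallback default returns None for recipes that were skipped earlier, which lets the batch loop drop orphaned batches instead of cloning them with a dangling reference. A tiny pure-Python illustration of the difference (the data below is made up):

import uuid

# Pretend one of two source recipes was skipped during cloning.
recipe_id_map = {"recipe-kept": uuid.uuid4()}  # "recipe-skipped" is absent

batches = [
    {"id": "batch-1", "recipe_id": "recipe-kept"},
    {"id": "batch-2", "recipe_id": "recipe-skipped"},
]

for batch in batches:
    # With a fallback default, .get() would return the *old* ID for batch-2
    # and the orphan would be cloned with a dangling reference.
    new_recipe_id = recipe_id_map.get(batch["recipe_id"])
    if new_recipe_id is None:
        print(f"skipping {batch['id']}: its recipe was not cloned")
        continue
    print(f"cloning {batch['id']} -> recipe {new_recipe_id}")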
@@ -314,10 +337,16 @@ async def clone_demo_data(
        stats["production_batches"] += 1

    # Flush to get batch IDs
    logger.debug("Flushing production batch changes to get IDs")
    await db.flush()

    # Clone Production Ingredient Consumption
    logger.info("Cloning production ingredient consumption")
    for old_batch_id, new_batch_id in batch_id_map.items():
-        # Skip consumption if the batch was skipped (no corresponding recipe)
-        if old_batch_id not in batch_id_map:  # This condition was redundant/incorrect
-            continue  # This batch was skipped, so skip its consumption too

        result = await db.execute(
            select(ProductionIngredientConsumption).where(
                ProductionIngredientConsumption.production_batch_id == old_batch_id
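Dropping the membership test is safe because iterating batch_id_map.items() can only yield keys that are in batch_id_map; batches skipped earlier never entered the map in the first place. A short demonstration of why the removed check could never fire:

batch_id_map = {"old-batch-1": "new-batch-1", "old-batch-2": "new-batch-2"}

for old_batch_id, new_batch_id in batch_id_map.items():
    # Every old_batch_id is drawn from batch_id_map itself, so this always holds.
    assert old_batch_id in batch_id_map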
@@ -326,11 +355,17 @@ async def clone_demo_data(
        consumptions = result.scalars().all()

        for consumption in consumptions:
-            # Get the new recipe ingredient ID
+            # Get the new recipe ingredient ID (skip if original ingredient's recipe was skipped)
            new_recipe_ingredient_id = recipe_ingredient_map.get(
                consumption.recipe_ingredient_id,
                consumption.recipe_ingredient_id
            )
+            if new_recipe_ingredient_id is None:
+                logger.warning(
+                    "Skipping consumption with no corresponding recipe ingredient",
+                    consumption_id=consumption.id,
+                    original_recipe_ingredient_id=consumption.recipe_ingredient_id
+                )
+                continue

            adjusted_consumption_time = adjust_date_for_demo(
                consumption.consumption_time,
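adjust_date_for_demo is a shared utility imported elsewhere in this module, and its implementation is not part of this hunk. A plausible sketch of what such a helper tends to do, shifting historical timestamps so cloned demo data looks recent while keeping relative spacing; the signature and behaviour below are assumptions, not the project's actual code:

from datetime import datetime, timezone
from typing import Optional


def adjust_date_for_demo(
    original: Optional[datetime],
    base_reference: datetime,
    new_reference: Optional[datetime] = None,
) -> Optional[datetime]:
    # Hypothetical helper: keep each timestamp's offset from the source
    # dataset's reference date, but re-anchor it to "now" (or another
    # reference) so demo data always appears recent.
    if original is None:
        return None
    anchor = new_reference or datetime.now(timezone.utc)
    return anchor + (original - base_reference)


# Example: an event 3 days after the source reference stays 3 days after
# the new anchor.
source_ref = datetime(2024, 1, 1, tzinfo=timezone.utc)
event = datetime(2024, 1, 4, tzinfo=timezone.utc)
print(adjust_date_for_demo(event, source_ref))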
@@ -364,6 +399,7 @@ async def clone_demo_data(
            stats["ingredient_consumptions"] += 1

    # Commit all changes
    logger.debug("Committing all cloned changes")
    await db.commit()

    total_records = sum(stats.values())