demo seed change 3
@@ -96,10 +96,9 @@ class DemoCleanupService:
         await self._delete_redis_cache(virtual_tenant_id)
 
         # Delete child tenants if enterprise
-        if session.demo_account_type == "enterprise":
-            child_metadata = session.session_metadata.get("children", [])
-            for child in child_metadata:
-                child_tenant_id = child["virtual_tenant_id"]
+        if session.demo_account_type == "enterprise" and session.session_metadata:
+            child_tenant_ids = session.session_metadata.get("child_tenant_ids", [])
+            for child_tenant_id in child_tenant_ids:
                 await self._delete_from_all_services(child_tenant_id)
 
         duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
@@ -209,41 +209,57 @@ class DemoSessionManager:
             logger.warning("Session not found for destruction", session_id=session_id)
             return
 
-        # Update status to DESTROYING
-        await self.repository.update_fields(
-            session_id,
-            status=DemoSessionStatus.DESTROYING
-        )
-
-        # Trigger cleanup across all services
-        cleanup_service = DemoCleanupService(self.db, self.redis)
-        result = await cleanup_service.cleanup_session(session)
-
-        if result["success"]:
-            # Update status to DESTROYED
+        try:
+            # Update status to DESTROYING
             await self.repository.update_fields(
                 session_id,
-                status=DemoSessionStatus.DESTROYED,
-                destroyed_at=datetime.now(timezone.utc)
+                status=DemoSessionStatus.DESTROYING
             )
-        else:
-            # Update status to FAILED with error details
-            await self.repository.update_fields(
-                session_id,
-                status=DemoSessionStatus.FAILED,
-                error_details=result["errors"]
-            )
 
-        # Delete Redis data
-        await self.redis.delete_session_data(session_id)
+            # Trigger cleanup across all services
+            cleanup_service = DemoCleanupService(self.db, self.redis)
+            result = await cleanup_service.cleanup_session(session)
 
-        logger.info(
-            "Session destroyed",
-            session_id=session_id,
-            virtual_tenant_id=str(session.virtual_tenant_id),
-            total_records_deleted=result.get("total_deleted", 0),
-            duration_ms=result.get("duration_ms", 0)
-        )
+            if result["success"]:
+                # Update status to DESTROYED
+                await self.repository.update_fields(
+                    session_id,
+                    status=DemoSessionStatus.DESTROYED,
+                    destroyed_at=datetime.now(timezone.utc)
+                )
+            else:
+                # Update status to FAILED with error details
+                await self.repository.update_fields(
+                    session_id,
+                    status=DemoSessionStatus.FAILED,
+                    error_details=result["errors"]
+                )
+
+            # Delete Redis data
+            await self.redis.delete_session_data(session_id)
+
+            logger.info(
+                "Session destroyed",
+                session_id=session_id,
+                virtual_tenant_id=str(session.virtual_tenant_id),
+                total_records_deleted=result.get("total_deleted", 0),
+                duration_ms=result.get("duration_ms", 0)
+            )
+
+        except Exception as e:
+            logger.error(
+                "Failed to destroy session",
+                session_id=session_id,
+                error=str(e),
+                exc_info=True
+            )
+            await self.repository.update_fields(
+                session_id,
+                status=DemoSessionStatus.FAILED,
+                error_details=[f"Cleanup failed: {str(e)}"]
+            )
+            raise
 
     async def _check_database_disk_space(self):
        """Check if database has sufficient disk space for demo operations"""
@@ -218,32 +218,29 @@ async def clone_demo_data(
                 detail=f"Invalid UUID format in forecast data: {str(e)}"
             )
 
-        # Transform dates
+        # Transform dates using the proper parse_date_field function
         for date_field in ['forecast_date', 'created_at']:
             if date_field in forecast_data:
                 try:
-                    date_value = forecast_data[date_field]
-                    if isinstance(date_value, str):
-                        original_date = datetime.fromisoformat(date_value)
-                    elif hasattr(date_value, 'isoformat'):
-                        original_date = date_value
-                    else:
-                        logger.warning("Skipping invalid date format",
-                                       date_field=date_field,
-                                       date_value=date_value)
-                        continue
-
-                    adjusted_forecast_date = adjust_date_for_demo(
-                        original_date,
-                        session_time
+                    parsed_date = parse_date_field(
+                        forecast_data[date_field],
+                        session_time,
+                        date_field
                     )
-                    forecast_data[date_field] = adjusted_forecast_date
-                except (ValueError, AttributeError) as e:
-                    logger.warning("Failed to parse date, skipping",
+                    if parsed_date:
+                        forecast_data[date_field] = parsed_date
+                    else:
+                        # If parsing fails, use session_time as fallback
+                        forecast_data[date_field] = session_time
+                        logger.warning("Using fallback date for failed parsing",
+                                       date_field=date_field,
+                                       original_value=forecast_data[date_field])
+                except Exception as e:
+                    logger.warning("Failed to parse date, using fallback",
                                    date_field=date_field,
                                    date_value=forecast_data[date_field],
                                    error=str(e))
-                    forecast_data.pop(date_field, None)
+                    forecast_data[date_field] = session_time
 
         # Create forecast
         # Map product_id to inventory_product_id if needed
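Note: this hunk (and the matching batch-date hunk below) delegates to `parse_date_field` from `shared.utils.demo_dates`, whose implementation is not part of this diff. A minimal sketch, assuming only the contract the call sites imply — accepts `datetime` objects, ISO strings, and the `BASE_TS` relative markers mentioned in the sales hunk further down, and returns `None` on failure so callers can fall back to `session_time`:

```python
# Hypothetical sketch only - the real parse_date_field lives in
# shared.utils.demo_dates and may differ in detail.
from datetime import datetime, timedelta
from typing import Any, Optional

def parse_date_field(value: Any, session_time: datetime,
                     field_name: str) -> Optional[datetime]:
    """Return an adjusted datetime for a seed value, or None if unparseable."""
    # field_name is presumably used for logging in the real helper.
    if isinstance(value, datetime):
        return value
    if not isinstance(value, str):
        return None
    if value.startswith("BASE_TS"):
        # Assumed marker forms: "BASE_TS", "BASE_TS+2d", "BASE_TS-12d"
        offset = value[len("BASE_TS"):]
        if not offset:
            return session_time
        try:
            return session_time + timedelta(days=float(offset.rstrip("d")))
        except ValueError:
            return None
    try:
        return datetime.fromisoformat(value)
    except ValueError:
        return None
```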
@@ -252,17 +249,20 @@ async def clone_demo_data(
         # Map predicted_quantity to predicted_demand if needed
         predicted_demand = forecast_data.get('predicted_demand') or forecast_data.get('predicted_quantity')
 
+        # Set default location if not provided in seed data
+        location = forecast_data.get('location') or "Main Bakery"
+
         new_forecast = Forecast(
             id=transformed_id,
             tenant_id=virtual_uuid,
             inventory_product_id=inventory_product_id,
             product_name=forecast_data.get('product_name'),
-            location=forecast_data.get('location'),
+            location=location,
             forecast_date=forecast_data.get('forecast_date'),
             created_at=forecast_data.get('created_at', session_time),
             predicted_demand=predicted_demand,
-            confidence_lower=forecast_data.get('confidence_lower'),
-            confidence_upper=forecast_data.get('confidence_upper'),
+            confidence_lower=forecast_data.get('confidence_lower', max(0.0, float(predicted_demand or 0.0) * 0.8)),
+            confidence_upper=forecast_data.get('confidence_upper', max(0.0, float(predicted_demand or 0.0) * 1.2)),
             confidence_level=forecast_data.get('confidence_level', 0.8),
             model_id=forecast_data.get('model_id'),
             model_version=forecast_data.get('model_version'),
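Note: the `max(0.0, float(predicted_demand or 0.0) * 0.8)` defaults here, repeated in the four EnhancedForecastingService hunks below, all implement the same guard: tolerate a missing prediction and never emit a negative confidence bound. As a standalone illustration of the pattern (this helper does not exist in the codebase; the diff inlines the expression at each site):

```python
def default_bounds(predicted, lower_pct: float = 0.8, upper_pct: float = 1.2):
    """None-safe +/-20% band around a prediction, clamped at zero."""
    base = max(0.0, float(predicted or 0.0))
    return base * lower_pct, base * upper_pct

# default_bounds(None) -> (0.0, 0.0); default_bounds(50) -> (40.0, 60.0)
```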
@@ -299,32 +299,29 @@ async def clone_demo_data(
                 detail=f"Invalid UUID format in batch data: {str(e)}"
             )
 
-        # Transform dates
+        # Transform dates using proper parse_date_field function
         for date_field in ['requested_at', 'completed_at']:
             if date_field in batch_data:
                 try:
-                    date_value = batch_data[date_field]
-                    if isinstance(date_value, str):
-                        original_date = datetime.fromisoformat(date_value)
-                    elif hasattr(date_value, 'isoformat'):
-                        original_date = date_value
-                    else:
-                        logger.warning("Skipping invalid date format",
-                                       date_field=date_field,
-                                       date_value=date_value)
-                        continue
-
-                    adjusted_batch_date = adjust_date_for_demo(
-                        original_date,
-                        session_time
+                    parsed_date = parse_date_field(
+                        batch_data[date_field],
+                        session_time,
+                        date_field
                     )
-                    batch_data[date_field] = adjusted_batch_date
-                except (ValueError, AttributeError) as e:
-                    logger.warning("Failed to parse date, skipping",
+                    if parsed_date:
+                        batch_data[date_field] = parsed_date
+                    else:
+                        # If parsing fails, use session_time as fallback
+                        batch_data[date_field] = session_time
+                        logger.warning("Using fallback date for failed parsing",
+                                       date_field=date_field,
+                                       original_value=batch_data[date_field])
+                except Exception as e:
+                    logger.warning("Failed to parse date, using fallback",
                                    date_field=date_field,
                                    date_value=batch_data[date_field],
                                    error=str(e))
-                    batch_data.pop(date_field, None)
+                    batch_data[date_field] = session_time
 
         # Create prediction batch
         new_batch = PredictionBatch(
@@ -382,8 +382,8 @@ class EnhancedForecastingService:
             "location": request.location,
             "forecast_date": forecast_datetime,
             "predicted_demand": adjusted_prediction['prediction'],
-            "confidence_lower": adjusted_prediction.get('lower_bound', adjusted_prediction['prediction'] * 0.8),
-            "confidence_upper": adjusted_prediction.get('upper_bound', adjusted_prediction['prediction'] * 1.2),
+            "confidence_lower": adjusted_prediction.get('lower_bound', max(0.0, float(adjusted_prediction.get('prediction') or 0.0) * 0.8)),
+            "confidence_upper": adjusted_prediction.get('upper_bound', max(0.0, float(adjusted_prediction.get('prediction') or 0.0) * 1.2)),
             "confidence_level": request.confidence_level,
             "model_id": model_data['model_id'],
             "model_version": str(model_data.get('version', '1.0')),
@@ -410,8 +410,8 @@ class EnhancedForecastingService:
             location=request.location,
             forecast_date=forecast_datetime,
             predicted_demand=adjusted_prediction['prediction'],
-            confidence_lower=adjusted_prediction.get('lower_bound', adjusted_prediction['prediction'] * 0.8),
-            confidence_upper=adjusted_prediction.get('upper_bound', adjusted_prediction['prediction'] * 1.2),
+            confidence_lower=adjusted_prediction.get('lower_bound', max(0.0, float(adjusted_prediction.get('prediction') or 0.0) * 0.8)),
+            confidence_upper=adjusted_prediction.get('upper_bound', max(0.0, float(adjusted_prediction.get('prediction') or 0.0) * 1.2)),
             model_id=model_data['model_id'],
             expires_in_hours=24
         )
@@ -652,8 +652,8 @@ class EnhancedForecastingService:
             "location": request.location,
             "forecast_date": forecast_datetime,
             "predicted_demand": adjusted_prediction['prediction'],
-            "confidence_lower": adjusted_prediction.get('lower_bound', adjusted_prediction['prediction'] * 0.8),
-            "confidence_upper": adjusted_prediction.get('upper_bound', adjusted_prediction['prediction'] * 1.2),
+            "confidence_lower": adjusted_prediction.get('lower_bound', max(0.0, float(adjusted_prediction.get('prediction') or 0.0) * 0.8)),
+            "confidence_upper": adjusted_prediction.get('upper_bound', max(0.0, float(adjusted_prediction.get('prediction') or 0.0) * 1.2)),
             "confidence_level": request.confidence_level,
             "model_id": model_data['model_id'],
             "model_version": str(model_data.get('version', '1.0')),
@@ -679,8 +679,8 @@ class EnhancedForecastingService:
             location=request.location,
             forecast_date=forecast_datetime,
             predicted_demand=adjusted_prediction['prediction'],
-            confidence_lower=adjusted_prediction.get('lower_bound', adjusted_prediction['prediction'] * 0.8),
-            confidence_upper=adjusted_prediction.get('upper_bound', adjusted_prediction['prediction'] * 1.2),
+            confidence_lower=adjusted_prediction.get('lower_bound', max(0.0, float(adjusted_prediction.get('prediction') or 0.0) * 0.8)),
+            confidence_upper=adjusted_prediction.get('upper_bound', max(0.0, float(adjusted_prediction.get('prediction') or 0.0) * 1.2)),
             model_id=model_data['model_id'],
             expires_in_hours=24
         )
@@ -315,7 +315,7 @@ async def clone_demo_data_internal(
         records_cloned += 1
 
     # Clone stock batches
-    for stock_data in seed_data.get('stock_batches', []):
+    for stock_data in seed_data.get('stock', []):
         # Transform ID - handle both UUID and string IDs
         from shared.utils.demo_id_transformer import transform_id
         try:
@@ -358,6 +358,40 @@ async def clone_demo_data_internal(
         # Remove original id and tenant_id from stock_data to avoid conflict
         stock_data.pop('id', None)
         stock_data.pop('tenant_id', None)
+        # Remove notes field as it doesn't exist in the Stock model
+        stock_data.pop('notes', None)
+
+        # Transform ingredient_id to match transformed ingredient IDs
+        if 'ingredient_id' in stock_data:
+            ingredient_id_str = stock_data['ingredient_id']
+            try:
+                ingredient_uuid = UUID(ingredient_id_str)
+                transformed_ingredient_id = transform_id(ingredient_id_str, tenant_uuid)
+                stock_data['ingredient_id'] = str(transformed_ingredient_id)
+            except ValueError as e:
+                logger.error("Failed to transform ingredient_id",
+                             original_ingredient_id=ingredient_id_str,
+                             error=str(e))
+                raise HTTPException(
+                    status_code=400,
+                    detail=f"Invalid ingredient_id format: {str(e)}"
+                )
+
+        # Transform supplier_id if present
+        if 'supplier_id' in stock_data:
+            supplier_id_str = stock_data['supplier_id']
+            try:
+                supplier_uuid = UUID(supplier_id_str)
+                transformed_supplier_id = transform_id(supplier_id_str, tenant_uuid)
+                stock_data['supplier_id'] = str(transformed_supplier_id)
+            except ValueError as e:
+                logger.error("Failed to transform supplier_id",
+                             original_supplier_id=supplier_id_str,
+                             error=str(e))
+                raise HTTPException(
+                    status_code=400,
+                    detail=f"Invalid supplier_id format: {str(e)}"
+                )
 
         # Create stock batch
         stock = Stock(
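Note: `transform_id` (from `shared.utils.demo_id_transformer`) is applied to `ingredient_id` and `supplier_id` so cloned stock rows keep pointing at the cloned ingredient and supplier rows. Its implementation is not shown in this diff; a deterministic name-based UUID is one plausible sketch — determinism is the property that matters, since the same seed ID must map to the same transformed ID everywhere it appears:

```python
# Hypothetical sketch - the real transform_id lives in
# shared.utils.demo_id_transformer and may use a different scheme.
import uuid

def transform_id(original_id: str, tenant_uuid: uuid.UUID) -> uuid.UUID:
    """Map a seed UUID into a tenant-specific namespace, deterministically,
    so independent passes over ingredients and stock stay consistent."""
    return uuid.uuid5(tenant_uuid, str(original_id))
```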
@@ -368,88 +402,16 @@ async def clone_demo_data_internal(
         db.add(stock)
         records_cloned += 1
 
-    # Add deterministic edge case stock records
-    edge_times = calculate_edge_case_times(session_time)
-
-    # Get sample ingredients for edge cases (flour and dairy)
-    flour_ingredient_id = None
-    dairy_ingredient_id = None
-    for ing in seed_data.get('ingredients', []):
-        if ing.get('ingredient_category') == 'FLOUR' and not flour_ingredient_id and 'id' in ing:
-            from shared.utils.demo_id_transformer import transform_id
-            flour_ingredient_id = str(transform_id(ing['id'], UUID(virtual_tenant_id)))
-        elif ing.get('ingredient_category') == 'DAIRY' and not dairy_ingredient_id and 'id' in ing:
-            from shared.utils.demo_id_transformer import transform_id
-            dairy_ingredient_id = str(transform_id(ing['id'], UUID(virtual_tenant_id)))
-
-    # Edge Case 1: Expiring Soon Stock (expires in 2 days)
-    if flour_ingredient_id:
-        expiring_stock = Stock(
-            id=str(uuid.uuid4()),
-            tenant_id=str(virtual_tenant_id),
-            inventory_product_id=flour_ingredient_id,
-            batch_number=f"{session_id[:8]}-EDGE-EXPIRING",
-            quantity=25.0,
-            received_date=session_time - timedelta(days=12),
-            expiration_date=session_time + timedelta(days=2),
-            best_before_date=session_time + timedelta(days=2),
-            supplier_id=None,
-            purchase_order_id=None,
-            lot_number=f"LOT-EXPIRING-{session_id[:8]}",
-            storage_location="Almacén A - Estante 3",
-            quality_grade="GOOD",
-            notes="⚠️ EDGE CASE: Expires in 2 days - triggers orange 'Caducidad próxima' alert"
-        )
-        db.add(expiring_stock)
-        records_cloned += 1
-
-    # Edge Case 2: Low Stock (below reorder point)
-    if dairy_ingredient_id:
-        low_stock = Stock(
-            id=str(uuid.uuid4()),
-            tenant_id=str(virtual_tenant_id),
-            inventory_product_id=dairy_ingredient_id,
-            batch_number=f"{session_id[:8]}-EDGE-LOWSTOCK",
-            quantity=3.0,
-            received_date=session_time - timedelta(days=5),
-            expiration_date=session_time + timedelta(days=10),
-            best_before_date=session_time + timedelta(days=10),
-            supplier_id=None,
-            purchase_order_id=None,
-            lot_number=f"LOT-LOWSTOCK-{session_id[:8]}",
-            storage_location="Cámara Fría 1",
-            quality_grade="GOOD",
-            notes="⚠️ EDGE CASE: Below reorder point - triggers inventory alert if no pending PO"
-        )
-        db.add(low_stock)
-        records_cloned += 1
-
-    # Edge Case 3: Just Received Stock (received today)
-    if flour_ingredient_id:
-        fresh_stock = Stock(
-            id=str(uuid.uuid4()),
-            tenant_id=str(virtual_tenant_id),
-            inventory_product_id=flour_ingredient_id,
-            batch_number=f"{session_id[:8]}-EDGE-FRESH",
-            quantity=200.0,
-            received_date=session_time - timedelta(hours=2),
-            expiration_date=session_time + timedelta(days=180),
-            best_before_date=session_time + timedelta(days=180),
-            supplier_id=None,
-            purchase_order_id=None,
-            lot_number=f"LOT-FRESH-{session_id[:8]}",
-            storage_location="Almacén A - Estante 1",
-            quality_grade="EXCELLENT",
-            notes="⚠️ EDGE CASE: Just received 2 hours ago - shows as new stock"
-        )
-        db.add(fresh_stock)
-        records_cloned += 1
-
-    logger.info(
-        "Added deterministic edge case stock records",
-        edge_cases_added=3,
-        expiring_date=(session_time + timedelta(days=2)).isoformat(),
-        low_stock_qty=3.0
-    )
+    # Note: Edge cases are now handled exclusively through JSON seed data
+    # The seed data files already contain comprehensive edge cases including:
+    # - Low stock items below reorder points
+    # - Items expiring soon
+    # - Freshly received stock
+    # This ensures standardization and single source of truth for demo data
+
+    logger.info(
+        "Edge cases handled by JSON seed data - no manual creation needed",
+        seed_data_edge_cases="low_stock, expiring_soon, fresh_stock"
+    )
 
     await db.commit()
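Note: with the in-code edge cases removed, the JSON seed files are assumed to carry equivalent records. A hypothetical seed entry for the "expiring soon" case — field names follow the Stock columns above, the UUIDs are placeholders, and the `BASE_TS` markers are the relative-date form described in the sales hunk below; actual seed file contents are not part of this diff:

```python
# Hypothetical example of a JSON seed-data record, shown as a Python dict.
expiring_stock_seed = {
    "id": "00000000-0000-0000-0000-000000000001",             # placeholder
    "ingredient_id": "00000000-0000-0000-0000-000000000002",  # placeholder
    "batch_number": "EDGE-EXPIRING",
    "quantity": 25.0,
    "received_date": "BASE_TS-12d",    # received 12 days before the session
    "expiration_date": "BASE_TS+2d",   # expires in 2 days -> proximity alert
    "quality_grade": "GOOD",
}
```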
@@ -462,7 +424,7 @@ async def clone_demo_data_internal(
         records_cloned=records_cloned,
         duration_ms=duration_ms,
         ingredients_cloned=len(seed_data.get('ingredients', [])),
-        stock_batches_cloned=len(seed_data.get('stock_batches', []))
+        stock_batches_cloned=len(seed_data.get('stock', []))
     )
 
     return {
@@ -472,7 +434,7 @@ async def clone_demo_data_internal(
         "duration_ms": duration_ms,
         "details": {
             "ingredients": len(seed_data.get('ingredients', [])),
-            "stock_batches": len(seed_data.get('stock_batches', [])),
+            "stock": len(seed_data.get('stock', [])),
             "virtual_tenant_id": str(virtual_tenant_id)
         }
     }
@@ -157,7 +157,7 @@ async def trigger_safety_stock_optimization(
 
     try:
         # Fetch sales data for this product
-        sales_response = await sales_client.get_sales_by_product(
+        sales_response = await sales_client.get_sales_data(
             tenant_id=tenant_id,
             product_id=product_id,
             start_date=start_date.strftime('%Y-%m-%d'),
@@ -212,6 +212,9 @@ class DashboardService:
         ingredients = await repos['ingredient_repo'].get_ingredients_by_tenant(tenant_id, limit=1000)
         stock_summary = await repos['stock_repo'].get_stock_summary_by_tenant(tenant_id)
 
+        # Get dashboard repository
+        dashboard_repo = repos['dashboard_repo']
+
         # Get current stock levels for all ingredients using repository
         ingredient_stock_levels = {}
         try:
@@ -693,6 +696,9 @@ class DashboardService:
         try:
             repos = self._get_repositories(db)
 
+            # Get dashboard repository
+            dashboard_repo = repos['dashboard_repo']
+
             # Get stock summary for total costs
             stock_summary = await repos['stock_repo'].get_stock_summary_by_tenant(tenant_id)
             total_inventory_cost = Decimal(str(stock_summary['total_stock_value']))
@@ -703,7 +709,7 @@ class DashboardService:
             # Get current stock levels for all ingredients using repository
             ingredient_stock_levels = {}
             try:
-                ingredient_stock_levels = await repos['dashboard_repo'].get_ingredient_stock_levels(tenant_id)
+                ingredient_stock_levels = await dashboard_repo.get_ingredient_stock_levels(tenant_id)
             except Exception as e:
                 logger.warning(f"Could not fetch current stock levels for cost analysis: {e}")
 
@@ -199,9 +199,14 @@ class InventoryScheduler:
                     alerts_generated += 1
 
             except Exception as e:
+                # Ensure ingredient_id is converted to string for logging to prevent UUID issues
+                ingredient_id_val = shortage.get("ingredient_id", "unknown")
+                if hasattr(ingredient_id_val, '__str__') and not isinstance(ingredient_id_val, str):
+                    ingredient_id_val = str(ingredient_id_val)
+
                 logger.error(
                     "Error emitting critical stock shortage alert",
-                    ingredient_id=shortage.get("ingredient_id", "unknown"),
+                    ingredient_id=ingredient_id_val,
                     error=str(e)
                 )
                 continue
@@ -531,10 +536,15 @@ class InventoryScheduler:
                     alerts_generated += 1
 
             except Exception as e:
+                # Ensure ingredient_id is converted to string for logging to prevent UUID issues
+                ingredient_id_val = shortage.get("id", "unknown")
+                if hasattr(ingredient_id_val, '__str__') and not isinstance(ingredient_id_val, str):
+                    ingredient_id_val = str(ingredient_id_val)
+
                 logger.error(
                     "Error emitting critical stock shortage alert",
                     tenant_id=str(tenant_id),
-                    ingredient_id=shortage.get("id", "unknown"),
+                    ingredient_id=ingredient_id_val,
                     error=str(e)
                 )
                 continue
@@ -744,10 +754,19 @@ class InventoryScheduler:
                     alerts_generated += 1
 
             except Exception as e:
+                # Ensure ingredient_id and tenant_id are converted to strings for logging to prevent UUID issues
+                ingredient_id_val = shortage.get("id", "unknown")
+                if hasattr(ingredient_id_val, '__str__') and not isinstance(ingredient_id_val, str):
+                    ingredient_id_val = str(ingredient_id_val)
+
+                tenant_id_val = shortage.get("tenant_id", "unknown")
+                if hasattr(tenant_id_val, '__str__') and not isinstance(tenant_id_val, str):
+                    tenant_id_val = str(tenant_id_val)
+
                 logger.error(
                     "Error emitting critical stock shortage alert",
-                    ingredient_id=shortage.get("id", "unknown"),
-                    tenant_id=shortage.get("tenant_id", "unknown"),
+                    ingredient_id=ingredient_id_val,
+                    tenant_id=tenant_id_val,
                     error=str(e)
                 )
                 continue
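Note: the three scheduler hunks above repeat the same inline coercion. Since every Python object defines `__str__`, the `hasattr` check is effectively always true, so the logic reduces to "stringify unless already a string" — captured by this illustrative helper (not present in the codebase; the diff deliberately inlines it at each call site):

```python
def as_log_str(value) -> str:
    """Coerce UUIDs (or anything else) to str so structured-logging
    encoders never receive a non-serializable type."""
    return value if isinstance(value, str) else str(value)
```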
@@ -23,7 +23,7 @@ from app.models.production import (
     EquipmentStatus, EquipmentType
 )
 from shared.utils.demo_dates import (
-    adjust_date_for_demo, resolve_time_marker, calculate_edge_case_times
+    adjust_date_for_demo, resolve_time_marker
 )
 
 from app.core.config import settings
@@ -625,142 +625,17 @@ async def clone_demo_data(
             db.add(new_capacity)
             stats["production_capacity"] += 1
 
-    # Add deterministic edge case batches
-    edge_times = calculate_edge_case_times(session_time)
-
-    # Get a sample product_id from existing batches for edge cases
-    sample_product_id = None
-    if seed_data.get('batches'):
-        sample_product_id = seed_data['batches'][0].get('product_id')
-
-    if sample_product_id:
-        # Edge Case 1: Overdue Batch (should have started 2 hours ago)
-        overdue_batch = ProductionBatch(
-            id=str(uuid.uuid4()),
-            tenant_id=virtual_uuid,
-            batch_number=f"{session_id[:8]}-EDGE-OVERDUE",
-            product_id=sample_product_id,
-            product_name="Pan Integral (Edge Case)",
-            planned_start_time=edge_times["overdue_batch_planned_start"],
-            planned_end_time=edge_times["overdue_batch_planned_start"] + timedelta(hours=3),
-            planned_quantity=50.0,
-            planned_duration_minutes=180,
-            actual_start_time=None,
-            actual_end_time=None,
-            actual_quantity=None,
-            status=ProductionStatus.PENDING,
-            priority=ProductionPriority.URGENT,
-            current_process_stage=None,
-            production_notes="⚠️ EDGE CASE: Should have started 2 hours ago - triggers yellow alert for delayed production",
-            created_at=session_time,
-            updated_at=session_time
-        )
-        db.add(overdue_batch)
-        stats["batches"] += 1
-
-        # Edge Case 2: In-Progress Batch (started 1h45m ago)
-        in_progress_batch = ProductionBatch(
-            id=str(uuid.uuid4()),
-            tenant_id=virtual_uuid,
-            batch_number=f"{session_id[:8]}-EDGE-INPROGRESS",
-            product_id=sample_product_id,
-            product_name="Croissant de Mantequilla (Edge Case)",
-            planned_start_time=edge_times["in_progress_batch_actual_start"],
-            planned_end_time=edge_times["upcoming_batch_planned_start"],
-            planned_quantity=100.0,
-            planned_duration_minutes=195,
-            actual_start_time=edge_times["in_progress_batch_actual_start"],
-            actual_end_time=None,
-            actual_quantity=None,
-            status=ProductionStatus.IN_PROGRESS,
-            priority=ProductionPriority.HIGH,
-            current_process_stage=ProcessStage.BAKING,
-            production_notes="⚠️ EDGE CASE: Currently in progress - visible in active production dashboard",
-            created_at=session_time,
-            updated_at=session_time
-        )
-        db.add(in_progress_batch)
-        stats["batches"] += 1
-
-        # Edge Case 3: Upcoming Batch (starts in 1.5 hours)
-        upcoming_batch = ProductionBatch(
-            id=str(uuid.uuid4()),
-            tenant_id=virtual_uuid,
-            batch_number=f"{session_id[:8]}-EDGE-UPCOMING",
-            product_id=sample_product_id,
-            product_name="Baguette Tradicional (Edge Case)",
-            planned_start_time=edge_times["upcoming_batch_planned_start"],
-            planned_end_time=edge_times["upcoming_batch_planned_start"] + timedelta(hours=2),
-            planned_quantity=75.0,
-            planned_duration_minutes=120,
-            actual_start_time=None,
-            actual_end_time=None,
-            actual_quantity=None,
-            status=ProductionStatus.PENDING,
-            priority=ProductionPriority.MEDIUM,
-            current_process_stage=None,
-            production_notes="⚠️ EDGE CASE: Starting in 1.5 hours - visible in upcoming production schedule",
-            created_at=session_time,
-            updated_at=session_time
-        )
-        db.add(upcoming_batch)
-        stats["batches"] += 1
-
-        # Edge Case 4: Evening Batch (starts at 17:00 today)
-        evening_batch = ProductionBatch(
-            id=str(uuid.uuid4()),
-            tenant_id=virtual_uuid,
-            batch_number=f"{session_id[:8]}-EDGE-EVENING",
-            product_id=sample_product_id,
-            product_name="Pan de Molde (Edge Case)",
-            planned_start_time=edge_times["evening_batch_planned_start"],
-            planned_end_time=edge_times["evening_batch_planned_start"] + timedelta(hours=2, minutes=30),
-            planned_quantity=60.0,
-            planned_duration_minutes=150,
-            actual_start_time=None,
-            actual_end_time=None,
-            actual_quantity=None,
-            status=ProductionStatus.PENDING,
-            priority=ProductionPriority.MEDIUM,
-            current_process_stage=None,
-            production_notes="⚠️ EDGE CASE: Evening shift production - scheduled for 17:00",
-            created_at=session_time,
-            updated_at=session_time
-        )
-        db.add(evening_batch)
-        stats["batches"] += 1
-
-        # Edge Case 5: Tomorrow Morning Batch (starts at 05:00 tomorrow)
-        tomorrow_batch = ProductionBatch(
-            id=str(uuid.uuid4()),
-            tenant_id=virtual_uuid,
-            batch_number=f"{session_id[:8]}-EDGE-TOMORROW",
-            product_id=sample_product_id,
-            product_name="Bollería Variada (Edge Case)",
-            planned_start_time=edge_times["tomorrow_morning_planned_start"],
-            planned_end_time=edge_times["tomorrow_morning_planned_start"] + timedelta(hours=4),
-            planned_quantity=120.0,
-            planned_duration_minutes=240,
-            actual_start_time=None,
-            actual_end_time=None,
-            actual_quantity=None,
-            status=ProductionStatus.PENDING,
-            priority=ProductionPriority.MEDIUM,
-            current_process_stage=None,
-            production_notes="⚠️ EDGE CASE: Tomorrow morning production - scheduled for 05:00",
-            created_at=session_time,
-            updated_at=session_time
-        )
-        db.add(tomorrow_batch)
-        stats["batches"] += 1
-
-        logger.info(
-            "Added deterministic edge case batches",
-            edge_cases_added=5,
-            overdue=edge_times["overdue_batch_planned_start"].isoformat(),
-            in_progress=edge_times["in_progress_batch_actual_start"].isoformat(),
-            upcoming=edge_times["upcoming_batch_planned_start"].isoformat()
-        )
+    # Note: Edge cases are now handled exclusively through JSON seed data
+    # The seed data files already contain comprehensive edge cases including:
+    # - Overdue batches (should have started 2 hours ago)
+    # - In-progress batches (currently being processed)
+    # - Upcoming batches (scheduled for later today/tomorrow)
+    # This ensures standardization and single source of truth for demo data
+
+    logger.info(
+        "Edge cases handled by JSON seed data - no manual creation needed",
+        seed_data_edge_cases="overdue_batches, in_progress_batches, upcoming_batches"
+    )
 
     # Commit cloned data
     await db.commit()
@@ -199,9 +199,9 @@ async def clone_demo_data(
     for sale_data in seed_data.get('sales_data', []):
         # Parse date field (supports BASE_TS markers and ISO timestamps)
         adjusted_date = parse_date_field(
-            sale_data.get('sale_date'),
+            sale_data.get('sales_date'),
             session_time,
-            "sale_date"
+            "sales_date"
         )
 
         # Create new sales record with adjusted date
@@ -210,14 +210,14 @@ async def clone_demo_data(
             tenant_id=virtual_uuid,
             date=adjusted_date,
             inventory_product_id=sale_data.get('product_id'),  # Use product_id from seed data
-            quantity_sold=sale_data.get('quantity_sold', 0.0),
+            quantity_sold=sale_data.get('quantity', 0.0),  # Map quantity to quantity_sold
             unit_price=sale_data.get('unit_price', 0.0),
-            revenue=sale_data.get('total_revenue', 0.0),
+            revenue=sale_data.get('total_amount', 0.0),  # Map total_amount to revenue
             cost_of_goods=sale_data.get('cost_of_goods', 0.0),
             discount_applied=sale_data.get('discount_applied', 0.0),
             location_id=sale_data.get('location_id'),
             sales_channel=sale_data.get('sales_channel', 'IN_STORE'),
-            source="demo_seed",  # Mark as seeded
+            source="demo_clone",  # Mark as seeded
             is_validated=sale_data.get('is_validated', True),
             validation_notes=sale_data.get('validation_notes'),
             notes=sale_data.get('notes'),
@@ -101,13 +101,30 @@ class TenantMemberRepository(TenantBaseRepository):
 
         # For internal service access, return None to indicate no user membership
         # Service access should be handled at the API layer
-        if not is_valid_uuid and is_internal_service(user_id):
-            # This is an internal service request, return None
-            # Service access is granted at the API endpoint level
-            logger.debug("Internal service detected in membership lookup",
-                         service=user_id,
-                         tenant_id=tenant_id)
-            return None
+        if not is_valid_uuid:
+            if is_internal_service(user_id):
+                # This is a known internal service request, return None
+                # Service access is granted at the API endpoint level
+                logger.debug("Internal service detected in membership lookup",
+                             service=user_id,
+                             tenant_id=tenant_id)
+                return None
+            elif user_id == "unknown-service":
+                # Special handling for 'unknown-service' which commonly occurs in demo sessions
+                # This happens when service identification fails during demo operations
+                logger.warning("Demo session service identification issue",
+                               service=user_id,
+                               tenant_id=tenant_id,
+                               message="Service not properly identified - likely demo session context")
+                return None
+            else:
+                # This is an unknown service
+                # Return None to prevent database errors, but log a warning
+                logger.warning("Unknown service detected in membership lookup",
+                               service=user_id,
+                               tenant_id=tenant_id,
+                               message="Service not in internal services registry")
+                return None
 
         memberships = await self.get_multi(
             filters={
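Note: `is_internal_service` is imported from elsewhere in the codebase and not shown in this diff. A minimal sketch, assuming a static registry of known service names (the names below are made up for illustration):

```python
# Hypothetical sketch - the real registry and lookup live elsewhere.
KNOWN_INTERNAL_SERVICES = {"inventory-service", "forecasting-service", "demo-service"}

def is_internal_service(caller_id: str) -> bool:
    """True when the caller identifier names a registered internal service
    rather than a user UUID."""
    return caller_id in KNOWN_INTERNAL_SERVICES
```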