demo seed change 2
@@ -9,13 +9,14 @@ from typing import Optional
 import structlog
 import json
 from pathlib import Path
-from datetime import datetime
+from datetime import datetime, timezone, timedelta
 import uuid
 from uuid import UUID

 from app.core.database import get_db
 from app.core.config import settings
 from app.models import Ingredient, Stock, ProductType
 from shared.utils.demo_dates import adjust_date_for_demo, resolve_time_marker, calculate_edge_case_times

 logger = structlog.get_logger()
 router = APIRouter()
@@ -30,6 +31,52 @@ async def verify_internal_api_key(x_internal_api_key: str = Header(None)):
     return True


+def parse_date_field(date_value, session_time: datetime, field_name: str = "date") -> Optional[datetime]:
+    """
+    Parse date field, handling both ISO strings and BASE_TS markers.
+
+    Supports:
+    - BASE_TS markers: "BASE_TS + 1h30m", "BASE_TS - 2d"
+    - ISO 8601 strings: "2025-01-15T06:00:00Z"
+    - None values (returns None)
+
+    Returns timezone-aware datetime or None.
+    """
+    if not date_value:
+        return None
+
+    # Check if it's a BASE_TS marker
+    if isinstance(date_value, str) and date_value.startswith("BASE_TS"):
+        try:
+            return resolve_time_marker(date_value, session_time)
+        except ValueError as e:
+            logger.warning(
+                f"Invalid BASE_TS marker in {field_name}",
+                marker=date_value,
+                error=str(e)
+            )
+            return None
+
+    # Handle regular ISO date strings
+    try:
+        if isinstance(date_value, str):
+            original_date = datetime.fromisoformat(date_value.replace('Z', '+00:00'))
+        elif hasattr(date_value, 'isoformat'):
+            original_date = date_value
+        else:
+            logger.warning(f"Unsupported date format in {field_name}", date_value=date_value)
+            return None
+
+        return adjust_date_for_demo(original_date, session_time)
+    except (ValueError, AttributeError) as e:
+        logger.warning(
+            f"Invalid date format in {field_name}",
+            date_value=date_value,
+            error=str(e)
+        )
+        return None
+
+
 @router.post("/internal/demo/clone")
 async def clone_demo_data_internal(
     base_tenant_id: str,
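
A minimal usage sketch of the new helper, assuming the module context above; the session_time anchor below is made up for illustration, and resolve_time_marker / adjust_date_for_demo are assumed to behave as the docstring describes:

    # Illustrative only: a fabricated, timezone-aware session anchor.
    session_time = datetime(2025, 6, 1, 8, 0, tzinfo=timezone.utc)

    # BASE_TS marker -> delegated to resolve_time_marker(marker, session_time)
    parse_date_field("BASE_TS + 1h30m", session_time, "received_date")

    # ISO 8601 string -> parsed, then shifted with adjust_date_for_demo(...)
    parse_date_field("2025-01-15T06:00:00Z", session_time, "expiration_date")

    # Missing value -> None (callers below fall back with `... or session_time`)
    parse_date_field(None, session_time)
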
@@ -56,14 +103,14 @@ async def clone_demo_data_internal(
         session_id: Originating session ID for tracing
         session_created_at: Session creation timestamp for date adjustment
         db: Database session

     Returns:
         Dictionary with cloning results

     Raises:
         HTTPException: On validation or cloning errors
     """
-    start_time = datetime.now()
+    start_time = datetime.now(timezone.utc)

     try:
         # Validate UUIDs
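
The datetime.now() to datetime.now(timezone.utc) changes in this and the following hunks are not cosmetic: naive and aware datetimes cannot be mixed in arithmetic or comparisons, which is exactly what the duration_ms computation later in the endpoint does. A small self-contained illustration:

    from datetime import datetime, timezone

    naive = datetime.now()               # no tzinfo
    aware = datetime.now(timezone.utc)   # tzinfo=UTC

    try:
        _ = aware - naive
    except TypeError as exc:
        # TypeError: can't subtract offset-naive and offset-aware datetimes
        print(exc)

    # With both sides aware, the endpoint's duration math is safe:
    start_time = datetime.now(timezone.utc)
    duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
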
@@ -106,9 +153,9 @@ async def clone_demo_data_internal(
             try:
                 session_created_at_parsed = datetime.fromisoformat(session_created_at.replace('Z', '+00:00'))
             except (ValueError, AttributeError):
-                session_created_at_parsed = datetime.now()
+                session_created_at_parsed = datetime.now(timezone.utc)
         else:
-            session_created_at_parsed = datetime.now()
+            session_created_at_parsed = datetime.now(timezone.utc)

         # Determine profile based on demo_account_type
         if demo_account_type == "enterprise":
@@ -195,37 +242,13 @@ async def clone_demo_data_internal(
                     detail=f"Invalid UUID format in ingredient data: {str(e)}"
                 )

-            # Transform dates
-            from shared.utils.demo_dates import adjust_date_for_demo
-            for date_field in ['expiration_date', 'received_date', 'created_at', 'updated_at']:
-                if date_field in ingredient_data:
-                    try:
-                        date_value = ingredient_data[date_field]
-                        # Handle both string dates and date objects
-                        if isinstance(date_value, str):
-                            original_date = datetime.fromisoformat(date_value)
-                        elif hasattr(date_value, 'isoformat'):
-                            # Already a date/datetime object
-                            original_date = date_value
-                        else:
-                            # Skip if not a valid date format
-                            logger.warning("Skipping invalid date format",
-                                           date_field=date_field,
-                                           date_value=date_value)
-                            continue
-
-                        adjusted_date = adjust_date_for_demo(
-                            original_date,
-                            session_created_at_parsed
-                        )
-                        ingredient_data[date_field] = adjusted_date
-                    except (ValueError, AttributeError) as e:
-                        logger.warning("Failed to parse date, skipping",
-                                       date_field=date_field,
-                                       date_value=ingredient_data[date_field],
-                                       error=str(e))
-                        # Remove invalid date to avoid model errors
-                        ingredient_data.pop(date_field, None)
+            # Transform dates using standardized helper
+            ingredient_data['created_at'] = parse_date_field(
+                ingredient_data.get('created_at'), session_time, 'created_at'
+            ) or session_time
+            ingredient_data['updated_at'] = parse_date_field(
+                ingredient_data.get('updated_at'), session_time, 'updated_at'
+            ) or session_time

             # Map category field to ingredient_category enum
             if 'category' in ingredient_data:
@@ -252,14 +275,27 @@ async def clone_demo_data_internal(
                     'bags': UnitOfMeasure.BAGS,
                     'boxes': UnitOfMeasure.BOXES
                 }

+                # Also support uppercase versions
+                unit_mapping.update({
+                    'KILOGRAMS': UnitOfMeasure.KILOGRAMS,
+                    'GRAMS': UnitOfMeasure.GRAMS,
+                    'LITERS': UnitOfMeasure.LITERS,
+                    'MILLILITERS': UnitOfMeasure.MILLILITERS,
+                    'UNITS': UnitOfMeasure.UNITS,
+                    'PIECES': UnitOfMeasure.PIECES,
+                    'PACKAGES': UnitOfMeasure.PACKAGES,
+                    'BAGS': UnitOfMeasure.BAGS,
+                    'BOXES': UnitOfMeasure.BOXES
+                })
+
                 unit_str = ingredient_data['unit_of_measure']
                 if unit_str in unit_mapping:
                     ingredient_data['unit_of_measure'] = unit_mapping[unit_str]
                 else:
                     # Default to units if not found
                     ingredient_data['unit_of_measure'] = UnitOfMeasure.UNITS
-                    logger.warning("Unknown unit_of_measure, defaulting to UNITS",
+                    logger.warning("Unknown unit_of_measure, defaulting to UNITS",
                                    original_unit=unit_str)

             # Note: All seed data fields now match the model schema exactly
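
The uppercase entries double the size of unit_mapping; an alternative (a sketch only, with a stand-in enum, since the real UnitOfMeasure is not part of this excerpt) is to lowercase the incoming value once and keep a single set of keys, with the same UNITS fallback:

    from enum import Enum

    class UnitOfMeasure(Enum):  # stand-in for the app's enum; members assumed
        KILOGRAMS = "kilograms"
        UNITS = "units"

    unit_mapping = {
        'kilograms': UnitOfMeasure.KILOGRAMS,
        'units': UnitOfMeasure.UNITS,
        # ... remaining lowercase entries
    }

    ingredient_data = {'unit_of_measure': 'KILOGRAMS'}  # fabricated sample record
    unit_str = str(ingredient_data['unit_of_measure']).strip().lower()
    ingredient_data['unit_of_measure'] = unit_mapping.get(unit_str, UnitOfMeasure.UNITS)

This keeps the existing default-to-UNITS behaviour, though the warning log would need to be re-added around the .get() fallback.
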
@@ -302,46 +338,22 @@ async def clone_demo_data_internal(
                         original_id=stock_id_string,
                         generated_id=str(transformed_id))

-            # Transform dates - handle both timestamp dictionaries and ISO strings
-            for date_field in ['received_date', 'expiration_date', 'best_before_date', 'original_expiration_date', 'transformation_date', 'final_expiration_date', 'created_at', 'updated_at']:
-                if date_field in stock_data:
-                    try:
-                        date_value = stock_data[date_field]
-
-                        # Handle timestamp dictionaries (offset_days, hour, minute)
-                        if isinstance(date_value, dict) and 'offset_days' in date_value:
-                            from shared.utils.demo_dates import calculate_demo_datetime
-                            original_date = calculate_demo_datetime(
-                                offset_days=date_value.get('offset_days', 0),
-                                hour=date_value.get('hour', 0),
-                                minute=date_value.get('minute', 0),
-                                session_created_at=session_created_at_parsed
-                            )
-                        elif isinstance(date_value, str):
-                            # ISO string
-                            original_date = datetime.fromisoformat(date_value)
-                        elif hasattr(date_value, 'isoformat'):
-                            # Already a date/datetime object
-                            original_date = date_value
-                        else:
-                            # Skip if not a valid date format
-                            logger.warning("Skipping invalid date format",
-                                           date_field=date_field,
-                                           date_value=date_value)
-                            continue
-
-                        adjusted_stock_date = adjust_date_for_demo(
-                            original_date,
-                            session_created_at_parsed
-                        )
-                        stock_data[date_field] = adjusted_stock_date
-                    except (ValueError, AttributeError) as e:
-                        logger.warning("Failed to parse date, skipping",
-                                       date_field=date_field,
-                                       date_value=stock_data[date_field],
-                                       error=str(e))
-                        # Remove invalid date to avoid model errors
-                        stock_data.pop(date_field, None)
+            # Transform dates using standardized helper
+            stock_data['received_date'] = parse_date_field(
+                stock_data.get('received_date'), session_time, 'received_date'
+            )
+            stock_data['expiration_date'] = parse_date_field(
+                stock_data.get('expiration_date'), session_time, 'expiration_date'
+            )
+            stock_data['best_before_date'] = parse_date_field(
+                stock_data.get('best_before_date'), session_time, 'best_before_date'
+            )
+            stock_data['created_at'] = parse_date_field(
+                stock_data.get('created_at'), session_time, 'created_at'
+            ) or session_time
+            stock_data['updated_at'] = parse_date_field(
+                stock_data.get('updated_at'), session_time, 'updated_at'
+            ) or session_time

             # Remove original id and tenant_id from stock_data to avoid conflict
             stock_data.pop('id', None)
@@ -356,9 +368,93 @@ async def clone_demo_data_internal(
             db.add(stock)
             records_cloned += 1

+        # Add deterministic edge case stock records
+        edge_times = calculate_edge_case_times(session_time)
+
+        # Get sample ingredients for edge cases (flour and dairy)
+        flour_ingredient_id = None
+        dairy_ingredient_id = None
+        for ing in seed_data.get('ingredients', []):
+            if ing.get('ingredient_category') == 'FLOUR' and not flour_ingredient_id and 'id' in ing:
+                from shared.utils.demo_id_transformer import transform_id
+                flour_ingredient_id = str(transform_id(ing['id'], UUID(virtual_tenant_id)))
+            elif ing.get('ingredient_category') == 'DAIRY' and not dairy_ingredient_id and 'id' in ing:
+                from shared.utils.demo_id_transformer import transform_id
+                dairy_ingredient_id = str(transform_id(ing['id'], UUID(virtual_tenant_id)))
+
+        # Edge Case 1: Expiring Soon Stock (expires in 2 days)
+        if flour_ingredient_id:
+            expiring_stock = Stock(
+                id=str(uuid.uuid4()),
+                tenant_id=str(virtual_tenant_id),
+                inventory_product_id=flour_ingredient_id,
+                batch_number=f"{session_id[:8]}-EDGE-EXPIRING",
+                quantity=25.0,
+                received_date=session_time - timedelta(days=12),
+                expiration_date=session_time + timedelta(days=2),
+                best_before_date=session_time + timedelta(days=2),
+                supplier_id=None,
+                purchase_order_id=None,
+                lot_number=f"LOT-EXPIRING-{session_id[:8]}",
+                storage_location="Almacén A - Estante 3",
+                quality_grade="GOOD",
+                notes="⚠️ EDGE CASE: Expires in 2 days - triggers orange 'Caducidad próxima' alert"
+            )
+            db.add(expiring_stock)
+            records_cloned += 1
+
+        # Edge Case 2: Low Stock (below reorder point)
+        if dairy_ingredient_id:
+            low_stock = Stock(
+                id=str(uuid.uuid4()),
+                tenant_id=str(virtual_tenant_id),
+                inventory_product_id=dairy_ingredient_id,
+                batch_number=f"{session_id[:8]}-EDGE-LOWSTOCK",
+                quantity=3.0,
+                received_date=session_time - timedelta(days=5),
+                expiration_date=session_time + timedelta(days=10),
+                best_before_date=session_time + timedelta(days=10),
+                supplier_id=None,
+                purchase_order_id=None,
+                lot_number=f"LOT-LOWSTOCK-{session_id[:8]}",
+                storage_location="Cámara Fría 1",
+                quality_grade="GOOD",
+                notes="⚠️ EDGE CASE: Below reorder point - triggers inventory alert if no pending PO"
+            )
+            db.add(low_stock)
+            records_cloned += 1
+
+        # Edge Case 3: Just Received Stock (received today)
+        if flour_ingredient_id:
+            fresh_stock = Stock(
+                id=str(uuid.uuid4()),
+                tenant_id=str(virtual_tenant_id),
+                inventory_product_id=flour_ingredient_id,
+                batch_number=f"{session_id[:8]}-EDGE-FRESH",
+                quantity=200.0,
+                received_date=session_time - timedelta(hours=2),
+                expiration_date=session_time + timedelta(days=180),
+                best_before_date=session_time + timedelta(days=180),
+                supplier_id=None,
+                purchase_order_id=None,
+                lot_number=f"LOT-FRESH-{session_id[:8]}",
+                storage_location="Almacén A - Estante 1",
+                quality_grade="EXCELLENT",
+                notes="⚠️ EDGE CASE: Just received 2 hours ago - shows as new stock"
+            )
+            db.add(fresh_stock)
+            records_cloned += 1
+
+        logger.info(
+            "Added deterministic edge case stock records",
+            edge_cases_added=3,
+            expiring_date=(session_time + timedelta(days=2)).isoformat(),
+            low_stock_qty=3.0
+        )
+
         await db.commit()

-        duration_ms = int((datetime.now() - start_time).total_seconds() * 1000)
+        duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)

         logger.info(
             "Inventory data cloned successfully",
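
Because the edge-case batch numbers are derived deterministically from session_id (the "-EDGE-EXPIRING", "-EDGE-LOWSTOCK" and "-EDGE-FRESH" suffixes), a smoke test can look them up directly. A rough sketch, assuming the async SQLAlchemy session used by this service and the Stock model's tenant_id / batch_number columns:

    from sqlalchemy import select

    async def edge_case_batches(db, virtual_tenant_id: str) -> list[str]:
        # Fetch the batch numbers of the deterministic edge-case records.
        result = await db.execute(
            select(Stock.batch_number).where(
                Stock.tenant_id == str(virtual_tenant_id),
                Stock.batch_number.like("%-EDGE-%"),
            )
        )
        return list(result.scalars().all())
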
@@ -400,7 +496,7 @@ async def clone_demo_data_internal(
             "service": "inventory",
             "status": "failed",
             "records_cloned": 0,
-            "duration_ms": int((datetime.now() - start_time).total_seconds() * 1000),
+            "duration_ms": int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000),
             "error": str(e)
         }

@@ -428,7 +524,7 @@ async def delete_demo_tenant_data(
     Delete all demo data for a virtual tenant.
     This endpoint is idempotent - safe to call multiple times.
     """
-    start_time = datetime.now()
+    start_time = datetime.now(timezone.utc)

     records_deleted = {
         "ingredients": 0,
@@ -469,7 +565,7 @@ async def delete_demo_tenant_data(
             "status": "deleted",
             "virtual_tenant_id": str(virtual_tenant_id),
             "records_deleted": records_deleted,
-            "duration_ms": int((datetime.now() - start_time).total_seconds() * 1000)
+            "duration_ms": int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
         }

     except Exception as e: