demo seed change 2
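This change standardizes seed-data date handling across the demo clone endpoints: each service gains a parse_date_field helper that resolves "BASE_TS" offset markers against the session creation time, shifts plain ISO 8601 strings into the demo window via adjust_date_for_demo, and returns None for missing values; naive datetime.now() calls move to datetime.now(timezone.utc). A minimal usage sketch of the helper as introduced below (the session time here is illustrative, not part of the commit):

    from datetime import datetime, timezone

    session_time = datetime.now(timezone.utc)  # normally the demo session's creation time

    # BASE_TS markers resolve as offsets from the session time:
    parse_date_field("BASE_TS + 1h30m", session_time, "planned_start_time")
    # ISO strings are shifted into the demo window via adjust_date_for_demo:
    parse_date_field("2025-01-15T06:00:00Z", session_time, "order_date")
    # Missing values pass through:
    parse_date_field(None, session_time)  # -> None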
@@ -13,6 +13,7 @@ from typing import Optional
import os
import sys
from pathlib import Path
import json

# Add shared path
sys.path.insert(0, str(Path(__file__).parent.parent.parent.parent.parent))

@@ -16,7 +16,7 @@ from pathlib import Path
import json

sys.path.insert(0, str(Path(__file__).parent.parent.parent.parent))
-from shared.utils.demo_dates import adjust_date_for_demo, BASE_REFERENCE_DATE
+from shared.utils.demo_dates import adjust_date_for_demo, resolve_time_marker

from app.core.database import get_db
from app.models.forecasts import Forecast, PredictionBatch

@@ -37,6 +37,60 @@ def verify_internal_api_key(x_internal_api_key: Optional[str] = Header(None)):
    return True


def parse_date_field(date_value, session_time: datetime, field_name: str = "date") -> Optional[datetime]:
    """
    Parse date field, handling both ISO strings and BASE_TS markers.

    Supports:
    - BASE_TS markers: "BASE_TS + 1h30m", "BASE_TS - 2d"
    - ISO 8601 strings: "2025-01-15T06:00:00Z"
    - None values (returns None)

    Returns timezone-aware datetime or None.
    """
    if not date_value:
        return None

    # Check if it's a BASE_TS marker
    if isinstance(date_value, str) and date_value.startswith("BASE_TS"):
        try:
            return resolve_time_marker(date_value, session_time)
        except ValueError as e:
            logger.warning(
                f"Invalid BASE_TS marker in {field_name}",
                marker=date_value,
                error=str(e)
            )
            return None

    # Handle regular ISO date strings
    try:
        if isinstance(date_value, str):
            original_date = datetime.fromisoformat(date_value.replace('Z', '+00:00'))
        elif hasattr(date_value, 'isoformat'):
            original_date = date_value
        else:
            logger.warning(f"Unsupported date format in {field_name}", date_value=date_value)
            return None

        return adjust_date_for_demo(original_date, session_time)
    except (ValueError, AttributeError) as e:
        logger.warning(
            f"Invalid date format in {field_name}",
            date_value=date_value,
            error=str(e)
        )
        return None


def align_to_week_start(target_date: datetime) -> datetime:
    """Align forecast date to Monday (start of week)"""
    if target_date:
        days_since_monday = target_date.weekday()
        return target_date - timedelta(days=days_since_monday)
    return target_date


@router.post("/internal/demo/clone")
async def clone_demo_data(
    base_tenant_id: str,

@@ -181,8 +235,7 @@ async def clone_demo_data(

                adjusted_forecast_date = adjust_date_for_demo(
                    original_date,
-                    session_time,
-                    BASE_REFERENCE_DATE
+                    session_time
                )
                forecast_data[date_field] = adjusted_forecast_date
            except (ValueError, AttributeError) as e:

@@ -263,8 +316,7 @@ async def clone_demo_data(

                adjusted_batch_date = adjust_date_for_demo(
                    original_date,
-                    session_time,
-                    BASE_REFERENCE_DATE
+                    session_time
                )
                batch_data[date_field] = adjusted_batch_date
            except (ValueError, AttributeError) as e:

@@ -9,13 +9,14 @@ from typing import Optional
import structlog
import json
from pathlib import Path
-from datetime import datetime
+from datetime import datetime, timezone, timedelta
import uuid
from uuid import UUID

from app.core.database import get_db
from app.core.config import settings
from app.models import Ingredient, Stock, ProductType
from shared.utils.demo_dates import adjust_date_for_demo, resolve_time_marker, calculate_edge_case_times

logger = structlog.get_logger()
router = APIRouter()

@@ -30,6 +31,52 @@ async def verify_internal_api_key(x_internal_api_key: str = Header(None)):
    return True


def parse_date_field(date_value, session_time: datetime, field_name: str = "date") -> Optional[datetime]:
    """
    Parse date field, handling both ISO strings and BASE_TS markers.

    Supports:
    - BASE_TS markers: "BASE_TS + 1h30m", "BASE_TS - 2d"
    - ISO 8601 strings: "2025-01-15T06:00:00Z"
    - None values (returns None)

    Returns timezone-aware datetime or None.
    """
    if not date_value:
        return None

    # Check if it's a BASE_TS marker
    if isinstance(date_value, str) and date_value.startswith("BASE_TS"):
        try:
            return resolve_time_marker(date_value, session_time)
        except ValueError as e:
            logger.warning(
                f"Invalid BASE_TS marker in {field_name}",
                marker=date_value,
                error=str(e)
            )
            return None

    # Handle regular ISO date strings
    try:
        if isinstance(date_value, str):
            original_date = datetime.fromisoformat(date_value.replace('Z', '+00:00'))
        elif hasattr(date_value, 'isoformat'):
            original_date = date_value
        else:
            logger.warning(f"Unsupported date format in {field_name}", date_value=date_value)
            return None

        return adjust_date_for_demo(original_date, session_time)
    except (ValueError, AttributeError) as e:
        logger.warning(
            f"Invalid date format in {field_name}",
            date_value=date_value,
            error=str(e)
        )
        return None


@router.post("/internal/demo/clone")
async def clone_demo_data_internal(
    base_tenant_id: str,

@@ -56,14 +103,14 @@ async def clone_demo_data_internal(
        session_id: Originating session ID for tracing
        session_created_at: Session creation timestamp for date adjustment
        db: Database session

    Returns:
        Dictionary with cloning results

    Raises:
        HTTPException: On validation or cloning errors
    """
-    start_time = datetime.now()
+    start_time = datetime.now(timezone.utc)

    try:
        # Validate UUIDs

@@ -106,9 +153,9 @@ async def clone_demo_data_internal(
            try:
                session_created_at_parsed = datetime.fromisoformat(session_created_at.replace('Z', '+00:00'))
            except (ValueError, AttributeError):
-                session_created_at_parsed = datetime.now()
+                session_created_at_parsed = datetime.now(timezone.utc)
        else:
-            session_created_at_parsed = datetime.now()
+            session_created_at_parsed = datetime.now(timezone.utc)

        # Determine profile based on demo_account_type
        if demo_account_type == "enterprise":

@@ -195,37 +242,13 @@ async def clone_demo_data_internal(
                    detail=f"Invalid UUID format in ingredient data: {str(e)}"
                )

-            # Transform dates
-            from shared.utils.demo_dates import adjust_date_for_demo
-            for date_field in ['expiration_date', 'received_date', 'created_at', 'updated_at']:
-                if date_field in ingredient_data:
-                    try:
-                        date_value = ingredient_data[date_field]
-                        # Handle both string dates and date objects
-                        if isinstance(date_value, str):
-                            original_date = datetime.fromisoformat(date_value)
-                        elif hasattr(date_value, 'isoformat'):
-                            # Already a date/datetime object
-                            original_date = date_value
-                        else:
-                            # Skip if not a valid date format
-                            logger.warning("Skipping invalid date format",
-                                           date_field=date_field,
-                                           date_value=date_value)
-                            continue
-
-                        adjusted_date = adjust_date_for_demo(
-                            original_date,
-                            session_created_at_parsed
-                        )
-                        ingredient_data[date_field] = adjusted_date
-                    except (ValueError, AttributeError) as e:
-                        logger.warning("Failed to parse date, skipping",
-                                       date_field=date_field,
-                                       date_value=ingredient_data[date_field],
-                                       error=str(e))
-                        # Remove invalid date to avoid model errors
-                        ingredient_data.pop(date_field, None)
+            # Transform dates using standardized helper
+            ingredient_data['created_at'] = parse_date_field(
+                ingredient_data.get('created_at'), session_time, 'created_at'
+            ) or session_time
+            ingredient_data['updated_at'] = parse_date_field(
+                ingredient_data.get('updated_at'), session_time, 'updated_at'
+            ) or session_time

            # Map category field to ingredient_category enum
            if 'category' in ingredient_data:

@@ -252,14 +275,27 @@ async def clone_demo_data_internal(
                    'bags': UnitOfMeasure.BAGS,
                    'boxes': UnitOfMeasure.BOXES
                }

                # Also support uppercase versions
                unit_mapping.update({
                    'KILOGRAMS': UnitOfMeasure.KILOGRAMS,
                    'GRAMS': UnitOfMeasure.GRAMS,
                    'LITERS': UnitOfMeasure.LITERS,
                    'MILLILITERS': UnitOfMeasure.MILLILITERS,
                    'UNITS': UnitOfMeasure.UNITS,
                    'PIECES': UnitOfMeasure.PIECES,
                    'PACKAGES': UnitOfMeasure.PACKAGES,
                    'BAGS': UnitOfMeasure.BAGS,
                    'BOXES': UnitOfMeasure.BOXES
                })

                unit_str = ingredient_data['unit_of_measure']
                if unit_str in unit_mapping:
                    ingredient_data['unit_of_measure'] = unit_mapping[unit_str]
                else:
                    # Default to units if not found
                    ingredient_data['unit_of_measure'] = UnitOfMeasure.UNITS
                    logger.warning("Unknown unit_of_measure, defaulting to UNITS",
                                   original_unit=unit_str)

            # Note: All seed data fields now match the model schema exactly

@@ -302,46 +338,22 @@ async def clone_demo_data_internal(
                            original_id=stock_id_string,
                            generated_id=str(transformed_id))

-            # Transform dates - handle both timestamp dictionaries and ISO strings
-            for date_field in ['received_date', 'expiration_date', 'best_before_date', 'original_expiration_date', 'transformation_date', 'final_expiration_date', 'created_at', 'updated_at']:
-                if date_field in stock_data:
-                    try:
-                        date_value = stock_data[date_field]
-
-                        # Handle timestamp dictionaries (offset_days, hour, minute)
-                        if isinstance(date_value, dict) and 'offset_days' in date_value:
-                            from shared.utils.demo_dates import calculate_demo_datetime
-                            original_date = calculate_demo_datetime(
-                                offset_days=date_value.get('offset_days', 0),
-                                hour=date_value.get('hour', 0),
-                                minute=date_value.get('minute', 0),
-                                session_created_at=session_created_at_parsed
-                            )
-                        elif isinstance(date_value, str):
-                            # ISO string
-                            original_date = datetime.fromisoformat(date_value)
-                        elif hasattr(date_value, 'isoformat'):
-                            # Already a date/datetime object
-                            original_date = date_value
-                        else:
-                            # Skip if not a valid date format
-                            logger.warning("Skipping invalid date format",
-                                           date_field=date_field,
-                                           date_value=date_value)
-                            continue
-
-                        adjusted_stock_date = adjust_date_for_demo(
-                            original_date,
-                            session_created_at_parsed
-                        )
-                        stock_data[date_field] = adjusted_stock_date
-                    except (ValueError, AttributeError) as e:
-                        logger.warning("Failed to parse date, skipping",
-                                       date_field=date_field,
-                                       date_value=stock_data[date_field],
-                                       error=str(e))
-                        # Remove invalid date to avoid model errors
-                        stock_data.pop(date_field, None)
+            # Transform dates using standardized helper
+            stock_data['received_date'] = parse_date_field(
+                stock_data.get('received_date'), session_time, 'received_date'
+            )
+            stock_data['expiration_date'] = parse_date_field(
+                stock_data.get('expiration_date'), session_time, 'expiration_date'
+            )
+            stock_data['best_before_date'] = parse_date_field(
+                stock_data.get('best_before_date'), session_time, 'best_before_date'
+            )
+            stock_data['created_at'] = parse_date_field(
+                stock_data.get('created_at'), session_time, 'created_at'
+            ) or session_time
+            stock_data['updated_at'] = parse_date_field(
+                stock_data.get('updated_at'), session_time, 'updated_at'
+            ) or session_time

            # Remove original id and tenant_id from stock_data to avoid conflict
            stock_data.pop('id', None)

@@ -356,9 +368,93 @@ async def clone_demo_data_internal(
            db.add(stock)
            records_cloned += 1

        # Add deterministic edge case stock records
        edge_times = calculate_edge_case_times(session_time)

        # Get sample ingredients for edge cases (flour and dairy)
        flour_ingredient_id = None
        dairy_ingredient_id = None
        for ing in seed_data.get('ingredients', []):
            if ing.get('ingredient_category') == 'FLOUR' and not flour_ingredient_id and 'id' in ing:
                from shared.utils.demo_id_transformer import transform_id
                flour_ingredient_id = str(transform_id(ing['id'], UUID(virtual_tenant_id)))
            elif ing.get('ingredient_category') == 'DAIRY' and not dairy_ingredient_id and 'id' in ing:
                from shared.utils.demo_id_transformer import transform_id
                dairy_ingredient_id = str(transform_id(ing['id'], UUID(virtual_tenant_id)))

        # Edge Case 1: Expiring Soon Stock (expires in 2 days)
        if flour_ingredient_id:
            expiring_stock = Stock(
                id=str(uuid.uuid4()),
                tenant_id=str(virtual_tenant_id),
                inventory_product_id=flour_ingredient_id,
                batch_number=f"{session_id[:8]}-EDGE-EXPIRING",
                quantity=25.0,
                received_date=session_time - timedelta(days=12),
                expiration_date=session_time + timedelta(days=2),
                best_before_date=session_time + timedelta(days=2),
                supplier_id=None,
                purchase_order_id=None,
                lot_number=f"LOT-EXPIRING-{session_id[:8]}",
                storage_location="Almacén A - Estante 3",
                quality_grade="GOOD",
                notes="⚠️ EDGE CASE: Expires in 2 days - triggers orange 'Caducidad próxima' alert"
            )
            db.add(expiring_stock)
            records_cloned += 1

        # Edge Case 2: Low Stock (below reorder point)
        if dairy_ingredient_id:
            low_stock = Stock(
                id=str(uuid.uuid4()),
                tenant_id=str(virtual_tenant_id),
                inventory_product_id=dairy_ingredient_id,
                batch_number=f"{session_id[:8]}-EDGE-LOWSTOCK",
                quantity=3.0,
                received_date=session_time - timedelta(days=5),
                expiration_date=session_time + timedelta(days=10),
                best_before_date=session_time + timedelta(days=10),
                supplier_id=None,
                purchase_order_id=None,
                lot_number=f"LOT-LOWSTOCK-{session_id[:8]}",
                storage_location="Cámara Fría 1",
                quality_grade="GOOD",
                notes="⚠️ EDGE CASE: Below reorder point - triggers inventory alert if no pending PO"
            )
            db.add(low_stock)
            records_cloned += 1

        # Edge Case 3: Just Received Stock (received today)
        if flour_ingredient_id:
            fresh_stock = Stock(
                id=str(uuid.uuid4()),
                tenant_id=str(virtual_tenant_id),
                inventory_product_id=flour_ingredient_id,
                batch_number=f"{session_id[:8]}-EDGE-FRESH",
                quantity=200.0,
                received_date=session_time - timedelta(hours=2),
                expiration_date=session_time + timedelta(days=180),
                best_before_date=session_time + timedelta(days=180),
                supplier_id=None,
                purchase_order_id=None,
                lot_number=f"LOT-FRESH-{session_id[:8]}",
                storage_location="Almacén A - Estante 1",
                quality_grade="EXCELLENT",
                notes="⚠️ EDGE CASE: Just received 2 hours ago - shows as new stock"
            )
            db.add(fresh_stock)
            records_cloned += 1

        logger.info(
            "Added deterministic edge case stock records",
            edge_cases_added=3,
            expiring_date=(session_time + timedelta(days=2)).isoformat(),
            low_stock_qty=3.0
        )

        await db.commit()

-        duration_ms = int((datetime.now() - start_time).total_seconds() * 1000)
+        duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)

        logger.info(
            "Inventory data cloned successfully",

@@ -400,7 +496,7 @@ async def clone_demo_data_internal(
            "service": "inventory",
            "status": "failed",
            "records_cloned": 0,
-            "duration_ms": int((datetime.now() - start_time).total_seconds() * 1000),
+            "duration_ms": int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000),
            "error": str(e)
        }

@@ -428,7 +524,7 @@ async def delete_demo_tenant_data(
    Delete all demo data for a virtual tenant.
    This endpoint is idempotent - safe to call multiple times.
    """
-    start_time = datetime.now()
+    start_time = datetime.now(timezone.utc)

    records_deleted = {
        "ingredients": 0,

@@ -469,7 +565,7 @@ async def delete_demo_tenant_data(
            "status": "deleted",
            "virtual_tenant_id": str(virtual_tenant_id),
            "records_deleted": records_deleted,
-            "duration_ms": int((datetime.now() - start_time).total_seconds() * 1000)
+            "duration_ms": int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
        }

    except Exception as e:

@@ -22,7 +22,7 @@ from pathlib import Path

# Add shared utilities to path
sys.path.insert(0, str(Path(__file__).parent.parent.parent.parent))
-from shared.utils.demo_dates import adjust_date_for_demo, BASE_REFERENCE_DATE
+from shared.utils.demo_dates import adjust_date_for_demo

from app.core.config import settings

@@ -117,7 +117,7 @@ async def clone_demo_data(
        # This calculates the offset from BASE_REFERENCE_DATE and applies it to session creation time
        if base_run.started_at:
            new_started_at = adjust_date_for_demo(
-                base_run.started_at, reference_time, BASE_REFERENCE_DATE
+                base_run.started_at, reference_time
            )
        else:
            new_started_at = reference_time - timedelta(hours=2)

@@ -125,7 +125,7 @@ async def clone_demo_data(
        # Adjust completed_at using the same utility
        if base_run.completed_at:
            new_completed_at = adjust_date_for_demo(
-                base_run.completed_at, reference_time, BASE_REFERENCE_DATE
+                base_run.completed_at, reference_time
            )
            # Ensure completion is after start (in case of edge cases)
            if new_completed_at and new_started_at and new_completed_at < new_started_at:

@@ -139,7 +139,7 @@ async def clone_demo_data(
        def adjust_timestamp(original_timestamp):
            if not original_timestamp:
                return None
-            return adjust_date_for_demo(original_timestamp, reference_time, BASE_REFERENCE_DATE)
+            return adjust_date_for_demo(original_timestamp, reference_time)

        # Create new orchestration run for virtual tenant
        # Update run_number to have current year instead of original year, and make it unique

@@ -12,11 +12,13 @@ from datetime import datetime, timezone, timedelta, date
from typing import Optional
import os
from decimal import Decimal
import json
from pathlib import Path

from app.core.database import get_db
from app.models.order import CustomerOrder, OrderItem
from app.models.customer import Customer
-from shared.utils.demo_dates import adjust_date_for_demo, BASE_REFERENCE_DATE
+from shared.utils.demo_dates import adjust_date_for_demo, resolve_time_marker, get_next_workday

from app.core.config import settings

@@ -35,6 +37,59 @@ def verify_internal_api_key(x_internal_api_key: Optional[str] = Header(None)):
    return True


def parse_date_field(date_value, session_time: datetime, field_name: str = "date") -> Optional[datetime]:
    """
    Parse date field, handling both ISO strings and BASE_TS markers.

    Supports:
    - BASE_TS markers: "BASE_TS + 1h30m", "BASE_TS - 2d"
    - ISO 8601 strings: "2025-01-15T06:00:00Z"
    - None values (returns None)

    Returns timezone-aware datetime or None.
    """
    if not date_value:
        return None

    # Check if it's a BASE_TS marker
    if isinstance(date_value, str) and date_value.startswith("BASE_TS"):
        try:
            return resolve_time_marker(date_value, session_time)
        except ValueError as e:
            logger.warning(
                f"Invalid BASE_TS marker in {field_name}",
                marker=date_value,
                error=str(e)
            )
            return None

    # Handle regular ISO date strings
    try:
        if isinstance(date_value, str):
            original_date = datetime.fromisoformat(date_value.replace('Z', '+00:00'))
        elif hasattr(date_value, 'isoformat'):
            original_date = date_value
        else:
            logger.warning(f"Unsupported date format in {field_name}", date_value=date_value)
            return None

        return adjust_date_for_demo(original_date, session_time)
    except (ValueError, AttributeError) as e:
        logger.warning(
            f"Invalid date format in {field_name}",
            date_value=date_value,
            error=str(e)
        )
        return None


def ensure_workday(target_date: datetime) -> datetime:
    """Ensure delivery date falls on a workday (Monday-Friday)"""
    if target_date and target_date.weekday() >= 5:  # Saturday or Sunday
        return get_next_workday(target_date)
    return target_date


@router.post("/clone")
async def clone_demo_data(
    base_tenant_id: str,

@@ -180,11 +235,11 @@ async def clone_demo_data(
                total_orders=customer_data.get('total_orders', 0),
                total_spent=customer_data.get('total_spent', 0.0),
                average_order_value=customer_data.get('average_order_value', 0.0),
-                last_order_date=adjust_date_for_demo(
-                    datetime.fromisoformat(customer_data['last_order_date'].replace('Z', '+00:00')),
-                    session_time,
-                    BASE_REFERENCE_DATE
-                ) if customer_data.get('last_order_date') else None,
+                last_order_date=parse_date_field(
+                    customer_data.get('last_order_date'),
+                    session_time,
+                    "last_order_date"
+                ),
                created_at=session_time,
                updated_at=session_time
            )

@@ -213,18 +268,18 @@ async def clone_demo_data(
            if customer_id_value:
                customer_id_value = customer_id_map.get(uuid.UUID(customer_id_value), uuid.UUID(customer_id_value))

-            # Adjust dates using demo_dates utility
-            adjusted_order_date = adjust_date_for_demo(
-                datetime.fromisoformat(order_data['order_date'].replace('Z', '+00:00')),
-                session_time,
-                BASE_REFERENCE_DATE
-            ) if order_data.get('order_date') else session_time
-
-            adjusted_requested_delivery = adjust_date_for_demo(
-                datetime.fromisoformat(order_data['requested_delivery_date'].replace('Z', '+00:00')),
-                session_time,
-                BASE_REFERENCE_DATE
-            ) if order_data.get('requested_delivery_date') else None
+            # Parse date fields (supports BASE_TS markers and ISO timestamps)
+            adjusted_order_date = parse_date_field(
+                order_data.get('order_date'),
+                session_time,
+                "order_date"
+            ) or session_time
+
+            adjusted_requested_delivery = parse_date_field(
+                order_data.get('requested_delivery_date'),
+                session_time,
+                "requested_delivery_date"
+            )

            # Create new order from seed data
            new_order = CustomerOrder(

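For reference, the ensure_workday helper above pushes weekend deliveries forward via the shared get_next_workday utility. A Saturday requested_delivery_date therefore lands on the following Monday, assuming get_next_workday skips to the next weekday (implied by the diff but not shown in it):

    from datetime import datetime, timezone

    saturday = datetime(2025, 1, 18, 9, 0, tzinfo=timezone.utc)  # weekday() == 5
    ensure_workday(saturday)  # -> Monday 2025-01-20 09:00 UTC, via get_next_workday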
@@ -12,6 +12,11 @@ class CustomerType(enum.Enum):
    INDIVIDUAL = "individual"
    BUSINESS = "business"
    CENTRAL_BAKERY = "central_bakery"
    RETAIL = "RETAIL"
    WHOLESALE = "WHOLESALE"
    RESTAURANT = "RESTAURANT"
    HOTEL = "HOTEL"
    ENTERPRISE = "ENTERPRISE"


class DeliveryMethod(enum.Enum):

@@ -18,7 +18,7 @@ from app.core.database import get_db
from app.models.procurement_plan import ProcurementPlan, ProcurementRequirement
from app.models.purchase_order import PurchaseOrder, PurchaseOrderItem
from app.models.replenishment import ReplenishmentPlan, ReplenishmentPlanItem
-from shared.utils.demo_dates import adjust_date_for_demo, BASE_REFERENCE_DATE, resolve_time_marker
+from shared.utils.demo_dates import adjust_date_for_demo, resolve_time_marker
from shared.messaging import RabbitMQClient, UnifiedEventPublisher
from sqlalchemy.orm import selectinload
from shared.schemas.reasoning_types import (

@@ -105,11 +105,11 @@ async def clone_demo_data(
        "replenishment_items": 0
    }

-    def parse_date_field(date_value, field_name="date"):
+    def parse_date_field(date_value, session_time, field_name="date"):
        """Parse date field, handling both ISO strings and BASE_TS markers"""
        if not date_value:
            return None

        # Check if it's a BASE_TS marker
        if isinstance(date_value, str) and date_value.startswith("BASE_TS"):
            try:

@@ -121,13 +121,12 @@ async def clone_demo_data(
                    error=str(e)
                )
                return None

        # Handle regular ISO date strings
        try:
            return adjust_date_for_demo(
                datetime.fromisoformat(date_value.replace('Z', '+00:00')),
-                session_time,
-                BASE_REFERENCE_DATE
+                session_time
            )
        except (ValueError, AttributeError) as e:
            logger.warning(

@@ -206,17 +205,17 @@ async def clone_demo_data(
            if 'order_date_offset_days' in po_data:
                adjusted_order_date = session_time + timedelta(days=po_data['order_date_offset_days'])
            else:
-                adjusted_order_date = parse_date_field(po_data.get('order_date'), "order_date") or session_time
+                adjusted_order_date = parse_date_field(po_data.get('order_date'), session_time, "order_date") or session_time

            if 'required_delivery_date_offset_days' in po_data:
                adjusted_required_delivery = session_time + timedelta(days=po_data['required_delivery_date_offset_days'])
            else:
-                adjusted_required_delivery = parse_date_field(po_data.get('required_delivery_date'), "required_delivery_date")
+                adjusted_required_delivery = parse_date_field(po_data.get('required_delivery_date'), session_time, "required_delivery_date")

            if 'estimated_delivery_date_offset_days' in po_data:
                adjusted_estimated_delivery = session_time + timedelta(days=po_data['estimated_delivery_date_offset_days'])
            else:
-                adjusted_estimated_delivery = parse_date_field(po_data.get('estimated_delivery_date'), "estimated_delivery_date")
+                adjusted_estimated_delivery = parse_date_field(po_data.get('estimated_delivery_date'), session_time, "estimated_delivery_date")

            # Calculate expected delivery date (use estimated delivery if not specified separately)
            # FIX: Use current UTC time for future delivery dates

@@ -277,8 +276,8 @@ async def clone_demo_data(
                auto_approved=po_data.get('auto_approved', False),
                auto_approval_rule_id=po_data.get('auto_approval_rule_id') if po_data.get('auto_approval_rule_id') and len(po_data.get('auto_approval_rule_id', '')) >= 32 else None,
                rejection_reason=po_data.get('rejection_reason'),
-                sent_to_supplier_at=parse_date_field(po_data.get('sent_to_supplier_at'), "sent_to_supplier_at"),
-                supplier_confirmation_date=parse_date_field(po_data.get('supplier_confirmation_date'), "supplier_confirmation_date"),
+                sent_to_supplier_at=parse_date_field(po_data.get('sent_to_supplier_at'), session_time, "sent_to_supplier_at"),
+                supplier_confirmation_date=parse_date_field(po_data.get('supplier_confirmation_date'), session_time, "supplier_confirmation_date"),
                supplier_reference=po_data.get('supplier_reference'),
                notes=po_data.get('notes'),
                internal_notes=po_data.get('internal_notes'),

@@ -357,15 +356,15 @@ async def clone_demo_data(
                continue

            # Adjust dates
-            adjusted_plan_date = parse_date_field(plan_data.get('plan_date'), "plan_date")
+            adjusted_plan_date = parse_date_field(plan_data.get('plan_date'), session_time, "plan_date")

            new_plan = ProcurementPlan(
                id=str(transformed_id),
                tenant_id=virtual_uuid,
                plan_number=plan_data.get('plan_number', f"PROC-{uuid.uuid4().hex[:8].upper()}"),
                plan_date=adjusted_plan_date,
-                plan_period_start=parse_date_field(plan_data.get('plan_period_start'), "plan_period_start"),
-                plan_period_end=parse_date_field(plan_data.get('plan_period_end'), "plan_period_end"),
+                plan_period_start=parse_date_field(plan_data.get('plan_period_start'), session_time, "plan_period_start"),
+                plan_period_end=parse_date_field(plan_data.get('plan_period_end'), session_time, "plan_period_end"),
                planning_horizon_days=plan_data.get('planning_horizon_days'),
                status=plan_data.get('status', 'draft'),
                plan_type=plan_data.get('plan_type'),

@@ -396,15 +395,15 @@ async def clone_demo_data(
                continue

            # Adjust dates
-            adjusted_plan_date = parse_date_field(replan_data.get('plan_date'), "plan_date")
+            adjusted_plan_date = parse_date_field(replan_data.get('plan_date'), session_time, "plan_date")

            new_replan = ReplenishmentPlan(
                id=str(transformed_id),
                tenant_id=virtual_uuid,
                plan_number=replan_data.get('plan_number', f"REPL-{uuid.uuid4().hex[:8].upper()}"),
                plan_date=adjusted_plan_date,
-                plan_period_start=parse_date_field(replan_data.get('plan_period_start'), "plan_period_start"),
-                plan_period_end=parse_date_field(replan_data.get('plan_period_end'), "plan_period_end"),
+                plan_period_start=parse_date_field(replan_data.get('plan_period_start'), session_time, "plan_period_start"),
+                plan_period_end=parse_date_field(replan_data.get('plan_period_end'), session_time, "plan_period_end"),
                planning_horizon_days=replan_data.get('planning_horizon_days'),
                status=replan_data.get('status', 'draft'),
                plan_type=replan_data.get('plan_type'),

@@ -22,7 +22,9 @@ from app.models.production import (
    ProductionStatus, ProductionPriority, ProcessStage,
    EquipmentStatus, EquipmentType
)
-from shared.utils.demo_dates import adjust_date_for_demo, BASE_REFERENCE_DATE, resolve_time_marker
+from shared.utils.demo_dates import (
+    adjust_date_for_demo, resolve_time_marker, calculate_edge_case_times
+)

from app.core.config import settings

@@ -107,11 +109,11 @@ async def clone_demo_data(
        "alerts_generated": 0
    }

-    def parse_date_field(date_value, field_name="date"):
+    def parse_date_field(date_value, session_time, field_name="date"):
        """Parse date field, handling both ISO strings and BASE_TS markers"""
        if not date_value:
            return None

        # Check if it's a BASE_TS marker
        if isinstance(date_value, str) and date_value.startswith("BASE_TS"):
            try:

@@ -123,13 +125,12 @@ async def clone_demo_data(
                    error=str(e)
                )
                return None

        # Handle regular ISO date strings
        try:
            return adjust_date_for_demo(
                datetime.fromisoformat(date_value.replace('Z', '+00:00')),
-                session_time,
-                BASE_REFERENCE_DATE
+                session_time
            )
        except (ValueError, AttributeError) as e:
            logger.warning(

@@ -186,31 +187,31 @@ async def clone_demo_data(
                    detail=f"Invalid UUID format in equipment data: {str(e)}"
                )

-            # Adjust dates relative to session creation time
-            adjusted_install_date = adjust_date_for_demo(
-                datetime.fromisoformat(equipment_data['install_date'].replace('Z', '+00:00')),
-                session_time,
-                BASE_REFERENCE_DATE
-            )
-            adjusted_last_maintenance = adjust_date_for_demo(
-                datetime.fromisoformat(equipment_data['last_maintenance_date'].replace('Z', '+00:00')),
-                session_time,
-                BASE_REFERENCE_DATE
-            )
-            adjusted_next_maintenance = adjust_date_for_demo(
-                datetime.fromisoformat(equipment_data['next_maintenance_date'].replace('Z', '+00:00')),
-                session_time,
-                BASE_REFERENCE_DATE
-            )
-            adjusted_created_at = adjust_date_for_demo(
-                datetime.fromisoformat(equipment_data['created_at'].replace('Z', '+00:00')),
-                session_time,
-                BASE_REFERENCE_DATE
-            )
-            adjusted_updated_at = adjust_date_for_demo(
-                datetime.fromisoformat(equipment_data['updated_at'].replace('Z', '+00:00')),
-                session_time,
-                BASE_REFERENCE_DATE
-            )
+            # Parse date fields (supports BASE_TS markers and ISO timestamps)
+            adjusted_install_date = parse_date_field(
+                equipment_data.get('install_date'),
+                session_time,
+                "install_date"
+            )
+            adjusted_last_maintenance = parse_date_field(
+                equipment_data.get('last_maintenance_date'),
+                session_time,
+                "last_maintenance_date"
+            )
+            adjusted_next_maintenance = parse_date_field(
+                equipment_data.get('next_maintenance_date'),
+                session_time,
+                "next_maintenance_date"
+            )
+            adjusted_created_at = parse_date_field(
+                equipment_data.get('created_at'),
+                session_time,
+                "created_at"
+            )
+            adjusted_updated_at = parse_date_field(
+                equipment_data.get('updated_at'),
+                session_time,
+                "updated_at"
+            )

            new_equipment = Equipment(

@@ -313,13 +314,13 @@ async def clone_demo_data(
            batch_id_map[UUID(batch_data['id'])] = transformed_id

            # Adjust dates relative to session creation time
-            adjusted_planned_start = parse_date_field(batch_data.get('planned_start_time'), "planned_start_time")
-            adjusted_planned_end = parse_date_field(batch_data.get('planned_end_time'), "planned_end_time")
-            adjusted_actual_start = parse_date_field(batch_data.get('actual_start_time'), "actual_start_time")
-            adjusted_actual_end = parse_date_field(batch_data.get('actual_end_time'), "actual_end_time")
-            adjusted_completed = parse_date_field(batch_data.get('completed_at'), "completed_at")
-            adjusted_created_at = parse_date_field(batch_data.get('created_at'), "created_at") or session_time
-            adjusted_updated_at = parse_date_field(batch_data.get('updated_at'), "updated_at") or adjusted_created_at
+            adjusted_planned_start = parse_date_field(batch_data.get('planned_start_time'), session_time, "planned_start_time")
+            adjusted_planned_end = parse_date_field(batch_data.get('planned_end_time'), session_time, "planned_end_time")
+            adjusted_actual_start = parse_date_field(batch_data.get('actual_start_time'), session_time, "actual_start_time")
+            adjusted_actual_end = parse_date_field(batch_data.get('actual_end_time'), session_time, "actual_end_time")
+            adjusted_completed = parse_date_field(batch_data.get('completed_at'), session_time, "completed_at")
+            adjusted_created_at = parse_date_field(batch_data.get('created_at'), session_time, "created_at") or session_time
+            adjusted_updated_at = parse_date_field(batch_data.get('updated_at'), session_time, "updated_at") or adjusted_created_at

            # Map status and priority enums
            status_value = batch_data.get('status', 'PENDING')

@@ -418,23 +419,23 @@ async def clone_demo_data(
            if template_id_value:
                template_id_value = template_id_map.get(UUID(template_id_value), UUID(template_id_value))

-            # Adjust check time relative to session creation time
-            adjusted_check_time = adjust_date_for_demo(
-                datetime.fromisoformat(check_data['check_time'].replace('Z', '+00:00')),
-                session_time,
-                BASE_REFERENCE_DATE
-            ) if check_data.get('check_time') else None
-            adjusted_created_at = adjust_date_for_demo(
-                datetime.fromisoformat(check_data['created_at'].replace('Z', '+00:00')),
-                session_time,
-                BASE_REFERENCE_DATE
-            )
-            adjusted_updated_at = adjust_date_for_demo(
-                datetime.fromisoformat(check_data['updated_at'].replace('Z', '+00:00')),
-                session_time,
-                BASE_REFERENCE_DATE
-            ) if check_data.get('updated_at') else adjusted_created_at
+            # Parse date fields (supports BASE_TS markers and ISO timestamps)
+            adjusted_check_time = parse_date_field(
+                check_data.get('check_time'),
+                session_time,
+                "check_time"
+            )
+            adjusted_created_at = parse_date_field(
+                check_data.get('created_at'),
+                session_time,
+                "created_at"
+            )
+            adjusted_updated_at = parse_date_field(
+                check_data.get('updated_at'),
+                session_time,
+                "updated_at"
+            ) or adjusted_created_at

            new_check = QualityCheck(
                id=str(transformed_id),

@@ -485,37 +486,37 @@ async def clone_demo_data(
                              error=str(e))
                continue

-            # Adjust schedule dates relative to session creation time
-            adjusted_schedule_date = adjust_date_for_demo(
-                datetime.fromisoformat(schedule_data['schedule_date'].replace('Z', '+00:00')),
-                session_time,
-                BASE_REFERENCE_DATE
-            ) if schedule_data.get('schedule_date') else None
-            adjusted_shift_start = adjust_date_for_demo(
-                datetime.fromisoformat(schedule_data['shift_start'].replace('Z', '+00:00')),
-                session_time,
-                BASE_REFERENCE_DATE
-            ) if schedule_data.get('shift_start') else None
-            adjusted_shift_end = adjust_date_for_demo(
-                datetime.fromisoformat(schedule_data['shift_end'].replace('Z', '+00:00')),
-                session_time,
-                BASE_REFERENCE_DATE
-            ) if schedule_data.get('shift_end') else None
-            adjusted_finalized = adjust_date_for_demo(
-                datetime.fromisoformat(schedule_data['finalized_at'].replace('Z', '+00:00')),
-                session_time,
-                BASE_REFERENCE_DATE
-            ) if schedule_data.get('finalized_at') else None
-            adjusted_created_at = adjust_date_for_demo(
-                datetime.fromisoformat(schedule_data['created_at'].replace('Z', '+00:00')),
-                session_time,
-                BASE_REFERENCE_DATE
-            )
-            adjusted_updated_at = adjust_date_for_demo(
-                datetime.fromisoformat(schedule_data['updated_at'].replace('Z', '+00:00')),
-                session_time,
-                BASE_REFERENCE_DATE
-            ) if schedule_data.get('updated_at') else adjusted_created_at
+            # Parse date fields (supports BASE_TS markers and ISO timestamps)
+            adjusted_schedule_date = parse_date_field(
+                schedule_data.get('schedule_date'),
+                session_time,
+                "schedule_date"
+            )
+            adjusted_shift_start = parse_date_field(
+                schedule_data.get('shift_start'),
+                session_time,
+                "shift_start"
+            )
+            adjusted_shift_end = parse_date_field(
+                schedule_data.get('shift_end'),
+                session_time,
+                "shift_end"
+            )
+            adjusted_finalized = parse_date_field(
+                schedule_data.get('finalized_at'),
+                session_time,
+                "finalized_at"
+            )
+            adjusted_created_at = parse_date_field(
+                schedule_data.get('created_at'),
+                session_time,
+                "created_at"
+            )
+            adjusted_updated_at = parse_date_field(
+                schedule_data.get('updated_at'),
+                session_time,
+                "updated_at"
+            ) or adjusted_created_at

            new_schedule = ProductionSchedule(
                id=str(transformed_id),

@@ -561,37 +562,37 @@ async def clone_demo_data(
                              error=str(e))
                continue

-            # Adjust capacity dates relative to session creation time
-            adjusted_date = adjust_date_for_demo(
-                datetime.fromisoformat(capacity_data['date'].replace('Z', '+00:00')),
-                session_time,
-                BASE_REFERENCE_DATE
-            ) if capacity_data.get('date') else None
-            adjusted_start_time = adjust_date_for_demo(
-                datetime.fromisoformat(capacity_data['start_time'].replace('Z', '+00:00')),
-                session_time,
-                BASE_REFERENCE_DATE
-            ) if capacity_data.get('start_time') else None
-            adjusted_end_time = adjust_date_for_demo(
-                datetime.fromisoformat(capacity_data['end_time'].replace('Z', '+00:00')),
-                session_time,
-                BASE_REFERENCE_DATE
-            ) if capacity_data.get('end_time') else None
-            adjusted_last_maintenance = adjust_date_for_demo(
-                datetime.fromisoformat(capacity_data['last_maintenance_date'].replace('Z', '+00:00')),
-                session_time,
-                BASE_REFERENCE_DATE
-            ) if capacity_data.get('last_maintenance_date') else None
-            adjusted_created_at = adjust_date_for_demo(
-                datetime.fromisoformat(capacity_data['created_at'].replace('Z', '+00:00')),
-                session_time,
-                BASE_REFERENCE_DATE
-            )
-            adjusted_updated_at = adjust_date_for_demo(
-                datetime.fromisoformat(capacity_data['updated_at'].replace('Z', '+00:00')),
-                session_time,
-                BASE_REFERENCE_DATE
-            ) if capacity_data.get('updated_at') else adjusted_created_at
+            # Parse date fields (supports BASE_TS markers and ISO timestamps)
+            adjusted_date = parse_date_field(
+                capacity_data.get('date'),
+                session_time,
+                "date"
+            )
+            adjusted_start_time = parse_date_field(
+                capacity_data.get('start_time'),
+                session_time,
+                "start_time"
+            )
+            adjusted_end_time = parse_date_field(
+                capacity_data.get('end_time'),
+                session_time,
+                "end_time"
+            )
+            adjusted_last_maintenance = parse_date_field(
+                capacity_data.get('last_maintenance_date'),
+                session_time,
+                "last_maintenance_date"
+            )
+            adjusted_created_at = parse_date_field(
+                capacity_data.get('created_at'),
+                session_time,
+                "created_at"
+            )
+            adjusted_updated_at = parse_date_field(
+                capacity_data.get('updated_at'),
+                session_time,
+                "updated_at"
+            ) or adjusted_created_at

            new_capacity = ProductionCapacity(
                id=str(transformed_id),

@@ -624,6 +625,143 @@ async def clone_demo_data(
            db.add(new_capacity)
            stats["production_capacity"] += 1

        # Add deterministic edge case batches
        edge_times = calculate_edge_case_times(session_time)

        # Get a sample product_id from existing batches for edge cases
        sample_product_id = None
        if seed_data.get('batches'):
            sample_product_id = seed_data['batches'][0].get('product_id')

        if sample_product_id:
            # Edge Case 1: Overdue Batch (should have started 2 hours ago)
            overdue_batch = ProductionBatch(
                id=str(uuid.uuid4()),
                tenant_id=virtual_uuid,
                batch_number=f"{session_id[:8]}-EDGE-OVERDUE",
                product_id=sample_product_id,
                product_name="Pan Integral (Edge Case)",
                planned_start_time=edge_times["overdue_batch_planned_start"],
                planned_end_time=edge_times["overdue_batch_planned_start"] + timedelta(hours=3),
                planned_quantity=50.0,
                planned_duration_minutes=180,
                actual_start_time=None,
                actual_end_time=None,
                actual_quantity=None,
                status=ProductionStatus.PENDING,
                priority=ProductionPriority.URGENT,
                current_process_stage=None,
                production_notes="⚠️ EDGE CASE: Should have started 2 hours ago - triggers yellow alert for delayed production",
                created_at=session_time,
                updated_at=session_time
            )
            db.add(overdue_batch)
            stats["batches"] += 1

            # Edge Case 2: In-Progress Batch (started 1h45m ago)
            in_progress_batch = ProductionBatch(
                id=str(uuid.uuid4()),
                tenant_id=virtual_uuid,
                batch_number=f"{session_id[:8]}-EDGE-INPROGRESS",
                product_id=sample_product_id,
                product_name="Croissant de Mantequilla (Edge Case)",
                planned_start_time=edge_times["in_progress_batch_actual_start"],
                planned_end_time=edge_times["upcoming_batch_planned_start"],
                planned_quantity=100.0,
                planned_duration_minutes=195,
                actual_start_time=edge_times["in_progress_batch_actual_start"],
                actual_end_time=None,
                actual_quantity=None,
                status=ProductionStatus.IN_PROGRESS,
                priority=ProductionPriority.HIGH,
                current_process_stage=ProcessStage.BAKING,
                production_notes="⚠️ EDGE CASE: Currently in progress - visible in active production dashboard",
                created_at=session_time,
                updated_at=session_time
            )
            db.add(in_progress_batch)
            stats["batches"] += 1

            # Edge Case 3: Upcoming Batch (starts in 1.5 hours)
            upcoming_batch = ProductionBatch(
                id=str(uuid.uuid4()),
                tenant_id=virtual_uuid,
                batch_number=f"{session_id[:8]}-EDGE-UPCOMING",
                product_id=sample_product_id,
                product_name="Baguette Tradicional (Edge Case)",
                planned_start_time=edge_times["upcoming_batch_planned_start"],
                planned_end_time=edge_times["upcoming_batch_planned_start"] + timedelta(hours=2),
                planned_quantity=75.0,
                planned_duration_minutes=120,
                actual_start_time=None,
                actual_end_time=None,
                actual_quantity=None,
                status=ProductionStatus.PENDING,
                priority=ProductionPriority.MEDIUM,
                current_process_stage=None,
                production_notes="⚠️ EDGE CASE: Starting in 1.5 hours - visible in upcoming production schedule",
                created_at=session_time,
                updated_at=session_time
            )
            db.add(upcoming_batch)
            stats["batches"] += 1

            # Edge Case 4: Evening Batch (starts at 17:00 today)
            evening_batch = ProductionBatch(
                id=str(uuid.uuid4()),
                tenant_id=virtual_uuid,
                batch_number=f"{session_id[:8]}-EDGE-EVENING",
                product_id=sample_product_id,
                product_name="Pan de Molde (Edge Case)",
                planned_start_time=edge_times["evening_batch_planned_start"],
                planned_end_time=edge_times["evening_batch_planned_start"] + timedelta(hours=2, minutes=30),
                planned_quantity=60.0,
                planned_duration_minutes=150,
                actual_start_time=None,
                actual_end_time=None,
                actual_quantity=None,
                status=ProductionStatus.PENDING,
                priority=ProductionPriority.MEDIUM,
                current_process_stage=None,
                production_notes="⚠️ EDGE CASE: Evening shift production - scheduled for 17:00",
                created_at=session_time,
                updated_at=session_time
            )
            db.add(evening_batch)
            stats["batches"] += 1

            # Edge Case 5: Tomorrow Morning Batch (starts at 05:00 tomorrow)
            tomorrow_batch = ProductionBatch(
                id=str(uuid.uuid4()),
                tenant_id=virtual_uuid,
                batch_number=f"{session_id[:8]}-EDGE-TOMORROW",
                product_id=sample_product_id,
                product_name="Bollería Variada (Edge Case)",
                planned_start_time=edge_times["tomorrow_morning_planned_start"],
                planned_end_time=edge_times["tomorrow_morning_planned_start"] + timedelta(hours=4),
                planned_quantity=120.0,
                planned_duration_minutes=240,
                actual_start_time=None,
                actual_end_time=None,
                actual_quantity=None,
                status=ProductionStatus.PENDING,
                priority=ProductionPriority.MEDIUM,
                current_process_stage=None,
                production_notes="⚠️ EDGE CASE: Tomorrow morning production - scheduled for 05:00",
                created_at=session_time,
                updated_at=session_time
            )
            db.add(tomorrow_batch)
            stats["batches"] += 1

            logger.info(
                "Added deterministic edge case batches",
                edge_cases_added=5,
                overdue=edge_times["overdue_batch_planned_start"].isoformat(),
                in_progress=edge_times["in_progress_batch_actual_start"].isoformat(),
                upcoming=edge_times["upcoming_batch_planned_start"].isoformat()
            )

        # Commit cloned data
        await db.commit()

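The edge-case stock and batch records above all derive their times from calculate_edge_case_times(session_time). The diff shows only the keys this helper must return, so the sketch below is an assumed shape inferred from the batch notes; the offsets and rounding are guesses, not the helper's actual implementation:

    from datetime import datetime, timedelta

    def calculate_edge_case_times(session_time: datetime) -> dict:
        # Hypothetical sketch: deterministic offsets from the session time,
        # matching the "2 hours ago" / "1h45m ago" / "in 1.5 hours" /
        # "17:00 today" / "05:00 tomorrow" notes in the seeded batches.
        return {
            "overdue_batch_planned_start": session_time - timedelta(hours=2),
            "in_progress_batch_actual_start": session_time - timedelta(hours=1, minutes=45),
            "upcoming_batch_planned_start": session_time + timedelta(hours=1, minutes=30),
            "evening_batch_planned_start": session_time.replace(hour=17, minute=0, second=0, microsecond=0),
            "tomorrow_morning_planned_start": (session_time + timedelta(days=1)).replace(hour=5, minute=0, second=0, microsecond=0),
        }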
@@ -17,7 +17,7 @@ import json
from pathlib import Path

sys.path.insert(0, str(Path(__file__).parent.parent.parent.parent))
-from shared.utils.demo_dates import adjust_date_for_demo, BASE_REFERENCE_DATE
+from shared.utils.demo_dates import adjust_date_for_demo, resolve_time_marker

from app.core.database import get_db
from app.models.recipes import (

@@ -34,6 +34,62 @@ router = APIRouter()
DEMO_TENANT_PROFESSIONAL = "a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6"


def parse_date_field(
    field_value: any,
    session_time: datetime,
    field_name: str = "date"
) -> Optional[datetime]:
    """
    Parse a date field from JSON, supporting BASE_TS markers and ISO timestamps.

    Args:
        field_value: The date field value (can be BASE_TS marker, ISO string, or None)
        session_time: Session creation time (timezone-aware UTC)
        field_name: Name of the field (for logging)

    Returns:
        Timezone-aware UTC datetime or None
    """
    if field_value is None:
        return None

    # Handle BASE_TS markers
    if isinstance(field_value, str) and field_value.startswith("BASE_TS"):
        try:
            return resolve_time_marker(field_value, session_time)
        except (ValueError, AttributeError) as e:
            logger.warning(
                "Failed to resolve BASE_TS marker",
                field_name=field_name,
                marker=field_value,
                error=str(e)
            )
            return None

    # Handle ISO timestamps (legacy format - convert to absolute datetime)
    if isinstance(field_value, str) and ('T' in field_value or 'Z' in field_value):
        try:
            parsed_date = datetime.fromisoformat(field_value.replace('Z', '+00:00'))
            # Adjust relative to session time
            return adjust_date_for_demo(parsed_date, session_time)
        except (ValueError, AttributeError) as e:
            logger.warning(
                "Failed to parse ISO timestamp",
                field_name=field_name,
                value=field_value,
                error=str(e)
            )
            return None

    logger.warning(
        "Unknown date format",
        field_name=field_name,
        value=field_value,
        value_type=type(field_value).__name__
    )
    return None


def verify_internal_api_key(x_internal_api_key: Optional[str] = Header(None)):
    """Verify internal API key for service-to-service communication"""
    if x_internal_api_key != settings.INTERNAL_API_KEY:

@@ -148,16 +204,16 @@ async def clone_demo_data(
                    detail=f"Invalid UUID format in recipe data: {str(e)}"
                )

-            # Adjust dates relative to session creation time
-            adjusted_created_at = adjust_date_for_demo(
-                datetime.fromisoformat(recipe_data['created_at'].replace('Z', '+00:00')),
-                session_time,
-                BASE_REFERENCE_DATE
-            )
-            adjusted_updated_at = adjust_date_for_demo(
-                datetime.fromisoformat(recipe_data['updated_at'].replace('Z', '+00:00')),
-                session_time,
-                BASE_REFERENCE_DATE
-            )
+            # Parse date fields (supports BASE_TS markers and ISO timestamps)
+            adjusted_created_at = parse_date_field(
+                recipe_data.get('created_at'),
+                session_time,
+                "created_at"
+            )
+            adjusted_updated_at = parse_date_field(
+                recipe_data.get('updated_at'),
+                session_time,
+                "updated_at"
+            )

            # Map field names from seed data to model fields

@@ -332,7 +388,7 @@ async def delete_demo_tenant_data(
    Delete all demo data for a virtual tenant.
    This endpoint is idempotent - safe to call multiple times.
    """
-    start_time = datetime.now()
+    start_time = datetime.now(timezone.utc)

    records_deleted = {
        "recipes": 0,

@@ -373,7 +429,7 @@ async def delete_demo_tenant_data(
            "status": "deleted",
            "virtual_tenant_id": str(virtual_tenant_id),
            "records_deleted": records_deleted,
-            "duration_ms": int((datetime.now() - start_time).total_seconds() * 1000)
+            "duration_ms": int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
        }

    except Exception as e:

@@ -50,7 +50,7 @@ class MeasurementUnit(enum.Enum):
class ProductionPriority(enum.Enum):
    """Production batch priority levels"""
    LOW = "low"
-    NORMAL = "normal"
+    MEDIUM = "medium"
    HIGH = "high"
    URGENT = "urgent"

@@ -284,7 +284,7 @@ class ProductionBatch(Base):

    # Production details
    status = Column(SQLEnum(ProductionStatus), nullable=False, default=ProductionStatus.PLANNED, index=True)
-    priority = Column(SQLEnum(ProductionPriority), nullable=False, default=ProductionPriority.NORMAL)
+    priority = Column(SQLEnum(ProductionPriority), nullable=False, default=ProductionPriority.MEDIUM)
    assigned_staff = Column(JSONB, nullable=True)  # List of staff assigned to this batch
    production_notes = Column(Text, nullable=True)

@@ -17,7 +17,7 @@ import json
from pathlib import Path

sys.path.insert(0, str(Path(__file__).parent.parent.parent.parent))
-from shared.utils.demo_dates import adjust_date_for_demo, BASE_REFERENCE_DATE
+from shared.utils.demo_dates import adjust_date_for_demo, resolve_time_marker

from app.core.database import get_db
from app.models.sales import SalesData

@@ -31,6 +31,62 @@ router = APIRouter()
DEMO_TENANT_PROFESSIONAL = "a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6"


def parse_date_field(
    field_value: any,
    session_time: datetime,
    field_name: str = "date"
) -> Optional[datetime]:
    """
    Parse a date field from JSON, supporting BASE_TS markers and ISO timestamps.

    Args:
        field_value: The date field value (can be BASE_TS marker, ISO string, or None)
        session_time: Session creation time (timezone-aware UTC)
        field_name: Name of the field (for logging)

    Returns:
        Timezone-aware UTC datetime or None
    """
    if field_value is None:
        return None

    # Handle BASE_TS markers
    if isinstance(field_value, str) and field_value.startswith("BASE_TS"):
        try:
            return resolve_time_marker(field_value, session_time)
        except (ValueError, AttributeError) as e:
            logger.warning(
                "Failed to resolve BASE_TS marker",
                field_name=field_name,
                marker=field_value,
                error=str(e)
            )
            return None

    # Handle ISO timestamps (legacy format - convert to absolute datetime)
    if isinstance(field_value, str) and ('T' in field_value or 'Z' in field_value):
        try:
            parsed_date = datetime.fromisoformat(field_value.replace('Z', '+00:00'))
            # Adjust relative to session time
            return adjust_date_for_demo(parsed_date, session_time)
        except (ValueError, AttributeError) as e:
            logger.warning(
                "Failed to parse ISO timestamp",
                field_name=field_name,
                value=field_value,
                error=str(e)
            )
            return None

    logger.warning(
        "Unknown date format",
        field_name=field_name,
        value=field_value,
        value_type=type(field_value).__name__
    )
    return None


def verify_internal_api_key(x_internal_api_key: Optional[str] = Header(None)):
    """Verify internal API key for service-to-service communication"""
    if x_internal_api_key != settings.INTERNAL_API_KEY:

@@ -141,12 +197,12 @@ async def clone_demo_data(

        # Load Sales Data from seed data
        for sale_data in seed_data.get('sales_data', []):
-            # Adjust date using the shared utility
-            adjusted_date = adjust_date_for_demo(
-                datetime.fromisoformat(sale_data['sale_date'].replace('Z', '+00:00')),
-                session_time,
-                BASE_REFERENCE_DATE
-            ) if sale_data.get('sale_date') else None
+            # Parse date field (supports BASE_TS markers and ISO timestamps)
+            adjusted_date = parse_date_field(
+                sale_data.get('sale_date'),
+                session_time,
+                "sale_date"
+            )

            # Create new sales record with adjusted date
            new_sale = SalesData(

@@ -18,6 +18,11 @@ from app.core.database import get_db
|
||||
from app.models.suppliers import Supplier
|
||||
from app.core.config import settings
|
||||
|
||||
# Import demo_dates utilities at the top level
|
||||
import sys
|
||||
sys.path.insert(0, str(Path(__file__).parent.parent.parent.parent))
|
||||
from shared.utils.demo_dates import adjust_date_for_demo, resolve_time_marker
|
||||
|
||||
logger = structlog.get_logger()
|
||||
router = APIRouter()
|
||||
|
||||
@@ -25,6 +30,62 @@ router = APIRouter()
 DEMO_TENANT_PROFESSIONAL = "a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6"
 
 
+def parse_date_field(
+    field_value: any,
+    session_time: datetime,
+    field_name: str = "date"
+) -> Optional[datetime]:
+    """
+    Parse a date field from JSON, supporting BASE_TS markers and ISO timestamps.
+
+    Args:
+        field_value: The date field value (can be BASE_TS marker, ISO string, or None)
+        session_time: Session creation time (timezone-aware UTC)
+        field_name: Name of the field (for logging)
+
+    Returns:
+        Timezone-aware UTC datetime or None
+    """
+    if field_value is None:
+        return None
+
+    # Handle BASE_TS markers
+    if isinstance(field_value, str) and field_value.startswith("BASE_TS"):
+        try:
+            return resolve_time_marker(field_value, session_time)
+        except (ValueError, AttributeError) as e:
+            logger.warning(
+                "Failed to resolve BASE_TS marker",
+                field_name=field_name,
+                marker=field_value,
+                error=str(e)
+            )
+            return None
+
+    # Handle ISO timestamps (legacy format - convert to absolute datetime)
+    if isinstance(field_value, str) and ('T' in field_value or 'Z' in field_value):
+        try:
+            parsed_date = datetime.fromisoformat(field_value.replace('Z', '+00:00'))
+            # Adjust relative to session time
+            return adjust_date_for_demo(parsed_date, session_time)
+        except (ValueError, AttributeError) as e:
+            logger.warning(
+                "Failed to parse ISO timestamp",
+                field_name=field_name,
+                value=field_value,
+                error=str(e)
+            )
+            return None
+
+    logger.warning(
+        "Unknown date format",
+        field_name=field_name,
+        value=field_value,
+        value_type=type(field_value).__name__
+    )
+    return None
+
+
 def verify_internal_api_key(x_internal_api_key: Optional[str] = Header(None)):
     """Verify internal API key for service-to-service communication"""
     if x_internal_api_key != settings.INTERNAL_API_KEY:

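A hedged test sketch for this helper (not part of the commit; assumes pytest-style asserts and that resolve_time_marker applies the literal offset to session_time):

    # Hypothetical tests; the expected marker semantics are an assumption
    # about shared.utils.demo_dates, not taken from this diff.
    from datetime import datetime, timedelta, timezone

    SESSION = datetime(2025, 6, 15, 12, 0, tzinfo=timezone.utc)

    def test_none_passes_through():
        assert parse_date_field(None, SESSION) is None

    def test_base_ts_marker():
        # Assumes "BASE_TS + 1h" resolves to SESSION + one hour.
        assert parse_date_field("BASE_TS + 1h", SESSION) == SESSION + timedelta(hours=1)

    def test_unsupported_type_returns_none():
        # Non-string, non-None input falls through to the final warning branch.
        assert parse_date_field(12345, SESSION) is None
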
@@ -138,22 +199,17 @@ async def clone_demo_data(
                 detail=f"Invalid UUID format in supplier data: {str(e)}"
             )
 
-        # Adjust dates relative to session creation time
-        from shared.utils.demo_dates import adjust_date_for_demo, BASE_REFERENCE_DATE
-        adjusted_created_at = adjust_date_for_demo(
-            datetime.fromisoformat(supplier_data['created_at'].replace('Z', '+00:00')),
+        # Parse date fields (supports BASE_TS markers and ISO timestamps)
+        adjusted_created_at = parse_date_field(
+            supplier_data.get('created_at'),
             session_time,
-            BASE_REFERENCE_DATE
+            "created_at"
         )
-        # Handle optional updated_at field
-        if 'updated_at' in supplier_data:
-            adjusted_updated_at = adjust_date_for_demo(
-                datetime.fromisoformat(supplier_data['updated_at'].replace('Z', '+00:00')),
-                session_time,
-                BASE_REFERENCE_DATE
-            )
-        else:
-            adjusted_updated_at = adjusted_created_at
+        adjusted_updated_at = parse_date_field(
+            supplier_data.get('updated_at'),
+            session_time,
+            "updated_at"
+        ) or adjusted_created_at  # Fallback to created_at if not provided
 
         # Map supplier_type to enum if it's a string
         from app.models.suppliers import SupplierType, SupplierStatus, PaymentTerms

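A note on the `or adjusted_created_at` fallback: parse_date_field returns either a timezone-aware datetime or None, and datetime objects are always truthy, so the `or` can never mask a successfully parsed updated_at. A quick demonstration of the invariant:

    from datetime import datetime, timezone

    created = datetime(2025, 6, 1, tzinfo=timezone.utc)
    parsed_updated = None  # what parse_date_field yields for a missing field

    assert (parsed_updated or created) == created            # fallback applies

    parsed_updated = datetime(2025, 6, 10, tzinfo=timezone.utc)
    assert (parsed_updated or created) == parsed_updated     # parsed value wins
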
@@ -226,17 +282,17 @@ async def clone_demo_data(
             approved_pos_count=supplier_data.get('approved_pos_count', 0),
             on_time_delivery_rate=supplier_data.get('on_time_delivery_rate', 0.0),
             fulfillment_rate=supplier_data.get('fulfillment_rate', 0.0),
-            last_performance_update=adjust_date_for_demo(
-                datetime.fromisoformat(supplier_data['last_performance_update'].replace('Z', '+00:00')),
+            last_performance_update=parse_date_field(
+                supplier_data.get('last_performance_update'),
                 session_time,
-                BASE_REFERENCE_DATE
-            ) if supplier_data.get('last_performance_update') else None,
+                "last_performance_update"
+            ),
             approved_by=supplier_data.get('approved_by'),
-            approved_at=adjust_date_for_demo(
-                datetime.fromisoformat(supplier_data['approved_at'].replace('Z', '+00:00')),
+            approved_at=parse_date_field(
+                supplier_data.get('approved_at'),
                 session_time,
-                BASE_REFERENCE_DATE
-            ) if supplier_data.get('approved_at') else None,
+                "approved_at"
+            ),
             rejection_reason=supplier_data.get('rejection_reason'),
             notes=supplier_data.get('notes'),
             certifications=supplier_data.get('certifications'),

@@ -320,7 +376,7 @@ async def delete_demo_tenant_data(
     Delete all demo data for a virtual tenant.
     This endpoint is idempotent - safe to call multiple times.
     """
-    start_time = datetime.now()
+    start_time = datetime.now(timezone.utc)
 
     records_deleted = {
         "suppliers": 0,

@@ -351,7 +407,7 @@ async def delete_demo_tenant_data(
             "status": "deleted",
             "virtual_tenant_id": str(virtual_tenant_id),
             "records_deleted": records_deleted,
-            "duration_ms": int((datetime.now() - start_time).total_seconds() * 1000)
+            "duration_ms": int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
         }
 
     except Exception as e:

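The two datetime.now fixes in this file belong together: datetime.now() is naive, datetime.now(timezone.utc) is aware, and Python refuses to subtract one from the other. A minimal reproduction of the failure the change prevents:

    from datetime import datetime, timezone

    aware = datetime.now(timezone.utc)
    naive = datetime.now()

    try:
        _ = aware - naive
    except TypeError as exc:
        print(f"mixing naive and aware datetimes fails: {exc}")

    # With both ends aware, the duration computation is well defined.
    duration_ms = int((datetime.now(timezone.utc) - aware).total_seconds() * 1000)
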
@@ -17,7 +17,7 @@ from pathlib import Path
 from app.core.database import get_db
 from app.models.tenants import Tenant, Subscription, TenantMember
 from app.models.tenant_location import TenantLocation
-from shared.utils.demo_dates import adjust_date_for_demo, BASE_REFERENCE_DATE
+from shared.utils.demo_dates import adjust_date_for_demo, resolve_time_marker
 
 from app.core.config import settings

@@ -28,6 +28,62 @@ router = APIRouter()
 DEMO_TENANT_PROFESSIONAL = "a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6"
 
 
+def parse_date_field(
+    field_value: any,
+    session_time: datetime,
+    field_name: str = "date"
+) -> Optional[datetime]:
+    """
+    Parse a date field from JSON, supporting BASE_TS markers and ISO timestamps.
+
+    Args:
+        field_value: The date field value (can be BASE_TS marker, ISO string, or None)
+        session_time: Session creation time (timezone-aware UTC)
+        field_name: Name of the field (for logging)
+
+    Returns:
+        Timezone-aware UTC datetime or None
+    """
+    if field_value is None:
+        return None
+
+    # Handle BASE_TS markers
+    if isinstance(field_value, str) and field_value.startswith("BASE_TS"):
+        try:
+            return resolve_time_marker(field_value, session_time)
+        except (ValueError, AttributeError) as e:
+            logger.warning(
+                "Failed to resolve BASE_TS marker",
+                field_name=field_name,
+                marker=field_value,
+                error=str(e)
+            )
+            return None
+
+    # Handle ISO timestamps (legacy format - convert to absolute datetime)
+    if isinstance(field_value, str) and ('T' in field_value or 'Z' in field_value):
+        try:
+            parsed_date = datetime.fromisoformat(field_value.replace('Z', '+00:00'))
+            # Adjust relative to session time
+            return adjust_date_for_demo(parsed_date, session_time)
+        except (ValueError, AttributeError) as e:
+            logger.warning(
+                "Failed to parse ISO timestamp",
+                field_name=field_name,
+                value=field_value,
+                error=str(e)
+            )
+            return None
+
+    logger.warning(
+        "Unknown date format",
+        field_name=field_name,
+        value=field_value,
+        value_type=type(field_value).__name__
+    )
+    return None
+
+
 def verify_internal_api_key(x_internal_api_key: Optional[str] = Header(None)):
     """Verify internal API key for service-to-service communication"""
     if x_internal_api_key != settings.INTERNAL_API_KEY:

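One detail of the ISO branch worth noting: the `.replace('Z', '+00:00')` shim exists because datetime.fromisoformat only accepts a trailing 'Z' from Python 3.11 onward. A short check:

    from datetime import datetime

    stamp = "2025-01-15T06:00:00Z"
    parsed = datetime.fromisoformat(stamp.replace('Z', '+00:00'))
    print(parsed.tzinfo)  # UTC offset attached -> timezone-aware datetime
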
@@ -141,16 +197,16 @@ async def clone_demo_data(
                 max_locations=subscription_data.get('max_locations', 3),
                 max_products=subscription_data.get('max_products', 500),
                 features=subscription_data.get('features', {}),
-                trial_ends_at=adjust_date_for_demo(
-                    datetime.fromisoformat(subscription_data['trial_ends_at'].replace('Z', '+00:00')),
+                trial_ends_at=parse_date_field(
+                    subscription_data.get('trial_ends_at'),
                     session_time,
-                    BASE_REFERENCE_DATE
-                ) if subscription_data.get('trial_ends_at') else None,
-                next_billing_date=adjust_date_for_demo(
-                    datetime.fromisoformat(subscription_data['next_billing_date'].replace('Z', '+00:00')),
+                    "trial_ends_at"
+                ),
+                next_billing_date=parse_date_field(
+                    subscription_data.get('next_billing_date'),
                     session_time,
-                    BASE_REFERENCE_DATE
-                ) if subscription_data.get('next_billing_date') else None
+                    "next_billing_date"
+                )
             )
 
             db.add(subscription)

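With this change, subscription seed files can carry relative markers instead of fixed timestamps. A hypothetical seed fragment (field names taken from this diff; the marker values are invented for illustration):

    subscription_data = {
        "max_locations": 3,
        "max_products": 500,
        "trial_ends_at": "BASE_TS + 14d",      # trial ends two weeks into the demo
        "next_billing_date": "BASE_TS + 30d",  # billed a month after session start
    }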