Fix enterprise demo cloning: add distribution-service clone endpoint, handle enterprise_child tenants, and propagate the parent owner to child outlets

This commit is contained in:
Urtzi Alfaro
2025-12-17 13:03:52 +01:00
parent 0bbfa010bf
commit 8bfe4f2dd7
111 changed files with 26200 additions and 2245 deletions

View File

@@ -95,31 +95,24 @@ async def clone_demo_data(
# Idempotency is handled by checking if each user email already exists below
# Load demo users from JSON seed file
try:
from shared.utils.seed_data_paths import get_seed_data_path
if demo_account_type == "professional":
json_file = get_seed_data_path("professional", "02-auth.json")
elif demo_account_type == "enterprise":
json_file = get_seed_data_path("enterprise", "02-auth.json")
else:
raise ValueError(f"Invalid demo account type: {demo_account_type}")
from shared.utils.seed_data_paths import get_seed_data_path
except ImportError:
# Fallback to original path
seed_data_dir = Path(__file__).parent.parent.parent.parent / "infrastructure" / "seed-data"
if demo_account_type == "professional":
json_file = seed_data_dir / "professional" / "02-auth.json"
elif demo_account_type == "enterprise":
json_file = seed_data_dir / "enterprise" / "parent" / "02-auth.json"
else:
raise ValueError(f"Invalid demo account type: {demo_account_type}")
if not json_file.exists():
raise HTTPException(
status_code=404,
detail=f"Seed data file not found: {json_file}"
)
if demo_account_type == "professional":
json_file = get_seed_data_path("professional", "02-auth.json")
elif demo_account_type == "enterprise":
json_file = get_seed_data_path("enterprise", "02-auth.json")
elif demo_account_type == "enterprise_child":
# Child locations don't have separate auth data - they share parent's users
logger.info("enterprise_child uses parent tenant auth, skipping user cloning", virtual_tenant_id=virtual_tenant_id)
return {
"service": "auth",
"status": "completed",
"records_cloned": 0,
"duration_ms": int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000),
"details": {"users": 0, "note": "Child locations share parent auth"}
}
else:
raise ValueError(f"Invalid demo account type: {demo_account_type}")
# Load JSON data
import json

View File

@@ -117,6 +117,12 @@ class CloneOrchestrator:
required=False, # Optional - provides orchestration history
timeout=15.0
),
ServiceDefinition(
name="distribution",
url=os.getenv("DISTRIBUTION_SERVICE_URL", "http://distribution-service:8000"),
required=False, # Optional - provides distribution routes and shipments
timeout=20.0
),
]
async def _update_progress_in_redis(

View File

@@ -0,0 +1,418 @@
"""
Internal Demo Cloning API for Distribution Service
Service-to-service endpoint for cloning distribution data
"""
from fastapi import APIRouter, Depends, HTTPException, Header
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, delete, func
import structlog
import uuid
from datetime import datetime, timezone, timedelta
from typing import Optional
import os
import json
from pathlib import Path
from app.core.database import get_db
from app.models.distribution import DeliveryRoute, Shipment
from shared.utils.demo_dates import adjust_date_for_demo, resolve_time_marker
from app.core.config import settings
logger = structlog.get_logger()
router = APIRouter(prefix="/internal/demo", tags=["internal"])
def verify_internal_api_key(x_internal_api_key: Optional[str] = Header(None)):
    """
    FastAPI dependency that authenticates service-to-service calls.

    Compares the X-Internal-API-Key header against settings.INTERNAL_API_KEY
    using a constant-time comparison so response timing does not leak how much
    of the key matched.

    Raises:
        HTTPException: 403 when the header is missing or does not match.
    """
    import hmac

    # hmac.compare_digest requires two str/bytes values, so reject a missing
    # header explicitly. This also fails closed if INTERNAL_API_KEY is unset
    # (the old `!=` check accepted None == None).
    if x_internal_api_key is None or not hmac.compare_digest(
        x_internal_api_key, settings.INTERNAL_API_KEY
    ):
        logger.warning("Unauthorized internal API access attempted")
        raise HTTPException(status_code=403, detail="Invalid internal API key")
    return True
def parse_date_field(date_value, session_time: datetime, field_name: str = "date") -> Optional[datetime]:
    """
    Parse a seed-data date field into a timezone-aware datetime.

    Accepted inputs:
    - BASE_TS markers such as "BASE_TS + 1h30m" or "BASE_TS - 2d",
      resolved relative to *session_time*
    - ISO 8601 strings such as "2025-01-15T06:00:00Z"
    - datetime-like objects (anything exposing isoformat())
    - falsy values, which yield None

    Returns a timezone-aware datetime shifted into the demo timeframe, or
    None when the value is missing or unparseable (a warning is logged for
    unparseable values, with *field_name* identifying the offending field).
    """
    if not date_value:
        return None

    # BASE_TS markers are resolved directly against the session anchor.
    if isinstance(date_value, str) and date_value.startswith("BASE_TS"):
        try:
            return resolve_time_marker(date_value, session_time)
        except ValueError as exc:
            logger.warning(
                f"Invalid BASE_TS marker in {field_name}",
                marker=date_value,
                error=str(exc)
            )
            return None

    # Everything else: normalize to a datetime, then shift into the demo window.
    try:
        if isinstance(date_value, str):
            raw = datetime.fromisoformat(date_value.replace('Z', '+00:00'))
        elif hasattr(date_value, 'isoformat'):
            raw = date_value
        else:
            logger.warning(f"Unsupported date format in {field_name}", date_value=date_value)
            return None
        return adjust_date_for_demo(raw, session_time)
    except (ValueError, AttributeError) as exc:
        logger.warning(
            f"Invalid date format in {field_name}",
            date_value=date_value,
            error=str(exc)
        )
        return None
@router.post("/clone")
async def clone_demo_data(
    base_tenant_id: str,
    virtual_tenant_id: str,
    demo_account_type: str,
    session_id: Optional[str] = None,
    session_created_at: Optional[str] = None,
    db: AsyncSession = Depends(get_db),
    _: bool = Depends(verify_internal_api_key)
):
    """
    Clone distribution service data for a virtual demo tenant.

    Loads delivery routes and shipments from the JSON seed file for the
    requested demo profile, rewrites their IDs into the virtual tenant's ID
    space, shifts all dates into the demo session's timeframe, and inserts
    the results under the virtual tenant. "enterprise_child" tenants are
    skipped entirely: distribution is managed centrally by the parent tenant,
    and child locations are only delivery destinations.

    Args:
        base_tenant_id: Template tenant UUID to clone from.
        virtual_tenant_id: Target virtual tenant UUID.
        demo_account_type: "professional", "enterprise", or "enterprise_child".
        session_id: Originating session ID, used for log tracing only.
        session_created_at: ISO timestamp of the demo session's creation;
            anchor for date adjustment (falls back to "now" when absent or
            unparseable).

    Returns:
        Dict with cloning status, record counts, and duration. Runtime
        failures are reported as {"status": "failed", ...} rather than
        raised; malformed tenant UUIDs yield HTTP 400.
    """
    start_time = datetime.now(timezone.utc)

    # Anchor for date adjustment: the demo session's creation time, falling
    # back to the request time when the caller omits or garbles it.
    if session_created_at:
        try:
            session_time = datetime.fromisoformat(session_created_at.replace('Z', '+00:00'))
        except (ValueError, AttributeError):
            session_time = start_time
    else:
        session_time = start_time

    logger.info(
        "Starting distribution data cloning",
        base_tenant_id=base_tenant_id,
        virtual_tenant_id=virtual_tenant_id,
        demo_account_type=demo_account_type,
        session_id=session_id,
        session_created_at=session_created_at
    )

    try:
        # Validate UUIDs up front; failures surface as HTTP 400 below.
        base_uuid = uuid.UUID(base_tenant_id)
        virtual_uuid = uuid.UUID(virtual_tenant_id)

        # Track cloning statistics
        stats = {
            "delivery_routes": 0,
            "shipments": 0,
            "alerts_generated": 0
        }

        # Load seed data from JSON files
        from shared.utils.seed_data_paths import get_seed_data_path
        if demo_account_type == "professional":
            json_file = get_seed_data_path("professional", "12-distribution.json")
        elif demo_account_type == "enterprise":
            json_file = get_seed_data_path("enterprise", "12-distribution.json")
        elif demo_account_type == "enterprise_child":
            # Child outlets don't have their own distribution data
            # Distribution is managed centrally by the parent tenant
            # Child locations are delivery destinations, not distribution hubs
            logger.info(
                "Skipping distribution cloning for child outlet - distribution managed by parent",
                base_tenant_id=base_tenant_id,
                virtual_tenant_id=virtual_tenant_id,
                session_id=session_id
            )
            duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
            return {
                "service": "distribution",
                "status": "completed",
                "records_cloned": 0,
                "duration_ms": duration_ms,
                "details": {
                    "note": "Child outlets don't manage distribution - handled by parent tenant"
                }
            }
        else:
            raise ValueError(f"Invalid demo account type: {demo_account_type}")

        # Load JSON data
        with open(json_file, 'r', encoding='utf-8') as f:
            seed_data = json.load(f)

        logger.info(
            "Loaded distribution seed data",
            delivery_routes=len(seed_data.get('delivery_routes', [])),
            shipments=len(seed_data.get('shipments', []))
        )

        # ID transformation is XOR-based and deterministic per virtual tenant,
        # so related records keep referencing each other after cloning.
        # Hoisted here: previously this import ran on every loop iteration.
        from shared.utils.demo_id_transformer import transform_id

        # Clone Delivery Routes
        for route_data in seed_data.get('delivery_routes', []):
            try:
                uuid.UUID(route_data['id'])  # validate format before transforming
                transformed_id = transform_id(route_data['id'], virtual_uuid)
            except ValueError as e:
                logger.error("Failed to parse route UUID",
                           route_id=route_data['id'],
                           error=str(e))
                continue

            # Parse date fields; route_date is NOT NULL, so fall back to the
            # session anchor when the seed value is missing or invalid.
            route_date = parse_date_field(
                route_data.get('route_date'),
                session_time,
                "route_date"
            ) or session_time

            # Re-anchor every stop's timestamps into the demo timeframe,
            # keeping the rest of each stop dict untouched.
            parsed_sequence = []
            for stop in route_data.get('route_sequence', []):
                estimated_arrival = parse_date_field(
                    stop.get('estimated_arrival'),
                    session_time,
                    "estimated_arrival"
                )
                actual_arrival = parse_date_field(
                    stop.get('actual_arrival'),
                    session_time,
                    "actual_arrival"
                )
                parsed_sequence.append({
                    **stop,
                    "estimated_arrival": estimated_arrival.isoformat() if estimated_arrival else None,
                    "actual_arrival": actual_arrival.isoformat() if actual_arrival else None
                })

            # Create new delivery route owned by the virtual tenant
            new_route = DeliveryRoute(
                id=transformed_id,
                tenant_id=virtual_uuid,
                route_number=route_data.get('route_number'),
                route_date=route_date,
                vehicle_id=route_data.get('vehicle_id'),
                driver_id=route_data.get('driver_id'),
                total_distance_km=route_data.get('total_distance_km'),
                estimated_duration_minutes=route_data.get('estimated_duration_minutes'),
                route_sequence=parsed_sequence,
                notes=route_data.get('notes'),
                status=route_data.get('status', 'planned'),
                created_at=session_time,
                updated_at=session_time,
                created_by=base_uuid,
                updated_by=base_uuid
            )
            db.add(new_route)
            stats["delivery_routes"] += 1

        # Clone Shipments
        for shipment_data in seed_data.get('shipments', []):
            try:
                uuid.UUID(shipment_data['id'])  # validate format before transforming
                transformed_id = transform_id(shipment_data['id'], virtual_uuid)
            except ValueError as e:
                logger.error("Failed to parse shipment UUID",
                           shipment_id=shipment_data['id'],
                           error=str(e))
                continue

            # Parse date fields; shipment_date falls back to the session anchor.
            shipment_date = parse_date_field(
                shipment_data.get('shipment_date'),
                session_time,
                "shipment_date"
            ) or session_time

            # Note: The Shipment model doesn't have estimated_delivery_time
            # Only actual_delivery_time is stored
            actual_delivery_time = parse_date_field(
                shipment_data.get('actual_delivery_time'),
                session_time,
                "actual_delivery_time"
            )

            # Transform purchase_order_id if present (links to internal transfer PO)
            purchase_order_id = None
            if shipment_data.get('purchase_order_id'):
                try:
                    uuid.UUID(shipment_data['purchase_order_id'])  # validate format
                    purchase_order_id = transform_id(shipment_data['purchase_order_id'], virtual_uuid)
                except ValueError:
                    logger.warning(
                        "Invalid purchase_order_id format",
                        purchase_order_id=shipment_data.get('purchase_order_id')
                    )

            # Transform delivery_route_id (CRITICAL: must reference transformed route)
            delivery_route_id = None
            if shipment_data.get('delivery_route_id'):
                try:
                    uuid.UUID(shipment_data['delivery_route_id'])  # validate format
                    delivery_route_id = transform_id(shipment_data['delivery_route_id'], virtual_uuid)
                except ValueError:
                    logger.warning(
                        "Invalid delivery_route_id format",
                        delivery_route_id=shipment_data.get('delivery_route_id')
                    )

            # Store items in delivery_notes as JSON for demo purposes
            # (In production, items are in the linked purchase order)
            items_json = json.dumps(shipment_data.get('items', [])) if shipment_data.get('items') else None

            # Create new shipment
            new_shipment = Shipment(
                id=transformed_id,
                tenant_id=virtual_uuid,
                parent_tenant_id=virtual_uuid,  # Parent is the same as tenant for demo
                child_tenant_id=shipment_data.get('child_tenant_id'),
                purchase_order_id=purchase_order_id,  # Link to internal transfer PO
                delivery_route_id=delivery_route_id,  # MUST use transformed ID
                shipment_number=shipment_data.get('shipment_number'),
                shipment_date=shipment_date,
                status=shipment_data.get('status', 'pending'),
                total_weight_kg=shipment_data.get('total_weight_kg'),
                actual_delivery_time=actual_delivery_time,
                # Store items info in delivery_notes for demo display
                delivery_notes=f"{shipment_data.get('notes', '')}\nItems: {items_json}" if items_json else shipment_data.get('notes'),
                created_at=session_time,
                updated_at=session_time,
                created_by=base_uuid,
                updated_by=base_uuid
            )
            db.add(new_shipment)
            stats["shipments"] += 1

        # Commit cloned data
        await db.commit()

        total_records = stats["delivery_routes"] + stats["shipments"]
        duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
        logger.info(
            "Distribution data cloning completed",
            virtual_tenant_id=virtual_tenant_id,
            total_records=total_records,
            stats=stats,
            duration_ms=duration_ms
        )
        return {
            "service": "distribution",
            "status": "completed",
            "records_cloned": total_records,
            "duration_ms": duration_ms,
            "details": stats
        }
    except ValueError as e:
        # Raised by the tenant-UUID parsing above (and by an invalid
        # demo_account_type); reported to the caller as a 400.
        logger.error("Invalid UUID format", error=str(e))
        raise HTTPException(status_code=400, detail=f"Invalid UUID: {str(e)}")
    except Exception as e:
        logger.error(
            "Failed to clone distribution data",
            error=str(e),
            virtual_tenant_id=virtual_tenant_id,
            exc_info=True
        )
        # Rollback on error; the orchestrator expects a "failed" payload
        # rather than an HTTP error for runtime cloning failures.
        await db.rollback()
        return {
            "service": "distribution",
            "status": "failed",
            "records_cloned": 0,
            "duration_ms": int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000),
            "error": str(e)
        }
@router.get("/clone/health")
async def clone_health_check(_: bool = Depends(verify_internal_api_key)):
    """
    Lightweight availability probe for the internal cloning API.

    The orchestrator calls this before dispatching clone work to confirm
    that the distribution service exposes its clone endpoint.
    """
    payload = {
        "service": "distribution",
        "clone_endpoint": "available",
        "version": "1.0.0",
    }
    return payload
@router.delete("/tenant/{virtual_tenant_id}")
async def delete_demo_data(
    virtual_tenant_id: str,
    db: AsyncSession = Depends(get_db),
    _: bool = Depends(verify_internal_api_key)
):
    """Remove every distribution record owned by a virtual demo tenant.

    Counts the tenant's delivery routes and shipments, deletes shipments
    first (they reference routes) and then the routes, and reports the
    per-table totals along with the elapsed time.

    Raises:
        HTTPException: 500 when deletion fails; the transaction is rolled
            back before the error is returned.
    """
    logger.info("Deleting distribution data for virtual tenant", virtual_tenant_id=virtual_tenant_id)
    started_at = datetime.now(timezone.utc)
    try:
        tenant_uuid = uuid.UUID(virtual_tenant_id)

        # Snapshot counts before deleting so the response can report them.
        routes_removed = await db.scalar(
            select(func.count(DeliveryRoute.id)).where(DeliveryRoute.tenant_id == tenant_uuid)
        )
        shipments_removed = await db.scalar(
            select(func.count(Shipment.id)).where(Shipment.tenant_id == tenant_uuid)
        )

        # Shipments reference delivery routes, so they are removed first.
        await db.execute(delete(Shipment).where(Shipment.tenant_id == tenant_uuid))
        await db.execute(delete(DeliveryRoute).where(DeliveryRoute.tenant_id == tenant_uuid))
        await db.commit()

        elapsed_ms = int((datetime.now(timezone.utc) - started_at).total_seconds() * 1000)
        logger.info("Distribution data deleted successfully", virtual_tenant_id=virtual_tenant_id, duration_ms=elapsed_ms)
        return {
            "service": "distribution",
            "status": "deleted",
            "virtual_tenant_id": virtual_tenant_id,
            "records_deleted": {
                "delivery_routes": routes_removed,
                "shipments": shipments_removed,
                "total": routes_removed + shipments_removed
            },
            "duration_ms": elapsed_ms
        }
    except Exception as exc:
        logger.error("Failed to delete distribution data", error=str(exc), exc_info=True)
        await db.rollback()
        raise HTTPException(status_code=500, detail=str(exc))

View File

@@ -8,7 +8,7 @@ from app.core.config import settings
from app.core.database import database_manager
from app.api.routes import router as distribution_router
from app.api.shipments import router as shipments_router
# from app.api.internal_demo import router as internal_demo_router # REMOVED: Replaced by script-based seed data loading
from app.api.internal_demo import router as internal_demo_router
from shared.service_base import StandardFastAPIService
@@ -122,4 +122,4 @@ service.setup_standard_endpoints()
# Note: Routes now use RouteBuilder which includes full paths, so no prefix needed
service.add_router(distribution_router, tags=["distribution"])
service.add_router(shipments_router, tags=["shipments"])
# service.add_router(internal_demo_router, tags=["internal-demo"]) # REMOVED: Replaced by script-based seed data loading
service.add_router(internal_demo_router, tags=["internal-demo"])

View File

@@ -157,12 +157,6 @@ async def clone_demo_data_internal(
else:
session_created_at_parsed = datetime.now(timezone.utc)
# Determine profile based on demo_account_type
if demo_account_type == "enterprise":
profile = "enterprise"
else:
profile = "professional"
logger.info(
"Starting inventory data cloning with date adjustment",
base_tenant_id=base_tenant_id,
@@ -172,32 +166,17 @@ async def clone_demo_data_internal(
session_time=session_created_at_parsed.isoformat()
)
# Load seed data using shared utility
try:
from shared.utils.seed_data_paths import get_seed_data_path
if profile == "professional":
json_file = get_seed_data_path("professional", "03-inventory.json")
elif profile == "enterprise":
json_file = get_seed_data_path("enterprise", "03-inventory.json")
else:
raise ValueError(f"Invalid profile: {profile}")
# Load seed data from JSON files
from shared.utils.seed_data_paths import get_seed_data_path
except ImportError:
# Fallback to original path
seed_data_dir = Path(__file__).parent.parent.parent.parent / "infrastructure" / "seed-data"
if profile == "professional":
json_file = seed_data_dir / "professional" / "03-inventory.json"
elif profile == "enterprise":
json_file = seed_data_dir / "enterprise" / "parent" / "03-inventory.json"
else:
raise ValueError(f"Invalid profile: {profile}")
if not json_file.exists():
raise HTTPException(
status_code=404,
detail=f"Seed data file not found: {json_file}"
)
if demo_account_type == "professional":
json_file = get_seed_data_path("professional", "03-inventory.json")
elif demo_account_type == "enterprise":
json_file = get_seed_data_path("enterprise", "03-inventory.json")
elif demo_account_type == "enterprise_child":
json_file = get_seed_data_path("enterprise", "03-inventory.json", child_id=base_tenant_id)
else:
raise ValueError(f"Invalid demo account type: {demo_account_type}")
# Load JSON data
with open(json_file, 'r', encoding='utf-8') as f:
@@ -223,7 +202,7 @@ async def clone_demo_data_internal(
# Transform and insert data
records_cloned = 0
# Clone ingredients
for ingredient_data in seed_data.get('ingredients', []):
# Transform ID
@@ -241,7 +220,7 @@ async def clone_demo_data_internal(
status_code=400,
detail=f"Invalid UUID format in ingredient data: {str(e)}"
)
# Transform dates using standardized helper
ingredient_data['created_at'] = parse_date_field(
ingredient_data.get('created_at'), session_time, 'created_at'
@@ -249,7 +228,7 @@ async def clone_demo_data_internal(
ingredient_data['updated_at'] = parse_date_field(
ingredient_data.get('updated_at'), session_time, 'updated_at'
) or session_time
# Map category field to ingredient_category enum
if 'category' in ingredient_data:
category_value = ingredient_data.pop('category')
@@ -260,7 +239,7 @@ async def clone_demo_data_internal(
except KeyError:
# If category not found in enum, use OTHER
ingredient_data['ingredient_category'] = IngredientCategory.OTHER
# Map unit_of_measure string to enum
if 'unit_of_measure' in ingredient_data:
from app.models.inventory import UnitOfMeasure
@@ -297,14 +276,14 @@ async def clone_demo_data_internal(
ingredient_data['unit_of_measure'] = UnitOfMeasure.UNITS
logger.warning("Unknown unit_of_measure, defaulting to UNITS",
original_unit=unit_str)
# Note: All seed data fields now match the model schema exactly
# No field filtering needed
# Remove original id and tenant_id from ingredient_data to avoid conflict
ingredient_data.pop('id', None)
ingredient_data.pop('tenant_id', None)
# Create ingredient
ingredient = Ingredient(
id=str(transformed_id),
@@ -314,6 +293,9 @@ async def clone_demo_data_internal(
db.add(ingredient)
records_cloned += 1
# Commit ingredients before creating stock to ensure foreign key references exist
await db.flush() # Use flush instead of commit to maintain transaction while continuing
# Clone stock batches
for stock_data in seed_data.get('stock', []):
# Transform ID - handle both UUID and string IDs

View File

@@ -62,7 +62,8 @@ async def load_fixture_data_for_tenant(
db: AsyncSession,
tenant_uuid: UUID,
demo_account_type: str,
reference_time: datetime
reference_time: datetime,
base_tenant_id: Optional[str] = None
) -> int:
"""
Load orchestration run data from JSON fixture directly into the virtual tenant.
@@ -72,16 +73,10 @@ async def load_fixture_data_for_tenant(
from shared.utils.demo_dates import resolve_time_marker, adjust_date_for_demo
# Load fixture data
try:
if demo_account_type == "enterprise_child" and base_tenant_id:
json_file = get_seed_data_path("enterprise", "11-orchestrator.json", child_id=base_tenant_id)
else:
json_file = get_seed_data_path(demo_account_type, "11-orchestrator.json")
except ImportError:
# Fallback to original path
seed_data_dir = Path(__file__).parent.parent.parent.parent / "shared" / "demo" / "fixtures"
json_file = seed_data_dir / demo_account_type / "11-orchestrator.json"
if not json_file.exists():
logger.warning("Orchestrator fixture file not found", file=str(json_file))
return 0
with open(json_file, 'r', encoding='utf-8') as f:
fixture_data = json.load(f)
@@ -206,7 +201,8 @@ async def clone_demo_data(
db,
virtual_uuid,
demo_account_type,
reference_time
reference_time,
base_tenant_id
)
await db.commit()

View File

@@ -161,6 +161,8 @@ async def clone_demo_data(
json_file = get_seed_data_path("professional", "08-orders.json")
elif demo_account_type == "enterprise":
json_file = get_seed_data_path("enterprise", "08-orders.json")
elif demo_account_type == "enterprise_child":
json_file = get_seed_data_path("enterprise", "08-orders.json", child_id=base_tenant_id)
else:
raise ValueError(f"Invalid demo account type: {demo_account_type}")
@@ -171,6 +173,8 @@ async def clone_demo_data(
json_file = seed_data_dir / "professional" / "08-orders.json"
elif demo_account_type == "enterprise":
json_file = seed_data_dir / "enterprise" / "parent" / "08-orders.json"
elif demo_account_type == "enterprise_child":
json_file = seed_data_dir / "enterprise" / "children" / base_tenant_id / "08-orders.json"
else:
raise ValueError(f"Invalid demo account type: {demo_account_type}")

View File

@@ -292,31 +292,16 @@ async def clone_demo_data(
return None
# Load seed data from JSON files
try:
from shared.utils.seed_data_paths import get_seed_data_path
if demo_account_type == "professional":
json_file = get_seed_data_path("professional", "07-procurement.json")
elif demo_account_type == "enterprise":
json_file = get_seed_data_path("enterprise", "07-procurement.json")
else:
raise ValueError(f"Invalid demo account type: {demo_account_type}")
from shared.utils.seed_data_paths import get_seed_data_path
except ImportError:
# Fallback to original path
seed_data_dir = Path(__file__).parent.parent.parent.parent / "infrastructure" / "seed-data"
if demo_account_type == "professional":
json_file = seed_data_dir / "professional" / "07-procurement.json"
elif demo_account_type == "enterprise":
json_file = seed_data_dir / "enterprise" / "parent" / "07-procurement.json"
else:
raise ValueError(f"Invalid demo account type: {demo_account_type}")
if not json_file.exists():
raise HTTPException(
status_code=404,
detail=f"Seed data file not found: {json_file}"
)
if demo_account_type == "professional":
json_file = get_seed_data_path("professional", "07-procurement.json")
elif demo_account_type == "enterprise":
json_file = get_seed_data_path("enterprise", "07-procurement.json")
elif demo_account_type == "enterprise_child":
json_file = get_seed_data_path("enterprise", "07-procurement.json", child_id=base_tenant_id)
else:
raise ValueError(f"Invalid demo account type: {demo_account_type}")
# Load JSON data
with open(json_file, 'r', encoding='utf-8') as f:

View File

@@ -141,31 +141,16 @@ async def clone_demo_data(
return None
# Load seed data from JSON files
try:
from shared.utils.seed_data_paths import get_seed_data_path
if demo_account_type == "professional":
json_file = get_seed_data_path("professional", "06-production.json")
elif demo_account_type == "enterprise":
json_file = get_seed_data_path("enterprise", "06-production.json")
else:
raise ValueError(f"Invalid demo account type: {demo_account_type}")
from shared.utils.seed_data_paths import get_seed_data_path
except ImportError:
# Fallback to original path
seed_data_dir = Path(__file__).parent.parent.parent.parent / "infrastructure" / "seed-data"
if demo_account_type == "professional":
json_file = seed_data_dir / "professional" / "06-production.json"
elif demo_account_type == "enterprise":
json_file = seed_data_dir / "enterprise" / "parent" / "06-production.json"
else:
raise ValueError(f"Invalid demo account type: {demo_account_type}")
if not json_file.exists():
raise HTTPException(
status_code=404,
detail=f"Seed data file not found: {json_file}"
)
if demo_account_type == "professional":
json_file = get_seed_data_path("professional", "06-production.json")
elif demo_account_type == "enterprise":
json_file = get_seed_data_path("enterprise", "06-production.json")
elif demo_account_type == "enterprise_child":
json_file = get_seed_data_path("enterprise", "06-production.json", child_id=base_tenant_id)
else:
raise ValueError(f"Invalid demo account type: {demo_account_type}")
# Load JSON data
with open(json_file, 'r', encoding='utf-8') as f:

View File

@@ -152,31 +152,16 @@ async def clone_demo_data(
)
# Load seed data from JSON files
try:
from shared.utils.seed_data_paths import get_seed_data_path
if demo_account_type == "professional":
json_file = get_seed_data_path("professional", "04-recipes.json")
elif demo_account_type == "enterprise":
json_file = get_seed_data_path("enterprise", "04-recipes.json")
else:
raise ValueError(f"Invalid demo account type: {demo_account_type}")
from shared.utils.seed_data_paths import get_seed_data_path
except ImportError:
# Fallback to original path
seed_data_dir = Path(__file__).parent.parent.parent.parent / "infrastructure" / "seed-data"
if demo_account_type == "professional":
json_file = seed_data_dir / "professional" / "04-recipes.json"
elif demo_account_type == "enterprise":
json_file = seed_data_dir / "enterprise" / "parent" / "04-recipes.json"
else:
raise ValueError(f"Invalid demo account type: {demo_account_type}")
if not json_file.exists():
raise HTTPException(
status_code=404,
detail=f"Seed data file not found: {json_file}"
)
if demo_account_type == "professional":
json_file = get_seed_data_path("professional", "04-recipes.json")
elif demo_account_type == "enterprise":
json_file = get_seed_data_path("enterprise", "04-recipes.json")
elif demo_account_type == "enterprise_child":
json_file = get_seed_data_path("enterprise", "04-recipes.json", child_id=base_tenant_id)
else:
raise ValueError(f"Invalid demo account type: {demo_account_type}")
# Load JSON data
with open(json_file, 'r', encoding='utf-8') as f:

View File

@@ -159,32 +159,17 @@ async def clone_demo_data(
"sales_records": 0,
}
# Load seed data from JSON files instead of cloning from database
try:
from shared.utils.seed_data_paths import get_seed_data_path
if demo_account_type == "professional":
json_file = get_seed_data_path("professional", "09-sales.json")
elif demo_account_type == "enterprise":
json_file = get_seed_data_path("enterprise", "09-sales.json")
else:
raise ValueError(f"Invalid demo account type: {demo_account_type}")
# Load seed data from JSON files
from shared.utils.seed_data_paths import get_seed_data_path
except ImportError:
# Fallback to original path
seed_data_dir = Path(__file__).parent.parent.parent.parent / "infrastructure" / "seed-data"
if demo_account_type == "professional":
json_file = seed_data_dir / "professional" / "09-sales.json"
elif demo_account_type == "enterprise":
json_file = seed_data_dir / "enterprise" / "parent" / "09-sales.json"
else:
raise ValueError(f"Invalid demo account type: {demo_account_type}")
if not json_file.exists():
raise HTTPException(
status_code=404,
detail=f"Seed data file not found: {json_file}"
)
if demo_account_type == "professional":
json_file = get_seed_data_path("professional", "09-sales.json")
elif demo_account_type == "enterprise":
json_file = get_seed_data_path("enterprise", "09-sales.json")
elif demo_account_type == "enterprise_child":
json_file = get_seed_data_path("enterprise", "09-sales.json", child_id=base_tenant_id)
else:
raise ValueError(f"Invalid demo account type: {demo_account_type}")
# Load JSON data
with open(json_file, 'r', encoding='utf-8') as f:
@@ -198,25 +183,36 @@ async def clone_demo_data(
# Load Sales Data from seed data
for sale_data in seed_data.get('sales_data', []):
# Parse date field (supports BASE_TS markers and ISO timestamps)
# Different demo types may use different field names for the date
# Prioritize in order: date, sale_date, sales_date
date_value = (sale_data.get('date') or
sale_data.get('sale_date') or
sale_data.get('sales_date'))
adjusted_date = parse_date_field(
sale_data.get('sales_date'),
date_value,
session_time,
"sales_date"
"date"
)
# Ensure date is not None for NOT NULL constraint by using session_time as fallback
if adjusted_date is None:
adjusted_date = session_time
# Create new sales record with adjusted date
# Map different possible JSON field names to the correct model field names
new_sale = SalesData(
id=uuid.uuid4(),
tenant_id=virtual_uuid,
date=adjusted_date,
inventory_product_id=sale_data.get('product_id'), # Use product_id from seed data
quantity_sold=sale_data.get('quantity', 0.0), # Map quantity to quantity_sold
unit_price=sale_data.get('unit_price', 0.0),
revenue=sale_data.get('total_amount', 0.0), # Map total_amount to revenue
cost_of_goods=sale_data.get('cost_of_goods', 0.0),
discount_applied=sale_data.get('discount_applied', 0.0),
inventory_product_id=sale_data.get('inventory_product_id') or sale_data.get('product_id'), # inventory_product_id is the model field
quantity_sold=sale_data.get('quantity_sold') or sale_data.get('quantity', 0.0), # quantity_sold is the model field
unit_price=sale_data.get('unit_price', 0.0), # unit_price is the model field
revenue=sale_data.get('revenue') or sale_data.get('total_revenue') or sale_data.get('total_amount', 0.0), # revenue is the model field
cost_of_goods=sale_data.get('cost_of_goods', 0.0), # cost_of_goods is the model field
discount_applied=sale_data.get('discount_applied', 0.0), # discount_applied is the model field
location_id=sale_data.get('location_id'),
sales_channel=sale_data.get('sales_channel', 'IN_STORE'),
sales_channel=sale_data.get('sales_channel', 'IN_STORE'), # sales_channel is the model field
source="demo_clone", # Mark as seeded
is_validated=sale_data.get('is_validated', True),
validation_notes=sale_data.get('validation_notes'),

View File

@@ -148,31 +148,16 @@ async def clone_demo_data(
)
# Load seed data from JSON files
try:
from shared.utils.seed_data_paths import get_seed_data_path
if demo_account_type == "professional":
json_file = get_seed_data_path("professional", "05-suppliers.json")
elif demo_account_type == "enterprise":
json_file = get_seed_data_path("enterprise", "05-suppliers.json")
else:
raise ValueError(f"Invalid demo account type: {demo_account_type}")
from shared.utils.seed_data_paths import get_seed_data_path
except ImportError:
# Fallback to original path
seed_data_dir = Path(__file__).parent.parent.parent.parent / "infrastructure" / "seed-data"
if demo_account_type == "professional":
json_file = seed_data_dir / "professional" / "05-suppliers.json"
elif demo_account_type == "enterprise":
json_file = seed_data_dir / "enterprise" / "parent" / "05-suppliers.json"
else:
raise ValueError(f"Invalid demo account type: {demo_account_type}")
if not json_file.exists():
raise HTTPException(
status_code=404,
detail=f"Seed data file not found: {json_file}"
)
if demo_account_type == "professional":
json_file = get_seed_data_path("professional", "05-suppliers.json")
elif demo_account_type == "enterprise":
json_file = get_seed_data_path("enterprise", "05-suppliers.json")
elif demo_account_type == "enterprise_child":
json_file = get_seed_data_path("enterprise", "05-suppliers.json", child_id=base_tenant_id)
else:
raise ValueError(f"Invalid demo account type: {demo_account_type}")
# Load JSON data
with open(json_file, 'r', encoding='utf-8') as f:

View File

@@ -533,7 +533,7 @@ async def clone_demo_data(
}
@router.post("/create-child")
@router.post("/internal/demo/create-child")
async def create_child_outlet(
request: dict,
db: AsyncSession = Depends(get_db),
@@ -596,6 +596,23 @@ async def create_child_outlet(
}
}
# Get parent tenant to retrieve the correct owner_id
parent_result = await db.execute(select(Tenant).where(Tenant.id == parent_uuid))
parent_tenant = parent_result.scalars().first()
if not parent_tenant:
logger.error("Parent tenant not found", parent_tenant_id=parent_tenant_id)
return {
"service": "tenant",
"status": "failed",
"records_created": 0,
"duration_ms": int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000),
"error": f"Parent tenant {parent_tenant_id} not found"
}
# Use the parent's owner_id for the child tenant (enterprise demo owner)
parent_owner_id = parent_tenant.owner_id
# Create child tenant with parent relationship
child_tenant = Tenant(
id=virtual_uuid,
@@ -615,9 +632,9 @@ async def create_child_outlet(
tenant_type="child",
hierarchy_path=f"{str(parent_uuid)}.{str(virtual_uuid)}",
# Owner ID - using demo owner ID from parent
# In real implementation, this would be the same owner as the parent tenant
owner_id=uuid.UUID("c1a2b3c4-d5e6-47a8-b9c0-d1e2f3a4b5c6") # Demo owner ID
# Owner ID - MUST match the parent tenant owner (enterprise demo owner)
# This ensures the parent owner can see and access child tenants
owner_id=parent_owner_id
)
db.add(child_tenant)
@@ -685,17 +702,17 @@ async def create_child_outlet(
# Create basic tenant members like parent
import json
# Demo owner is the same as central_baker/enterprise_chain owner (not individual_bakery)
demo_owner_uuid = uuid.UUID("d2e3f4a5-b6c7-48d9-e0f1-a2b3c4d5e6f7")
# Use the parent's owner_id (already retrieved above)
# This ensures consistency between tenant.owner_id and TenantMember records
# Create tenant member for owner
child_owner_member = TenantMember(
tenant_id=virtual_uuid,
user_id=demo_owner_uuid,
user_id=parent_owner_id,
role="owner",
permissions=json.dumps(["read", "write", "admin", "delete"]),
is_active=True,
invited_by=demo_owner_uuid,
invited_by=parent_owner_id,
invited_at=datetime.now(timezone.utc),
joined_at=datetime.now(timezone.utc),
created_at=datetime.now(timezone.utc)
@@ -744,6 +761,7 @@ async def create_child_outlet(
virtual_tenant_id=str(virtual_tenant_id),
parent_tenant_id=str(parent_tenant_id),
child_name=child_name,
owner_id=str(parent_owner_id),
duration_ms=duration_ms
)