#!/bin/bash
# =================================================================
# IMPROVED ONBOARDING FLOW SIMULATION TEST SCRIPT
# =================================================================
# This script simulates the complete onboarding process using the
# real CSV data and proper import/validate endpoints
# Configuration
API_BASE="http://localhost:8000"
TEST_EMAIL="onboarding.test.$(date +%s)@bakery.com"
TEST_PASSWORD="TestPassword123!"
TEST_NAME="Test Bakery Owner"
REAL_CSV_FILE="bakery_sales_2023_2024.csv"
WS_BASE="ws://localhost:8002/api/v1/ws"
WS_TEST_DURATION=2000 # maximum seconds to wait for a WebSocket completion message (safety timeout)
WS_PID=""
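# The values above are plain shell assignments, so the quickest way to point
# this script at a different stack is to edit them here. A hypothetical
# override file (illustrative, not part of the repo) could also be sourced
# before the pre-flight checks below:
#   # override.env
#   # API_BASE="http://staging:8000"
#   # WS_BASE="ws://staging:8002/api/v1/ws"
#   # [ -f override.env ] && . ./override.env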
# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
PURPLE='\033[0;35m'
CYAN='\033[0;36m'
NC='\033[0m' # No Color
# Icons for steps
STEP_ICONS=("👤" "🏪" "📊" "🤖" "🎉")
echo -e "${CYAN}🧪 IMPROVED ONBOARDING FLOW SIMULATION TEST${NC}"
echo -e "${CYAN}==============================================${NC}"
echo "Testing complete user journey through onboarding process"
echo "Using full CSV dataset: $REAL_CSV_FILE"
echo "Test User: $TEST_EMAIL"
echo ""
# Utility functions
log_step() {
echo -e "${BLUE}📋 $1${NC}"
}
log_success() {
echo -e "${GREEN}$1${NC}"
}
log_error() {
echo -e "${RED}$1${NC}"
}
log_warning() {
echo -e "${YELLOW}⚠️ $1${NC}"
}
check_response() {
local response="$1"
local step_name="$2"
# Check for common error patterns
if echo "$response" | grep -q '"detail"' && echo "$response" | grep -q '"error"'; then
log_error "$step_name FAILED"
echo "Error details: $response"
return 1
elif echo "$response" | grep -q '500 Internal Server Error'; then
log_error "$step_name FAILED - Server Error"
echo "Response: $response"
return 1
elif echo "$response" | grep -q '"status".*"error"'; then
log_error "$step_name FAILED"
echo "Response: $response"
return 1
elif echo "$response" | grep -q '"detail".*\['; then
# This catches Pydantic validation errors (array of error objects)
log_error "$step_name FAILED - Validation Error"
echo "Response: $response"
return 1
else
log_success "$step_name PASSED"
return 0
fi
}
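# Illustrative usage of check_response (the real calls appear in the steps below):
#   RESP=$(curl -s "$API_BASE/health")
#   if check_response "$RESP" "Health Check"; then
#       echo "Gateway healthy"
#   fi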
# New function specifically for validation responses
check_validation_response() {
local response="$1"
local http_code="$2"
local step_name="$3"
# Check HTTP status first
if [ "$http_code" != "200" ]; then
log_error "$step_name FAILED - HTTP $http_code"
echo "Response: $response"
return 1
fi
# Check for validation-specific success indicators
if echo "$response" | grep -q '"is_valid".*true'; then
log_success "$step_name PASSED"
return 0
elif echo "$response" | grep -q '"is_valid".*false'; then
log_warning "$step_name FAILED - Validation errors found"
return 1
else
# Fall back to generic error checking
check_response "$response" "$step_name"
return $?
fi
}
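# Illustrative usage, assuming the body and HTTP code were captured separately:
#   check_validation_response "$BODY" "$HTTP_CODE" "CSV Validation"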
extract_json_field() {
local response="$1"
local field="$2"
# Create a temporary file for the JSON to avoid shell escaping issues
# ($$ expands to the script PID, keeping concurrent runs from colliding)
local temp_file="/tmp/json_response_$$.json"
echo "$response" > "$temp_file"
python3 -c "
import json
try:
    with open('$temp_file', 'r') as f:
        data = json.load(f)
    value = data.get('$field', '')
    # Print nested structures as JSON so callers can re-parse them
    print(json.dumps(value) if isinstance(value, (dict, list)) else value)
except Exception:
    print('')
" 2>/dev/null || echo ""
# Clean up
rm -f "$temp_file"
}
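# If jq is available, the same lookup works without Python. A minimal sketch
# (assumes jq is installed and the field sits at the top level of the JSON):
#   extract_json_field_jq() {
#       echo "$1" | jq -r --arg f "$2" '.[$f] // empty' 2>/dev/null
#   }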
# Function to escape CSV content for JSON
escape_csv_for_json() {
local csv_file="$1"
# Use Python to properly escape for JSON to avoid sed issues
python3 -c "
import json

# Read the CSV file
with open('$csv_file', 'r', encoding='utf-8') as f:
    content = f.read()

# Escape for JSON (this handles newlines, quotes, and control characters properly)
escaped = json.dumps(content)[1:-1]  # Remove the surrounding quotes that json.dumps adds
print(escaped)
"
}
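# Illustrative usage: embed the escaped CSV in a JSON payload by hand.
#   PAYLOAD=$(escape_csv_for_json "$REAL_CSV_FILE")
#   printf '{"data": "%s", "data_format": "csv"}' "$PAYLOAD" > /tmp/manual_request.json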
# Function to check for timezone-related errors
check_timezone_error() {
local response="$1"
if echo "$response" | grep -q "Cannot convert tz-naive Timestamp"; then
return 0 # Found timezone error
fi
return 1 # No timezone error
}
# ----------------------------------------------------------------
# IMPROVED WEBSOCKET MONITORING FUNCTION (CORRECTED)
# ----------------------------------------------------------------
# This function is now more robust, handling its own dependencies.
test_websocket_with_nodejs_builtin() {
local tenant_id="$1"
local job_id="$2"
local max_duration="$3" # Maximum time to wait in seconds
log_step "4.2.1. Starting robust WebSocket monitoring with Node.js"
# Check if node is installed
if ! command -v node >/dev/null 2>&1; then
log_error "Node.js is not installed. Cannot run WebSocket monitor."
echo "Please install Node.js to use this feature."
return 1
fi
# Check if npm is installed
if ! command -v npm >/dev/null 2>&1; then
log_error "npm is not installed. Cannot install Node.js dependencies."
echo "Please ensure npm is installed with Node.js."
return 1
fi
# Create a temporary directory for the script and its dependencies
local temp_dir=$(mktemp -d -t ws_monitor_XXXXXX)
local ws_monitor_script="$temp_dir/ws_monitor.js"
echo "Created temp directory: $temp_dir"
# Install the 'ws' module into the temporary directory
log_step "4.2.2. Installing 'ws' Node.js module in temporary directory..."
if ! (cd "$temp_dir" && npm install ws --silent >/dev/null); then
log_error "Failed to install 'ws' module. WebSocket monitoring will not run."
rm -rf "$temp_dir"
return 1
fi
log_success "'ws' module installed successfully."
# Write the Node.js WebSocket monitor script to the temporary directory
cat > "$ws_monitor_script" << 'EOF'
const WebSocket = require('ws');
const wsUrl = process.argv[2];
const accessToken = process.argv[3];
const maxDuration = parseInt(process.argv[4]); // in seconds
const ws = new WebSocket(wsUrl, {
headers: {
2025-07-31 16:03:30 +02:00
'Authorization': `Bearer ${accessToken}`
2025-08-01 20:43:02 +02:00
}
});
2025-07-31 16:03:30 +02:00
2025-08-01 20:43:02 +02:00
let timeout = setTimeout(() => {
console.error(`❌ WebSocket timeout after ${maxDuration} seconds. No completion message received.`);
ws.close();
process.exit(1);
}, maxDuration * 1000);
2025-07-31 16:03:30 +02:00
2025-08-01 20:43:02 +02:00
let pingInterval = setInterval(() => {
if (ws.readyState === WebSocket.OPEN) {
ws.send(JSON.stringify({ type: 'ping' }));
}
}, 30000); // Ping every 30 seconds to keep the connection alive
ws.onopen = () => {
console.log('✅ WebSocket connection established.');
2025-08-01 17:55:14 +02:00
};
2025-07-31 16:03:30 +02:00
2025-08-01 20:43:02 +02:00
ws.onmessage = (event) => {
try {
const message = JSON.parse(event.data);
const messageType = message.type || 'unknown';
const data = message.data || {};
const timestamp = new Date().toLocaleTimeString();
2025-07-31 16:03:30 +02:00
2025-08-01 20:43:02 +02:00
console.log(`[${timestamp}] 📨 Message: ${messageType.toUpperCase()}`);
2025-08-01 18:13:34 +02:00
2025-08-01 20:43:02 +02:00
if (messageType === 'progress') {
const progress = data.progress;
const step = data.current_step;
const productsCompleted = data.products_completed;
const productsTotal = data.products_total;
console.log(` 📊 Progress: ${progress}% - Step: ${step}`);
if (productsCompleted !== undefined && productsTotal !== undefined) {
console.log(` 📦 Products: ${productsCompleted}/${productsTotal}`);
2025-07-31 16:03:30 +02:00
}
2025-08-01 20:43:02 +02:00
} else if (messageType === 'completed') {
console.log('🎉 TRAINING COMPLETED SUCCESSFULLY!');
if (data.results) {
console.log(` ✅ Models Trained: ${data.results.successful_trainings}`);
2025-07-31 16:03:30 +02:00
}
2025-08-01 20:43:02 +02:00
clearTimeout(timeout);
clearInterval(pingInterval);
ws.close();
process.exit(0);
} else if (messageType === 'failed') {
console.error('❌ TRAINING FAILED!');
if (data.error) {
console.error(' 💥 Error:', data.error);
2025-08-01 17:55:14 +02:00
}
2025-08-01 20:43:02 +02:00
clearTimeout(timeout);
2025-08-01 17:55:14 +02:00
clearInterval(pingInterval);
2025-08-01 20:43:02 +02:00
ws.close();
2025-08-01 17:55:14 +02:00
process.exit(1);
2025-08-01 20:43:02 +02:00
} else if (messageType === 'heartbeat') {
// Heartbeat messages are handled, so we just log a debug message
console.log(' ❤️ Received heartbeat.');
} else if (messageType === 'initial_status') {
console.log(' Received initial status.');
console.log(' Status:', data.status);
console.log(' Progress:', data.progress);
}
2025-07-31 16:03:30 +02:00
2025-08-01 20:43:02 +02:00
console.log(''); // Add a newline for readability between messages
} catch (e) {
console.error('⚠️ Failed to parse message:', event.data, e);
}
};
2025-07-31 16:03:30 +02:00
2025-08-01 20:43:02 +02:00
ws.onclose = () => {
console.log('🔌 WebSocket connection closed.');
};
ws.onerror = (error) => {
console.error('💥 WebSocket error:', error.message);
process.exit(1);
};
2025-07-31 16:03:30 +02:00
EOF
local ws_url="$WS_BASE/tenants/$tenant_id/training/jobs/$job_id/live"
echo "Connecting to WebSocket: $ws_url"
# Run the monitor script from within the temporary directory
(cd "$temp_dir" && node ws_monitor.js "$ws_url" "$ACCESS_TOKEN" "$max_duration")
local exit_code=$?
# Clean up the temporary directory
log_step "4.2.3. Cleaning up temporary files..."
rm -rf "$temp_dir"
log_success "Cleanup complete."
if [ $exit_code -eq 0 ]; then
log_success "Training job completed successfully!"
return 0
else
log_error "WebSocket monitoring ended with an error."
return 1
fi
}
test_websocket_connection() {
local tenant_id="$1"
local job_id="$2"
local duration="$3"
log_step "4.2. Connecting to WebSocket for real-time progress monitoring"
echo "WebSocket URL: $WS_BASE/tenants/$tenant_id/training/jobs/$job_id/live"
echo "Test duration: ${duration}s"
echo ""
# Check for node and use the robust monitor script
if command -v node >/dev/null 2>&1 && command -v npm >/dev/null 2>&1; then
test_websocket_with_nodejs_builtin "$tenant_id" "$job_id" "$duration"
else
log_warning "Node.js or npm not found. Cannot run robust WebSocket monitor."
log_warning "Skipping real-time progress monitoring for this test."
return 0
fi
}
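# For a quick manual check outside this script, the same endpoint can be probed
# with the npm 'wscat' tool (a sketch, assuming wscat is installed and the
# endpoint accepts the same bearer token):
#   wscat -c "$WS_BASE/tenants/<tenant_id>/training/jobs/<job_id>/live" \
#         -H "Authorization: Bearer <access_token>"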
# Enhanced training step with WebSocket testing
enhanced_training_step_with_completion_check() {
echo -e "${STEP_ICONS[3]} ${PURPLE}STEP 4: MODEL TRAINING WITH SMART WEBSOCKET MONITORING${NC}"
echo "Enhanced training step with completion-aware progress monitoring"
echo ""
log_step "4.1. Initiating model training with FULL dataset"
# Start training job
TRAINING_RESPONSE=$(curl -s -w "\nHTTP_CODE:%{http_code}" -X POST "$API_BASE/api/v1/tenants/$TENANT_ID/training/jobs" \
-H "Authorization: Bearer $ACCESS_TOKEN" \
-H "Content-Type: application/json" \
-d '{}')
# Extract HTTP code and response
HTTP_CODE=$(echo "$TRAINING_RESPONSE" | grep "HTTP_CODE:" | cut -d: -f2)
TRAINING_RESPONSE=$(echo "$TRAINING_RESPONSE" | sed '/HTTP_CODE:/d')
echo "Training HTTP Status Code: $HTTP_CODE"
echo "Training Response:"
echo "$TRAINING_RESPONSE" | python3 -m json.tool 2>/dev/null || echo "$TRAINING_RESPONSE"
if [ "$HTTP_CODE" = "200" ] || [ "$HTTP_CODE" = "201" ]; then
# Extract training job details
TRAINING_TASK_ID=$(extract_json_field "$TRAINING_RESPONSE" "task_id")
JOB_ID=$(extract_json_field "$TRAINING_RESPONSE" "job_id")
JOB_STATUS=$(extract_json_field "$TRAINING_RESPONSE" "status")
# Use job_id if available, otherwise use task_id
WEBSOCKET_JOB_ID="${JOB_ID:-$TRAINING_TASK_ID}"
if [ -n "$WEBSOCKET_JOB_ID" ]; then
log_success "Training job started successfully"
echo " Job ID: $WEBSOCKET_JOB_ID"
echo " Status: $JOB_STATUS"
# Training is in progress - use smart monitoring
log_step "4.2. Starting smart WebSocket monitoring"
echo " Strategy: Monitor until job completion or timeout"
echo " Maximum wait time: ${WS_TEST_DURATION}s (safety timeout)"
echo " Will automatically close when training completes"
echo ""
# Call the improved WebSocket monitoring function
test_websocket_connection "$TENANT_ID" "$WEBSOCKET_JOB_ID" "$WS_TEST_DURATION"
else
log_warning "Training started but couldn't extract job ID for WebSocket testing"
echo "Response: $TRAINING_RESPONSE"
fi
else
log_error "Training job failed to start (HTTP $HTTP_CODE)"
echo "Response: $TRAINING_RESPONSE"
fi
echo ""
}
# =================================================================
# PRE-FLIGHT CHECKS
# =================================================================
echo -e "${PURPLE}🔍 Pre-flight checks...${NC}"
# Check if services are running
if ! curl -s "$API_BASE/health" > /dev/null; then
log_error "API Gateway is not responding at $API_BASE"
echo "Please ensure services are running: docker-compose up -d"
exit 1
fi
log_success "API Gateway is responding"
# Check if CSV file exists
if [ ! -f "$REAL_CSV_FILE" ]; then
log_error "Real CSV file not found: $REAL_CSV_FILE"
echo "Please ensure the CSV file is in the current directory"
exit 1
fi
log_success "Real CSV file found: $REAL_CSV_FILE"
# Show CSV file info - FULL DATASET
echo "CSV file info (FULL DATASET):"
echo " Lines: $(wc -l < "$REAL_CSV_FILE")"
echo " Size: $(du -h "$REAL_CSV_FILE" | cut -f1)"
echo " Header: $(head -1 "$REAL_CSV_FILE")"
# Check individual services
services_check() {
local service_ports=("8001:Auth" "8002:Training" "8003:Data" "8005:Tenant")
for service in "${service_ports[@]}"; do
IFS=':' read -r port name <<< "$service"
if curl -s "http://localhost:$port/health" > /dev/null; then
echo "$name Service (port $port)"
else
log_warning "$name Service not responding on port $port"
fi
done
}
services_check
echo ""
# =================================================================
# STEP 1: USER REGISTRATION (ONBOARDING PAGE STEP 1)
# =================================================================
echo -e "${STEP_ICONS[0]} ${PURPLE}STEP 1: USER REGISTRATION${NC}"
echo "Simulating onboarding page step 1 - 'Crear Cuenta'"
echo ""
log_step "1.1. Registering new user account"
echo "Email: $TEST_EMAIL"
echo "Full Name: $TEST_NAME"
echo "Password: [HIDDEN]"
REGISTER_RESPONSE=$(curl -s -X POST "$API_BASE/api/v1/auth/register" \
-H "Content-Type: application/json" \
-d "{
\"email\": \"$TEST_EMAIL\",
\"password\": \"$TEST_PASSWORD\",
\"full_name\": \"$TEST_NAME\",
\"role\": \"admin\"
}")
echo "Registration Response:"
echo "$REGISTER_RESPONSE" | python3 -m json.tool 2>/dev/null || echo "$REGISTER_RESPONSE"
if check_response "$REGISTER_RESPONSE" "User Registration"; then
USER_ID=$(echo "$REGISTER_RESPONSE" | python3 -c "
import json, sys
try:
    data = json.load(sys.stdin)
    user = data.get('user', {})
    print(user.get('id', ''))
except:
    print('')
")
if [ -n "$USER_ID" ]; then
log_success "User ID extracted: $USER_ID"
fi
else
echo "Full response: $REGISTER_RESPONSE"
exit 1
fi
echo ""
# =================================================================
# STEP 1.5: USER LOGIN (AUTOMATIC AFTER REGISTRATION)
# =================================================================
log_step "1.5. Logging in to get access token"
LOGIN_RESPONSE=$(curl -s -X POST "$API_BASE/api/v1/auth/login" \
-H "Content-Type: application/json" \
-d "{
\"email\": \"$TEST_EMAIL\",
\"password\": \"$TEST_PASSWORD\"
}")
echo "Login Response:"
echo "$LOGIN_RESPONSE" | python3 -m json.tool 2>/dev/null || echo "$LOGIN_RESPONSE"
if check_response "$LOGIN_RESPONSE" "User Login"; then
ACCESS_TOKEN=$(extract_json_field "$LOGIN_RESPONSE" "access_token")
if [ -n "$ACCESS_TOKEN" ]; then
log_success "Access token obtained"
else
log_error "Failed to extract access token"
exit 1
fi
else
echo "Full response: $LOGIN_RESPONSE"
exit 1
fi
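# The access token is a JWT, so its claims can be eyeballed when debugging
# auth issues (a sketch; assumes a standard three-part token and may need
# base64 padding added by hand):
#   echo "$ACCESS_TOKEN" | cut -d. -f2 | base64 -d 2>/dev/null | python3 -m json.tool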
echo ""
# =================================================================
# STEP 2: BAKERY REGISTRATION (ONBOARDING PAGE STEP 2)
# =================================================================
echo -e "${STEP_ICONS[1]} ${PURPLE}STEP 2: BAKERY REGISTRATION${NC}"
echo "Simulating onboarding page step 2 - 'Datos de Panadería'"
echo ""
log_step "2.1. Registering bakery/tenant with mock coordinates"
# Mock coordinates for Madrid locations (since the geolocation service is not running)
# These are real Madrid coordinates for testing weather and traffic data acquisition
MADRID_COORDS=(
"40.4168:-3.7038" # Sol (city center)
"40.4378:-3.6795" # Retiro area
"40.4093:-3.6936" # Atocha area
"40.4517:-3.6847" # Chamberí area
"40.3897:-3.6774" # Delicias area
)
# Select random coordinates from Madrid locations
SELECTED_COORDS=${MADRID_COORDS[$((RANDOM % ${#MADRID_COORDS[@]}))]}
IFS=':' read -r MOCK_LATITUDE MOCK_LONGITUDE <<< "$SELECTED_COORDS"
echo "Using mock coordinates for Madrid:"
echo " Latitude: $MOCK_LATITUDE"
echo " Longitude: $MOCK_LONGITUDE"
echo " (This simulates the address-to-coordinates conversion service)"
# Uses the exact BakeryRegistration schema; the mock coordinates are stored separately for later steps
BAKERY_DATA="{
\"name\": \"Panadería Test $(date +%H%M)\",
\"business_type\": \"bakery\",
\"address\": \"Calle Gran Vía 123\",
\"city\": \"Madrid\",
\"postal_code\": \"28001\",
\"phone\": \"+34600123456\"
}"
echo "Bakery Data with mock coordinates:"
echo "$BAKERY_DATA" | python3 -m json.tool
BAKERY_RESPONSE=$(curl -s -w "\nHTTP_CODE:%{http_code}" -X POST "$API_BASE/api/v1/tenants/register" \
-H "Content-Type: application/json" \
-H "Authorization: Bearer $ACCESS_TOKEN" \
-d "$BAKERY_DATA")
# Extract HTTP code and response
HTTP_CODE=$(echo "$BAKERY_RESPONSE" | grep "HTTP_CODE:" | cut -d: -f2)
BAKERY_RESPONSE=$(echo "$BAKERY_RESPONSE" | sed '/HTTP_CODE:/d')
echo "HTTP Status Code: $HTTP_CODE"
echo "Bakery Registration Response:"
echo "$BAKERY_RESPONSE" | python3 -m json.tool 2>/dev/null || echo "$BAKERY_RESPONSE"
if check_response "$BAKERY_RESPONSE" "Bakery Registration"; then
TENANT_ID=$(extract_json_field "$BAKERY_RESPONSE" "id")
if [ -n "$TENANT_ID" ]; then
log_success "Tenant ID extracted: $TENANT_ID"
log_success "Mock coordinates will be used for weather/traffic data: ($MOCK_LATITUDE, $MOCK_LONGITUDE)"
# Store coordinates for later use in training
echo "BAKERY_LATITUDE=$MOCK_LATITUDE" > /tmp/bakery_coords.env
echo "BAKERY_LONGITUDE=$MOCK_LONGITUDE" >> /tmp/bakery_coords.env
echo "TENANT_ID=$TENANT_ID" >> /tmp/bakery_coords.env
else
log_error "Failed to extract tenant ID"
exit 1
fi
else
echo "Full response: $BAKERY_RESPONSE"
exit 1
fi
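# Later steps (or follow-up scripts) can re-load the stored coordinates, e.g.:
#   . /tmp/bakery_coords.env && echo "Coords: $BAKERY_LATITUDE, $BAKERY_LONGITUDE"
# (illustrative; the file is only written when registration succeeds)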
echo ""
# =================================================================
# STEP 3: SALES DATA UPLOAD (ONBOARDING PAGE STEP 3)
# =================================================================
echo -e "${STEP_ICONS[2]} ${PURPLE}STEP 3: SALES DATA UPLOAD${NC}"
echo "Simulating onboarding page step 3 - 'Historial de Ventas'"
echo ""
log_step "3.1. Validating full sales data format"

# Read and escape CSV content for JSON using Python for reliability
log_step "3.1.1. Preparing FULL CSV data for JSON transmission"

CSV_CONTENT=$(escape_csv_for_json "$REAL_CSV_FILE")

if [ $? -ne 0 ] || [ -z "$CSV_CONTENT" ]; then
log_error "Failed to escape CSV content for JSON"
exit 1
fi

log_success "FULL CSV content escaped successfully (length: ${#CSV_CONTENT} chars)"
# Create validation request using Python for proper JSON formatting
log_step "3.1.2. Creating validation request with FULL dataset"
VALIDATION_DATA_FILE="/tmp/validation_request.json"
python3 -c "
import json

# Read the FULL CSV content
with open('$REAL_CSV_FILE', 'r', encoding='utf-8') as f:
    csv_content = f.read()

# Create proper JSON request
request_data = {
    'data': csv_content,
    'data_format': 'csv',
    'validate_only': True,
    'source': 'onboarding_upload'
}

# Write to file
with open('$VALIDATION_DATA_FILE', 'w', encoding='utf-8') as f:
    json.dump(request_data, f, ensure_ascii=False, indent=2)
print('Validation request file created successfully')
"
if [ ! -f "$VALIDATION_DATA_FILE" ]; then
log_error "Failed to create validation request file"
exit 1
fi
echo "Validation request (first 200 chars):"
head -c 200 "$VALIDATION_DATA_FILE"
echo "..."
VALIDATION_RESPONSE=$(curl -s -w "\nHTTP_CODE:%{http_code}" -X POST "$API_BASE/api/v1/tenants/$TENANT_ID/sales/import/validate-json" \
-H "Content-Type: application/json" \
-H "Authorization: Bearer $ACCESS_TOKEN" \
-d @"$VALIDATION_DATA_FILE")
# Extract HTTP code and response
HTTP_CODE=$(echo "$VALIDATION_RESPONSE" | grep "HTTP_CODE:" | cut -d: -f2)
VALIDATION_RESPONSE=$(echo "$VALIDATION_RESPONSE" | sed '/HTTP_CODE:/d')
echo "HTTP Status Code: $HTTP_CODE"
echo "Validation Response:"
echo "$VALIDATION_RESPONSE" | python3 -m json.tool 2>/dev/null || echo "$VALIDATION_RESPONSE"
# Parse validation results using the SalesValidationResult schema
IS_VALID=$(extract_json_field "$VALIDATION_RESPONSE" "is_valid")
TOTAL_RECORDS=$(extract_json_field "$VALIDATION_RESPONSE" "total_records")
VALID_RECORDS=$(extract_json_field "$VALIDATION_RESPONSE" "valid_records")
INVALID_RECORDS=$(extract_json_field "$VALIDATION_RESPONSE" "invalid_records")
if [ "$IS_VALID" = "True" ]; then
log_success "FULL sales data validation passed"
echo " Total records: $TOTAL_RECORDS"
echo " Valid records: $VALID_RECORDS"
echo " Invalid records: $INVALID_RECORDS"
elif [ "$IS_VALID" = "False" ]; then
log_error "FULL sales data validation failed"
echo " Total records: $TOTAL_RECORDS"
echo " Valid records: $VALID_RECORDS"
echo " Invalid records: $INVALID_RECORDS"
# Extract and display errors
echo "Validation errors:"
echo "$VALIDATION_RESPONSE" | python3 -c "
import json, sys
try:
data = json.load(sys.stdin)
errors = data.get('errors', [])
for i, err in enumerate(errors[:5]): # Show first 5 errors
print(f' {i+1}. {err.get(\"message\", \"Unknown error\")}')
if len(errors) > 5:
print(f' ... and {len(errors) - 5} more errors')
except:
print(' Could not parse error details')
" 2>/dev/null
log_warning "Validation failed, but continuing to test import flow..."
else
log_warning "Validation response format unexpected, but continuing..."
fi
log_step "3.2. Importing FULL sales data using file upload"
# The import endpoint expects form data (file upload), not JSON
# Use curl's -F flag for multipart/form-data
IMPORT_RESPONSE=$(curl -s -w "\nHTTP_CODE:%{http_code}" -X POST "$API_BASE/api/v1/tenants/$TENANT_ID/sales/import" \
-H "Authorization: Bearer $ACCESS_TOKEN" \
-F "file=@$REAL_CSV_FILE" \
-F "file_format=csv")
# Extract HTTP code and response
HTTP_CODE=$(echo "$IMPORT_RESPONSE" | grep "HTTP_CODE:" | cut -d: -f2)
IMPORT_RESPONSE=$(echo "$IMPORT_RESPONSE" | sed '/HTTP_CODE:/d')
echo "Import HTTP Status Code: $HTTP_CODE"
echo "Import Response:"
echo "$IMPORT_RESPONSE" | python3 -m json.tool 2>/dev/null || echo "$IMPORT_RESPONSE"
# Check for import success using SalesImportResult schema
if [ "$HTTP_CODE" = "200" ]; then
IMPORT_SUCCESS=$(extract_json_field "$IMPORT_RESPONSE" "success")
RECORDS_CREATED=$(extract_json_field "$IMPORT_RESPONSE" "records_created")
RECORDS_FAILED=$(extract_json_field "$IMPORT_RESPONSE" "records_failed")
RECORDS_PROCESSED=$(extract_json_field "$IMPORT_RESPONSE" "records_processed")
SUCCESS_RATE=$(extract_json_field "$IMPORT_RESPONSE" "success_rate")
if [ "$IMPORT_SUCCESS" = "True" ] || [ "$IMPORT_SUCCESS" = "true" ]; then
log_success "FULL dataset import completed successfully"
echo " Records processed: $RECORDS_PROCESSED"
2025-07-27 10:01:37 +02:00
echo " Records created: $RECORDS_CREATED"
echo " Records failed: $RECORDS_FAILED"
echo " Success rate: $SUCCESS_RATE%"
echo " Processing time: $(extract_json_field "$IMPORT_RESPONSE" "processing_time_seconds")s"
if [ "$RECORDS_FAILED" -gt 0 ] 2>/dev/null; then
log_warning "$RECORDS_FAILED records failed during import"
fi
elif [ "$IMPORT_SUCCESS" = "False" ] || [ "$IMPORT_SUCCESS" = "false" ]; then
log_error "Import reported failure despite HTTP 200"
echo "Import response: $IMPORT_RESPONSE"
else
log_warning "Could not parse import success field (got: '$IMPORT_SUCCESS')"
# Fallback: if we got HTTP 200 and response contains records data, assume success
if echo "$IMPORT_RESPONSE" | grep -q '"records_created"\|"records_processed"'; then
log_success "Import appears successful based on response content"
FALLBACK_CREATED=$(echo "$IMPORT_RESPONSE" | grep -o '"records_created":[0-9]*' | cut -d: -f2 | head -1)
FALLBACK_PROCESSED=$(echo "$IMPORT_RESPONSE" | grep -o '"records_processed":[0-9]*' | cut -d: -f2 | head -1)
echo " Records processed: $FALLBACK_PROCESSED"
echo " Records created: $FALLBACK_CREATED"
fi
fi
else
log_warning "FULL dataset import failed with HTTP $HTTP_CODE, but continuing with test..."
# Check for timezone error specifically
if check_timezone_error "$IMPORT_RESPONSE"; then
log_warning "Detected timezone conversion error - this is a known issue"
echo "Consider applying timezone fix to data import service"
fi
fi
echo ""
# =================================================================
# STEP 4: MODEL TRAINING (ONBOARDING PAGE STEP 4)
# =================================================================
enhanced_training_step_with_completion_check
echo ""
# =================================================================
# STEP 5: ONBOARDING COMPLETION (DASHBOARD ACCESS)
# =================================================================
log_step "5.1. Testing basic dashboard functionality"
# Forecast request with the proper schema
FORECAST_REQUEST="{
\"product_name\": \"pan\",
\"forecast_date\": \"2025-08-08\",
\"forecast_days\": 1,
\"location\": \"madrid_centro\",
\"confidence_level\": 0.85
}"
echo "Forecast Request:"
echo "$FORECAST_REQUEST" | python3 -m json.tool
# Make the API call
FORECAST_RESPONSE=$(curl -s -w "\nHTTP_CODE:%{http_code}" -X POST "$API_BASE/api/v1/tenants/$TENANT_ID/forecasts/single" \
-H "Content-Type: application/json" \
-H "Authorization: Bearer $ACCESS_TOKEN" \
-d "$FORECAST_REQUEST")
# Extract HTTP code and response
HTTP_CODE=$(echo "$FORECAST_RESPONSE" | grep "HTTP_CODE:" | cut -d: -f2)
FORECAST_RESPONSE=$(echo "$FORECAST_RESPONSE" | sed '/HTTP_CODE:/d')
echo "Forecast HTTP Status: $HTTP_CODE"
echo "Forecast Response:"
echo "$FORECAST_RESPONSE" | python3 -m json.tool 2>/dev/null || echo "$FORECAST_RESPONSE"
# Validate response
if [ "$HTTP_CODE" = "200" ]; then
if echo "$FORECAST_RESPONSE" | grep -q '"predicted_demand"\|"id"'; then
log_success "Forecasting service is working correctly"
# Extract key values for validation
PREDICTED_DEMAND=$(extract_json_field "$FORECAST_RESPONSE" "predicted_demand")
CONFIDENCE_LOWER=$(extract_json_field "$FORECAST_RESPONSE" "confidence_lower")
CONFIDENCE_UPPER=$(extract_json_field "$FORECAST_RESPONSE" "confidence_upper")
if [ -n "$PREDICTED_DEMAND" ]; then
echo " Predicted Demand: $PREDICTED_DEMAND"
echo " Confidence Range: [$CONFIDENCE_LOWER, $CONFIDENCE_UPPER]"
fi
else
log_error "Forecast response missing expected fields"
echo "Response: $FORECAST_RESPONSE"
fi
elif [ "$HTTP_CODE" = "422" ]; then
log_error "Forecast request validation failed"
echo "Validation errors: $FORECAST_RESPONSE"
elif [ "$HTTP_CODE" = "404" ]; then
log_warning "Forecast endpoint not found - check API routing"
elif [ "$HTTP_CODE" = "500" ]; then
log_error "Internal server error in forecasting service"
echo "Error details: $FORECAST_RESPONSE"
else
log_warning "Forecasting may not be ready yet (HTTP $HTTP_CODE)"
echo "Response: $FORECAST_RESPONSE"
fi
echo ""
# =================================================================
# STEP 6: ADMIN USER DELETION TEST (NEW)
# =================================================================
echo -e "${STEP_ICONS[4]} ${PURPLE}STEP 6: ADMIN USER DELETION TEST${NC}"
echo "Testing complete admin user deletion with all associated data cleanup"
echo ""
log_step "6.1. Getting deletion preview for test user"
# First, get a preview of what would be deleted
DELETION_PREVIEW_RESPONSE=$(curl -s -w "\nHTTP_CODE:%{http_code}" -X GET "$API_BASE/api/v1/users/delete/$USER_ID/deletion-preview" \
-H "Authorization: Bearer $ACCESS_TOKEN" \
-H "Content-Type: application/json")
# Extract HTTP code and response
HTTP_CODE=$(echo "$DELETION_PREVIEW_RESPONSE" | grep "HTTP_CODE:" | cut -d: -f2)
DELETION_PREVIEW_RESPONSE=$(echo "$DELETION_PREVIEW_RESPONSE" | sed '/HTTP_CODE:/d')
echo "Deletion Preview HTTP Status: $HTTP_CODE"
echo "Deletion Preview Response:"
echo "$DELETION_PREVIEW_RESPONSE" | python3 -m json.tool 2>/dev/null || echo "$DELETION_PREVIEW_RESPONSE"
if [ "$HTTP_CODE" = "200" ]; then
# Extract preview information
TOTAL_TENANTS=$(extract_json_field "$DELETION_PREVIEW_RESPONSE" "tenant_associations" | python3 -c "
import json, sys
try:
    data = json.load(sys.stdin)
    print(data.get('total_tenants', 0))
except:
    print(0)
" 2>/dev/null)
OWNED_TENANTS=$(extract_json_field "$DELETION_PREVIEW_RESPONSE" "tenant_associations" | python3 -c "
import json, sys
try:
    data = json.load(sys.stdin)
    print(data.get('owned_tenants', 0))
except:
    print(0)
" 2>/dev/null)
log_success "Deletion preview obtained successfully"
echo " User to delete: $TEST_EMAIL"
echo " Total tenant associations: $TOTAL_TENANTS"
echo " Owned tenants: $OWNED_TENANTS"
echo ""
log_step "6.2. Executing admin user deletion"
echo "This will delete:"
echo " ✓ User account and authentication data"
echo " ✓ All tenant memberships and owned tenants"
echo " ✓ All training models and artifacts"
echo " ✓ All forecasts and predictions"
echo " ✓ All notification preferences and logs"
echo ""
# Wait a moment to show the preview
sleep 2
# Execute the deletion
DELETION_RESPONSE=$(curl -s -w "\nHTTP_CODE:%{http_code}" -X DELETE "$API_BASE/api/v1/users/delete/$USER_ID" \
-H "Authorization: Bearer $ACCESS_TOKEN")
# Extract HTTP code and response
HTTP_CODE=$(echo "$DELETION_RESPONSE" | grep "HTTP_CODE:" | cut -d: -f2)
DELETION_RESPONSE=$(echo "$DELETION_RESPONSE" | sed '/HTTP_CODE:/d')
echo "Admin Deletion HTTP Status: $HTTP_CODE"
echo "Admin Deletion Response:"
echo "$DELETION_RESPONSE" | python3 -m json.tool 2>/dev/null || echo "$DELETION_RESPONSE"
if [ "$HTTP_CODE" = "200" ]; then
DELETION_SUCCESS=$(extract_json_field "$DELETION_RESPONSE" "success")
if [ "$DELETION_SUCCESS" = "True" ] || [ "$DELETION_SUCCESS" = "true" ]; then
log_success "Admin user deletion initiated successfully"
echo " Status: Processing in background"
echo " Message: $(extract_json_field "$DELETION_RESPONSE" "message")"
log_step "6.3. Monitoring deletion progress (background task)"
echo " Note: Deletion runs as background task for better performance"
echo " Monitoring for 30 seconds to check completion..."
# Monitor for completion by trying to access user data
MONITOR_COUNT=0
MAX_MONITOR_ATTEMPTS=30
while [ $MONITOR_COUNT -lt $MAX_MONITOR_ATTEMPTS ]; do
sleep 1
MONITOR_COUNT=$((MONITOR_COUNT + 1))
# Try to get user info (should fail when deletion completes)
CHECK_RESPONSE=$(curl -s -w "\nHTTP_CODE:%{http_code}" -X GET "$API_BASE/api/v1/users/me" \
-H "Authorization: Bearer $ACCESS_TOKEN" 2>/dev/null)
CHECK_HTTP_CODE=$(echo "$CHECK_RESPONSE" | grep "HTTP_CODE:" | cut -d: -f2)
if [ "$CHECK_HTTP_CODE" = "401" ] || [ "$CHECK_HTTP_CODE" = "404" ]; then
log_success "User deletion completed (user no longer accessible)"
echo " Deletion verified after ${MONITOR_COUNT}s"
break
elif [ $MONITOR_COUNT -eq $MAX_MONITOR_ATTEMPTS ]; then
log_warning "Deletion monitoring timed out after ${MAX_MONITOR_ATTEMPTS}s"
echo " Deletion may still be processing in background"
echo " Check server logs for completion status"
fi
# Show progress every 5 seconds
if [ $((MONITOR_COUNT % 5)) -eq 0 ]; then
echo " Monitoring... ${MONITOR_COUNT}s/${MAX_MONITOR_ATTEMPTS}s"
fi
done
else
log_error "Admin user deletion failed"
echo "Response: $DELETION_RESPONSE"
fi
elif [ "$HTTP_CODE" = "400" ]; then
log_error "Deletion request was invalid"
echo "Error details: $DELETION_RESPONSE"
elif [ "$HTTP_CODE" = "403" ]; then
log_error "Insufficient permissions for deletion"
echo "Note: Only admin users can delete other admin users"
elif [ "$HTTP_CODE" = "404" ]; then
log_error "User not found for deletion"
echo "User ID: $USER_ID may have already been deleted"
else
log_error "Admin user deletion failed (HTTP $HTTP_CODE)"
echo "Response: $DELETION_RESPONSE"
fi
else
log_error "Failed to get deletion preview (HTTP $HTTP_CODE)"
echo "Cannot proceed with deletion test"
echo "Response: $DELETION_PREVIEW_RESPONSE"
fi
log_step "6.4. Verifying cleanup completion"
# Try to login with the deleted user (should fail)
VERIFY_LOGIN_RESPONSE=$(curl -s -w "\nHTTP_CODE:%{http_code}" -X POST "$API_BASE/api/v1/auth/login" \
-H "Content-Type: application/json" \
-d "{
\"email\": \"$TEST_EMAIL\",
\"password\": \"$TEST_PASSWORD\"
}")
VERIFY_HTTP_CODE=$(echo "$VERIFY_LOGIN_RESPONSE" | grep "HTTP_CODE:" | cut -d: -f2)
VERIFY_LOGIN_RESPONSE=$(echo "$VERIFY_LOGIN_RESPONSE" | sed '/HTTP_CODE:/d')
if [ "$VERIFY_HTTP_CODE" = "401" ] || [ "$VERIFY_HTTP_CODE" = "404" ]; then
log_success "Verification: User login properly blocked (user deleted)"
echo " HTTP Status: $VERIFY_HTTP_CODE"
elif [ "$VERIFY_HTTP_CODE" = "200" ]; then
log_warning "Verification: User can still login (deletion may not be complete)"
echo " This could indicate deletion is still processing"
else
log_warning "Verification: Unexpected login response (HTTP $VERIFY_HTTP_CODE)"
echo " Response: $VERIFY_LOGIN_RESPONSE"
fi
echo ""
# =================================================================
# COMPLETE ONBOARDING + DELETION FLOW TEST SUMMARY
# =================================================================
echo -e "${CYAN}📊 COMPLETE ONBOARDING + DELETION FLOW TEST SUMMARY${NC}"
echo -e "${CYAN}===================================================${NC}"
echo ""
echo "✅ Completed All Test Steps:"
echo " ${STEP_ICONS[0]} Step 1: User Registration ✓"
echo " ${STEP_ICONS[1]} Step 2: Bakery Registration ✓"
echo " ${STEP_ICONS[2]} Step 3: FULL Sales Data Upload ✓"
echo " ${STEP_ICONS[3]} Step 4: Model Training with FULL Data ✓"
echo " ${STEP_ICONS[4]} Step 5: Onboarding Complete ✓"
echo " 🗑️ Step 6: Admin User Deletion Test ✓"
echo ""
echo "📋 Test Results:"
echo " Original User ID: $USER_ID"
echo " Original Tenant ID: $TENANT_ID"
echo " Training Task ID: $TRAINING_TASK_ID"
echo " Test Email: $TEST_EMAIL"
echo " FULL CSV Used: $REAL_CSV_FILE"
echo " Total Records in Dataset: $(wc -l < "$REAL_CSV_FILE" 2>/dev/null || echo "Unknown")"
echo ""
echo "📈 Data Quality:"
if [ -n "$TOTAL_RECORDS" ]; then
echo " Total Records Processed: $TOTAL_RECORDS"
echo " Valid Records: $VALID_RECORDS"
echo " Invalid Records: $INVALID_RECORDS"
if [ "$TOTAL_RECORDS" -gt 0 ]; then
VALID_PERCENTAGE=$(python3 -c "print(round(${VALID_RECORDS:-0} / ${TOTAL_RECORDS} * 100, 1))" 2>/dev/null || echo "N/A")
echo " Data Quality: $VALID_PERCENTAGE% valid"
fi
else
echo " Data validation metrics not available"
fi
echo ""
echo "🗑️ Deletion Test Results:"
if [ "$DELETION_SUCCESS" = "True" ] || [ "$DELETION_SUCCESS" = "true" ]; then
echo " ✅ Admin user deletion: SUCCESS"
echo " ✅ Associated data cleanup: INITIATED"
echo " ✅ User authentication: BLOCKED"
echo " 📊 Tenant associations cleaned: $TOTAL_TENANTS"
echo " 🏢 Owned tenants handled: $OWNED_TENANTS"
else
echo " ❌ Admin user deletion: FAILED or INCOMPLETE"
echo " ⚠️ Manual cleanup may be required"
fi
echo ""
echo "🧹 Cleanup Status:"
if [ "$DELETION_SUCCESS" = "True" ] || [ "$DELETION_SUCCESS" = "true" ]; then
echo " ✅ Automatic cleanup completed via admin deletion"
echo " ✅ Test user and tenant data removed"
echo " ✅ Training models and forecasts deleted"
echo " ✅ All associated data cleaned up"
else
echo " ⚠️ Automatic cleanup failed - manual cleanup needed:"
echo " - Test user: $TEST_EMAIL"
echo " - Test tenant: $TENANT_ID"
echo " - Training models and forecasts"
fi
# Cleanup temporary files
rm -f "$VALIDATION_DATA_FILE"
rm -f /tmp/bakery_coords.env
echo ""
if [ "$DELETION_SUCCESS" = "True" ] || [ "$DELETION_SUCCESS" = "true" ]; then
log_success "Complete onboarding + deletion flow test finished successfully!"
echo -e "${CYAN}✅ All steps completed: Registration → Onboarding → Training → Deletion → Cleanup${NC}"
else
log_warning "Onboarding flow completed, but deletion test had issues"
echo -e "${YELLOW}⚠️ Onboarding steps passed, but admin deletion needs investigation${NC}"
fi