Add user delete process

Urtzi Alfaro
2025-10-31 11:54:19 +01:00
parent 63f5c6d512
commit 269d3b5032
74 changed files with 16783 additions and 213 deletions


@@ -0,0 +1,362 @@
"""
Integration Tests for Tenant Deletion System
Tests the complete deletion flow across all 12 microservices
"""
import asyncio
import sys
import time
from uuid import uuid4

import httpx
import pytest
import pytest_asyncio
import structlog

logger = structlog.get_logger(__name__)

# Test Configuration
BASE_URLS = {
"tenant": "http://tenant-service:8000/api/v1",
"orders": "http://orders-service:8000/api/v1",
"inventory": "http://inventory-service:8000/api/v1",
"recipes": "http://recipes-service:8000/api/v1",
"sales": "http://sales-service:8000/api/v1",
"production": "http://production-service:8000/api/v1",
"suppliers": "http://suppliers-service:8000/api/v1",
"pos": "http://pos-service:8000/api/v1",
"external": "http://external-service:8000/api/v1",
"forecasting": "http://forecasting-service:8000/api/v1",
"training": "http://training-service:8000/api/v1",
"alert_processor": "http://alert-processor-service:8000/api/v1",
"notification": "http://notification-service:8000/api/v1",
}

# Test tenant ID (use a real demo tenant from the system)
TEST_TENANT_ID = "dbc2128a-7539-470c-94b9-c1e37031bd77"  # Demo tenant


@pytest_asyncio.fixture
async def service_token():
    """Get a service JWT token for authentication"""
    # TODO: Implement actual token generation
    # For now, use an environment variable or a mock
    return "service_token_placeholder"


@pytest_asyncio.fixture
async def http_client():
    """Create an async HTTP client"""
    async with httpx.AsyncClient(verify=False, timeout=30.0) as client:
        yield client


class TestIndividualServiceDeletion:
    """Test each service's deletion endpoint individually"""

@pytest.mark.asyncio
async def test_orders_service_preview(self, http_client, service_token):
"""Test Orders service deletion preview"""
url = f"{BASE_URLS['orders']}/orders/tenant/{TEST_TENANT_ID}/deletion-preview"
headers = {"Authorization": f"Bearer {service_token}"}
response = await http_client.get(url, headers=headers)
assert response.status_code == 200
data = response.json()
assert "preview" in data
assert "total_records" in data
assert data["service"] == "orders"
logger.info("orders.preview_test.passed", data=data)
@pytest.mark.asyncio
async def test_inventory_service_preview(self, http_client, service_token):
"""Test Inventory service deletion preview"""
url = f"{BASE_URLS['inventory']}/inventory/tenant/{TEST_TENANT_ID}/deletion-preview"
headers = {"Authorization": f"Bearer {service_token}"}
response = await http_client.get(url, headers=headers)
assert response.status_code == 200
data = response.json()
assert "preview" in data
assert "total_records" in data
logger.info("inventory.preview_test.passed", data=data)
@pytest.mark.asyncio
async def test_recipes_service_preview(self, http_client, service_token):
"""Test Recipes service deletion preview"""
url = f"{BASE_URLS['recipes']}/recipes/tenant/{TEST_TENANT_ID}/deletion-preview"
headers = {"Authorization": f"Bearer {service_token}"}
response = await http_client.get(url, headers=headers)
assert response.status_code == 200
data = response.json()
assert "preview" in data
logger.info("recipes.preview_test.passed", data=data)
@pytest.mark.asyncio
async def test_forecasting_service_preview(self, http_client, service_token):
"""Test Forecasting service deletion preview"""
url = f"{BASE_URLS['forecasting']}/forecasting/tenant/{TEST_TENANT_ID}/deletion-preview"
headers = {"Authorization": f"Bearer {service_token}"}
response = await http_client.get(url, headers=headers)
assert response.status_code == 200
data = response.json()
assert "preview" in data
assert data["service"] == "forecasting"
logger.info("forecasting.preview_test.passed", data=data)
@pytest.mark.asyncio
async def test_training_service_preview(self, http_client, service_token):
"""Test Training service deletion preview"""
url = f"{BASE_URLS['training']}/training/tenant/{TEST_TENANT_ID}/deletion-preview"
headers = {"Authorization": f"Bearer {service_token}"}
response = await http_client.get(url, headers=headers)
assert response.status_code == 200
data = response.json()
assert "preview" in data
assert data["service"] == "training"
logger.info("training.preview_test.passed", data=data)
@pytest.mark.asyncio
async def test_notification_service_preview(self, http_client, service_token):
"""Test Notification service deletion preview"""
url = f"{BASE_URLS['notification']}/notifications/tenant/{TEST_TENANT_ID}/deletion-preview"
headers = {"Authorization": f"Bearer {service_token}"}
response = await http_client.get(url, headers=headers)
assert response.status_code == 200
data = response.json()
assert "preview" in data
assert data["service"] == "notification"
logger.info("notification.preview_test.passed", data=data)
@pytest.mark.asyncio
async def test_all_services_preview_parallel(self, http_client, service_token):
"""Test all services' preview endpoints in parallel"""
headers = {"Authorization": f"Bearer {service_token}"}
# Define all preview URLs
preview_urls = {
"orders": f"{BASE_URLS['orders']}/orders/tenant/{TEST_TENANT_ID}/deletion-preview",
"inventory": f"{BASE_URLS['inventory']}/inventory/tenant/{TEST_TENANT_ID}/deletion-preview",
"recipes": f"{BASE_URLS['recipes']}/recipes/tenant/{TEST_TENANT_ID}/deletion-preview",
"sales": f"{BASE_URLS['sales']}/sales/tenant/{TEST_TENANT_ID}/deletion-preview",
"production": f"{BASE_URLS['production']}/production/tenant/{TEST_TENANT_ID}/deletion-preview",
"suppliers": f"{BASE_URLS['suppliers']}/suppliers/tenant/{TEST_TENANT_ID}/deletion-preview",
"pos": f"{BASE_URLS['pos']}/pos/tenant/{TEST_TENANT_ID}/deletion-preview",
"external": f"{BASE_URLS['external']}/external/tenant/{TEST_TENANT_ID}/deletion-preview",
"forecasting": f"{BASE_URLS['forecasting']}/forecasting/tenant/{TEST_TENANT_ID}/deletion-preview",
"training": f"{BASE_URLS['training']}/training/tenant/{TEST_TENANT_ID}/deletion-preview",
"alert_processor": f"{BASE_URLS['alert_processor']}/alerts/tenant/{TEST_TENANT_ID}/deletion-preview",
"notification": f"{BASE_URLS['notification']}/notifications/tenant/{TEST_TENANT_ID}/deletion-preview",
}
# Make all requests in parallel
tasks = [
http_client.get(url, headers=headers)
for url in preview_urls.values()
]
responses = await asyncio.gather(*tasks, return_exceptions=True)
# Analyze results
results = {}
for service, response in zip(preview_urls.keys(), responses):
if isinstance(response, Exception):
results[service] = {"status": "error", "error": str(response)}
else:
results[service] = {
"status": "success" if response.status_code == 200 else "failed",
"status_code": response.status_code,
"data": response.json() if response.status_code == 200 else None
}
# Log summary
successful = sum(1 for r in results.values() if r["status"] == "success")
logger.info("parallel_preview_test.completed",
total_services=len(results),
successful=successful,
failed=len(results) - successful,
results=results)
        # Assert that at least 10 of the 12 services responded successfully
        assert successful >= 10, f"Only {successful}/12 services responded successfully"


class TestOrchestratedDeletion:
    """Test the orchestrator's ability to delete across all services"""

@pytest.mark.asyncio
async def test_orchestrator_preview_all_services(self, http_client, service_token):
"""Test orchestrator can preview deletion across all services"""
from services.auth.app.services.deletion_orchestrator import DeletionOrchestrator
orchestrator = DeletionOrchestrator(auth_token=service_token)
# Get preview from all services
previews = {}
for service_name, endpoint_template in orchestrator.SERVICE_DELETION_ENDPOINTS.items():
url = endpoint_template.format(tenant_id=TEST_TENANT_ID) + "/deletion-preview"
try:
response = await http_client.get(
url,
headers={"Authorization": f"Bearer {service_token}"},
timeout=10.0
)
if response.status_code == 200:
previews[service_name] = response.json()
else:
previews[service_name] = {"error": f"HTTP {response.status_code}"}
except Exception as e:
previews[service_name] = {"error": str(e)}
logger.info("orchestrator.preview_test.completed",
services_count=len(previews),
previews=previews)
# Calculate total records across all services
total_records = 0
for service, data in previews.items():
if "total_records" in data:
total_records += data["total_records"]
logger.info("orchestrator.preview_test.total_records",
total_records=total_records,
services=len(previews))
assert len(previews) == 12, "Should have previews from all 12 services"
assert total_records >= 0, "Should have valid record counts"
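
    # From the loop above, SERVICE_DELETION_ENDPOINTS is presumably a mapping
    # of service name to a URL template with a {tenant_id} placeholder, e.g.
    # (illustrative, inferred from BASE_URLS; not the orchestrator's actual code):
    #
    #     SERVICE_DELETION_ENDPOINTS = {
    #         "orders": "http://orders-service:8000/api/v1/orders/tenant/{tenant_id}",
    #         "inventory": "http://inventory-service:8000/api/v1/inventory/tenant/{tenant_id}",
    #         ...
    #     }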


class TestErrorHandling:
    """Test error handling and edge cases"""

@pytest.mark.asyncio
async def test_invalid_tenant_id(self, http_client, service_token):
"""Test deletion with invalid tenant ID"""
invalid_tenant_id = str(uuid4())
url = f"{BASE_URLS['orders']}/orders/tenant/{invalid_tenant_id}/deletion-preview"
headers = {"Authorization": f"Bearer {service_token}"}
response = await http_client.get(url, headers=headers)
# Should succeed with zero counts for non-existent tenant
assert response.status_code == 200
data = response.json()
assert data["total_records"] == 0
@pytest.mark.asyncio
async def test_unauthorized_access(self, http_client):
"""Test deletion without authentication"""
url = f"{BASE_URLS['orders']}/orders/tenant/{TEST_TENANT_ID}/deletion-preview"
response = await http_client.get(url)
# Should be unauthorized
assert response.status_code in [401, 403]
@pytest.mark.asyncio
async def test_service_timeout_handling(self, http_client, service_token):
"""Test handling of service timeouts"""
        # Use an extremely short timeout to force a timeout error
        async with httpx.AsyncClient(verify=False, timeout=0.001) as short_client:
            url = f"{BASE_URLS['orders']}/orders/tenant/{TEST_TENANT_ID}/deletion-preview"
            headers = {"Authorization": f"Bearer {service_token}"}
            # httpx.ConnectTimeout is a subclass of httpx.TimeoutException,
            # so catching the base class covers both connect and read timeouts
            with pytest.raises(httpx.TimeoutException):
                await short_client.get(url, headers=headers)


class TestDataIntegrity:
    """Test data integrity after deletion"""

@pytest.mark.asyncio
async def test_cascade_deletion_order(self, http_client, service_token):
"""Test that child records are deleted before parents"""
# This would require creating test data and verifying deletion order
# For now, we verify the preview shows proper counts
url = f"{BASE_URLS['orders']}/orders/tenant/{TEST_TENANT_ID}/deletion-preview"
headers = {"Authorization": f"Bearer {service_token}"}
response = await http_client.get(url, headers=headers)
assert response.status_code == 200
data = response.json()
preview = data.get("preview", {})
# Verify we have counts for both parents and children
# In orders service: order_items (child) and orders (parent)
if preview.get("order_items", 0) > 0:
assert preview.get("orders", 0) > 0, "If items exist, orders should exist"
logger.info("cascade_deletion_test.passed", preview=preview)


class TestPerformance:
    """Test performance of deletion operations"""

@pytest.mark.asyncio
async def test_parallel_deletion_performance(self, http_client, service_token):
"""Test performance of parallel deletion across services"""
headers = {"Authorization": f"Bearer {service_token}"}
preview_urls = [
f"{BASE_URLS['orders']}/orders/tenant/{TEST_TENANT_ID}/deletion-preview",
f"{BASE_URLS['inventory']}/inventory/tenant/{TEST_TENANT_ID}/deletion-preview",
f"{BASE_URLS['forecasting']}/forecasting/tenant/{TEST_TENANT_ID}/deletion-preview",
f"{BASE_URLS['training']}/training/tenant/{TEST_TENANT_ID}/deletion-preview",
]
# Test parallel execution
start_time = time.time()
tasks = [http_client.get(url, headers=headers) for url in preview_urls]
responses = await asyncio.gather(*tasks, return_exceptions=True)
parallel_duration = time.time() - start_time
# Test sequential execution
start_time = time.time()
for url in preview_urls:
await http_client.get(url, headers=headers)
sequential_duration = time.time() - start_time
logger.info("performance_test.completed",
parallel_duration=parallel_duration,
sequential_duration=sequential_duration,
speedup=sequential_duration / parallel_duration if parallel_duration > 0 else 0)
# Parallel should be faster
assert parallel_duration < sequential_duration, "Parallel execution should be faster"
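        # Note: wall-clock comparisons can be flaky on loaded CI runners; if this
        # assertion proves noisy, a tolerance such as
        # parallel_duration < sequential_duration * 1.2 is a common mitigation.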


# Helper to run the full suite from the command line
def run_all_tests():
    """Run all integration tests via pytest"""
    logger.info("integration_tests.starting")
    # Run pytest programmatically. This must stay synchronous: pytest-asyncio
    # manages its own event loops, and they cannot be started while another
    # loop is already running in this thread (as asyncio.run would do).
    exit_code = pytest.main([
        __file__,
        "-v",
        "-s",
        "--tb=short",
        "--log-cli-level=INFO"
    ])
    logger.info("integration_tests.completed", exit_code=exit_code)
    sys.exit(exit_code)


if __name__ == "__main__":
    run_all_tests()