Add MinIO support and frontend analytics

Urtzi Alfaro
2026-01-17 22:42:40 +01:00
parent fbc670ddb3
commit 3c4b5c2a06
53 changed files with 3485 additions and 437 deletions

View File

@@ -96,48 +96,48 @@ def check_system_resources() -> Dict[str, Any]:
 def check_model_storage() -> Dict[str, Any]:
-    """Check model storage health"""
+    """Check MinIO model storage health"""
     try:
-        storage_path = settings.MODEL_STORAGE_PATH
+        from shared.clients.minio_client import minio_client
-        if not os.path.exists(storage_path):
+        # Check MinIO connectivity
+        if not minio_client.health_check():
             return {
-                "status": "warning",
-                "message": f"Model storage path does not exist: {storage_path}"
+                "status": "unhealthy",
+                "message": "MinIO service is not reachable",
+                "storage_type": "minio"
             }
-        # Check if writable
-        test_file = os.path.join(storage_path, ".health_check")
-        try:
-            with open(test_file, 'w') as f:
-                f.write("test")
-            os.remove(test_file)
-            writable = True
-        except Exception:
-            writable = False
+        bucket_name = settings.MINIO_MODEL_BUCKET
-        # Count model files
-        model_files = 0
-        total_size = 0
-        for root, dirs, files in os.walk(storage_path):
-            for file in files:
-                if file.endswith('.pkl'):
-                    model_files += 1
-                    file_path = os.path.join(root, file)
-                    total_size += os.path.getsize(file_path)
+        # Check if bucket exists
+        bucket_exists = minio_client.bucket_exists(bucket_name)
+        if not bucket_exists:
+            return {
+                "status": "warning",
+                "message": f"MinIO bucket does not exist: {bucket_name}",
+                "storage_type": "minio"
+            }
+        # Count model files in MinIO
+        model_objects = minio_client.list_objects(bucket_name, prefix="models/")
+        model_files = [obj for obj in model_objects if obj.endswith('.pkl')]
         return {
-            "status": "healthy" if writable else "degraded",
-            "path": storage_path,
-            "writable": writable,
-            "model_files": model_files,
-            "total_size_mb": round(total_size / 1024 / 1024, 2)
+            "status": "healthy",
+            "storage_type": "minio",
+            "endpoint": settings.MINIO_ENDPOINT,
+            "bucket": bucket_name,
+            "use_ssl": settings.MINIO_USE_SSL,
+            "model_files": len(model_files),
+            "bucket_exists": bucket_exists
         }
     except Exception as e:
-        logger.error(f"Model storage check failed: {e}")
+        logger.error(f"MinIO storage check failed: {e}")
         return {
             "status": "error",
+            "storage_type": "minio",
             "error": str(e)
         }
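
The health check above relies on a shared `minio_client` wrapper (`shared.clients.minio_client`) whose implementation is not part of this excerpt. As rough orientation, a minimal sketch of such a wrapper built on the official `minio` Python SDK might look like the following; the class shape, constructor arguments, and the `health_check()` implemented via `list_buckets()` are assumptions, and only the method names mirror the calls used in the diff.

from minio import Minio


class MinioModelClient:
    """Hypothetical sketch of the shared MinIO wrapper; not the commit's actual code."""

    def __init__(self, endpoint: str, access_key: str, secret_key: str, use_ssl: bool = False):
        # Official SDK client; `secure` toggles HTTPS.
        self._client = Minio(endpoint, access_key=access_key, secret_key=secret_key, secure=use_ssl)

    def health_check(self) -> bool:
        # Treat any successful API round-trip as "reachable".
        try:
            self._client.list_buckets()
            return True
        except Exception:
            return False

    def bucket_exists(self, bucket_name: str) -> bool:
        return self._client.bucket_exists(bucket_name)

    def list_objects(self, bucket_name: str, prefix: str = "") -> list[str]:
        # Return plain object names so callers can filter with str.endswith('.pkl').
        return [obj.object_name for obj in self._client.list_objects(bucket_name, prefix=prefix, recursive=True)]

    def delete_object(self, bucket_name: str, object_name: str) -> None:
        self._client.remove_object(bucket_name, object_name)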

View File

@@ -14,7 +14,6 @@ from app.services.training_service import EnhancedTrainingService
 from datetime import datetime, timezone
 from sqlalchemy import select, delete, func
 import uuid
-import shutil
 from shared.auth.decorators import (
     get_current_user_dep,
@@ -304,10 +303,9 @@ async def delete_tenant_models_complete(
         "jobs_cancelled": 0,
         "models_deleted": 0,
         "artifacts_deleted": 0,
-        "artifacts_files_deleted": 0,
+        "minio_objects_deleted": 0,
         "training_logs_deleted": 0,
         "performance_metrics_deleted": 0,
-        "storage_freed_bytes": 0,
         "errors": []
     }
@@ -336,51 +334,35 @@ async def delete_tenant_models_complete(
         deletion_stats["errors"].append(error_msg)
         logger.error(error_msg)
-    # Step 2: Delete model artifact files from storage
+    # Step 2: Delete model artifact files from MinIO storage
     try:
-        artifacts_query = select(ModelArtifact).where(
-            ModelArtifact.tenant_id == tenant_uuid
-        )
-        artifacts_result = await db.execute(artifacts_query)
-        artifacts = artifacts_result.scalars().all()
-        storage_freed = 0
+        from shared.clients.minio_client import minio_client
+        bucket_name = settings.MINIO_MODEL_BUCKET
+        prefix = f"models/{tenant_id}/"
+        # List all objects for this tenant
+        objects_to_delete = minio_client.list_objects(bucket_name, prefix=prefix)
         files_deleted = 0
-        for artifact in artifacts:
+        for obj_name in objects_to_delete:
             try:
-                file_path = Path(artifact.file_path)
-                if file_path.exists():
-                    file_size = file_path.stat().st_size
-                    file_path.unlink() # Delete file
-                    storage_freed += file_size
-                    files_deleted += 1
-                    logger.debug("Deleted artifact file",
-                                 file_path=str(file_path),
-                                 size_bytes=file_size)
-                    # Also try to delete parent directories if empty
-                    try:
-                        if file_path.parent.exists() and not any(file_path.parent.iterdir()):
-                            file_path.parent.rmdir()
-                    except:
-                        pass # Ignore errors cleaning up directories
+                minio_client.delete_object(bucket_name, obj_name)
+                files_deleted += 1
+                logger.debug("Deleted MinIO object", object_name=obj_name)
             except Exception as e:
-                error_msg = f"Error deleting artifact file {artifact.file_path}: {str(e)}"
+                error_msg = f"Error deleting MinIO object {obj_name}: {str(e)}"
                 deletion_stats["errors"].append(error_msg)
                 logger.warning(error_msg)
-        deletion_stats["artifacts_files_deleted"] = files_deleted
-        deletion_stats["storage_freed_bytes"] = storage_freed
-        logger.info("Deleted artifact files",
+        deletion_stats["minio_objects_deleted"] = files_deleted
+        logger.info("Deleted MinIO objects",
                     tenant_id=tenant_id,
-                    files_deleted=files_deleted,
-                    storage_freed_mb=storage_freed / (1024 * 1024))
+                    files_deleted=files_deleted)
     except Exception as e:
-        error_msg = f"Error processing artifact files: {str(e)}"
+        error_msg = f"Error processing MinIO objects: {str(e)}"
         deletion_stats["errors"].append(error_msg)
         logger.error(error_msg)
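
Step 2 deletes the tenant's objects one call at a time through the shared wrapper. For comparison, a hedged sketch of the same cleanup done directly with the official `minio` SDK's bulk `remove_objects()` API could look like this; the raw `Minio` client, function name, and error handling are illustrative assumptions, while the bucket usage and the `models/{tenant_id}/` prefix follow the commit.

from minio import Minio
from minio.deleteobjects import DeleteObject


def purge_tenant_models(client: Minio, bucket_name: str, tenant_id: str) -> int:
    """Delete every object under the tenant's models/ prefix; returns the count queued for deletion."""
    prefix = f"models/{tenant_id}/"
    # Recursive listing of the tenant's objects, wrapped as DeleteObject entries.
    to_delete = [
        DeleteObject(obj.object_name)
        for obj in client.list_objects(bucket_name, prefix=prefix, recursive=True)
    ]
    # remove_objects() is lazy: iterating the returned errors actually performs the deletes.
    errors = list(client.remove_objects(bucket_name, to_delete))
    for error in errors:
        # Each entry describes one object that could not be removed.
        print("Failed to delete object:", error)
    return len(to_delete)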
@@ -463,19 +445,7 @@ async def delete_tenant_models_complete(
             detail=error_msg
         )
-    # Step 4: Clean up tenant model directory
-    try:
-        tenant_model_dir = Path(settings.MODEL_STORAGE_PATH) / tenant_id
-        if tenant_model_dir.exists():
-            shutil.rmtree(tenant_model_dir)
-            logger.info("Deleted tenant model directory",
-                        directory=str(tenant_model_dir))
-    except Exception as e:
-        error_msg = f"Error deleting model directory: {str(e)}"
-        deletion_stats["errors"].append(error_msg)
-        logger.warning(error_msg)
-    # Models deleted successfully
+    # Step 4: Models deleted successfully (MinIO cleanup already done in Step 2)
     return {
         "success": True,
         "message": f"All training data for tenant {tenant_id} deleted successfully",