Add POI feature and improve the overall backend implementation
This commit is contained in:
@@ -9,7 +9,11 @@ from enum import Enum
|
||||
import structlog
|
||||
import httpx
|
||||
import asyncio
|
||||
from uuid import uuid4
|
||||
from uuid import uuid4, UUID
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.models.deletion_job import DeletionJob as DeletionJobModel
|
||||
from app.repositories.deletion_job_repository import DeletionJobRepository
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
@@ -136,15 +140,114 @@ class DeletionOrchestrator:
|
||||
"notification": "http://notification-service:8000/api/v1/notifications/tenant/{tenant_id}",
|
||||
}
|
||||
|
||||
def __init__(self, auth_token: Optional[str] = None, db: Optional[AsyncSession] = None):
    """
    Initialize orchestrator.

    Args:
        auth_token: JWT token for service-to-service authentication
        db: Database session for persistence (optional for backward compatibility)
    """
    self.auth_token = auth_token
    self.db = db
    # In-memory cache of active jobs; the database (when a session is
    # provided) is the durable store.
    self.jobs: Dict[str, DeletionJob] = {}
|
||||
|
||||
async def _save_job_to_db(self, job: DeletionJob) -> None:
    """
    Persist *job* to the database, creating or updating its row as needed.

    Best-effort: any database error is logged and swallowed so that a
    persistence failure never aborts the deletion job itself. On failure
    the session is rolled back so it remains usable for later operations
    (previously a failed commit could leave the AsyncSession in an
    aborted state).

    Args:
        job: In-memory job state to persist.
    """
    if not self.db:
        # No session configured: backward-compatible in-memory-only mode.
        return

    try:
        repository = DeletionJobRepository(self.db)

        # Upsert: update the existing row if one exists, else insert.
        existing = await repository.get_by_job_id(job.job_id)

        if existing:
            # Update mutable progress fields on the stored row.
            existing.status = job.status.value
            existing.service_results = {
                name: {
                    "status": result.status.value,
                    "deleted_counts": result.deleted_counts,
                    "total_deleted": result.total_deleted,
                    "errors": result.errors,
                    "duration_seconds": result.duration_seconds,
                }
                for name, result in job.service_results.items()
            }
            existing.total_items_deleted = job.total_items_deleted
            existing.services_completed = job.services_completed
            existing.services_failed = job.services_failed
            existing.error_log = job.error_log
            existing.completed_at = (
                datetime.fromisoformat(job.completed_at) if job.completed_at else None
            )
            await repository.update(existing)
        else:
            # First save: record identity and initial state; progress
            # fields start at zero and are filled in by later updates.
            db_job = DeletionJobModel(
                job_id=job.job_id,
                tenant_id=UUID(job.tenant_id),
                tenant_name=job.tenant_name,
                initiated_by=UUID(job.initiated_by) if job.initiated_by else None,
                status=job.status.value,
                service_results={},
                total_items_deleted=0,
                services_completed=0,
                services_failed=0,
                error_log=job.error_log,
                started_at=datetime.fromisoformat(job.started_at) if job.started_at else None,
                completed_at=None,
            )
            await repository.create(db_job)

        await self.db.commit()

    except Exception as e:
        # Leave the session usable for subsequent operations; the
        # deletion job itself must not fail on a persistence error.
        try:
            await self.db.rollback()
        except Exception:
            logger.warning("Rollback after failed job save also failed", job_id=job.job_id)
        logger.error("Failed to save job to database", error=str(e), job_id=job.job_id)
|
||||
|
||||
async def _load_job_from_db(self, job_id: str) -> Optional[DeletionJob]:
    """
    Rehydrate a DeletionJob dataclass from its database row.

    Returns None when no session is configured, when the row does not
    exist, or when loading fails (errors are logged, never raised).

    Args:
        job_id: Identifier of the job to load.

    Returns:
        The reconstructed DeletionJob, or None.
    """
    if not self.db:
        return None

    try:
        repo = DeletionJobRepository(self.db)
        row = await repo.get_by_job_id(job_id)
        if not row:
            return None

        loaded = DeletionJob(
            job_id=row.job_id,
            tenant_id=str(row.tenant_id),
            tenant_name=row.tenant_name,
            initiated_by=str(row.initiated_by) if row.initiated_by else None,
            status=DeletionStatus(row.status),
            started_at=row.started_at.isoformat() if row.started_at else None,
            completed_at=row.completed_at.isoformat() if row.completed_at else None,
            error_log=row.error_log or [],
        )

        # Rebuild per-service results from the stored JSON payload.
        # NOTE(review): aggregate counters (total_items_deleted,
        # services_completed/failed) are not restored here — confirm that
        # DeletionJob derives them from service_results.
        for svc, data in (row.service_results or {}).items():
            loaded.service_results[svc] = ServiceDeletionResult(
                service_name=svc,
                status=ServiceDeletionStatus(data["status"]),
                deleted_counts=data.get("deleted_counts", {}),
                errors=data.get("errors", []),
                duration_seconds=data.get("duration_seconds", 0.0),
            )

        return loaded

    except Exception as e:
        logger.error("Failed to load job from database", error=str(e), job_id=job_id)
        return None
|
||||
|
||||
async def orchestrate_tenant_deletion(
|
||||
self,
|
||||
@@ -176,6 +279,9 @@ class DeletionOrchestrator:
|
||||
|
||||
self.jobs[job.job_id] = job
|
||||
|
||||
# Save initial job to database
|
||||
await self._save_job_to_db(job)
|
||||
|
||||
logger.info("Starting tenant deletion orchestration",
|
||||
job_id=job.job_id,
|
||||
tenant_id=tenant_id,
|
||||
@@ -214,6 +320,9 @@ class DeletionOrchestrator:
|
||||
|
||||
job.completed_at = datetime.now(timezone.utc).isoformat()
|
||||
|
||||
# Save final state to database
|
||||
await self._save_job_to_db(job)
|
||||
|
||||
except Exception as e:
|
||||
job.status = DeletionStatus.FAILED
|
||||
job.error_log.append(f"Fatal orchestration error: {str(e)}")
|
||||
@@ -224,6 +333,9 @@ class DeletionOrchestrator:
|
||||
tenant_id=tenant_id,
|
||||
error=str(e))
|
||||
|
||||
# Save error state to database
|
||||
await self._save_job_to_db(job)
|
||||
|
||||
return job
|
||||
|
||||
async def _delete_from_all_services(
|
||||
@@ -385,7 +497,7 @@ class DeletionOrchestrator:
|
||||
duration_seconds=duration
|
||||
)
|
||||
|
||||
async def get_job_status(self, job_id: str) -> Optional[Dict[str, Any]]:
    """
    Get status of a deletion job.

    Checks the in-memory cache first, then falls back to the database;
    a job found only in the database is cached for later lookups.

    Args:
        job_id: Identifier of the job to look up.

    Returns:
        Job status dict, or None if the job is unknown.
    """
    cached = self.jobs.get(job_id)
    if cached:
        return cached.to_dict()

    persisted = await self._load_job_from_db(job_id)
    if not persisted:
        return None

    # Cache the rehydrated job so repeat lookups skip the database.
    self.jobs[job_id] = persisted
    return persisted.to_dict()
|
||||
|
||||
async def list_jobs(
|
||||
self,
|
||||
tenant_id: Optional[str] = None,
|
||||
status: Optional[DeletionStatus] = None,
|
||||
@@ -415,6 +537,50 @@ class DeletionOrchestrator:
|
||||
Returns:
|
||||
List of job dicts
|
||||
"""
|
||||
# If database is available, load from database
|
||||
if self.db:
|
||||
try:
|
||||
repository = DeletionJobRepository(self.db)
|
||||
|
||||
if tenant_id:
|
||||
db_jobs = await repository.list_by_tenant(
|
||||
UUID(tenant_id),
|
||||
status=status.value if status else None,
|
||||
limit=limit
|
||||
)
|
||||
else:
|
||||
db_jobs = await repository.list_all(
|
||||
status=status.value if status else None,
|
||||
limit=limit
|
||||
)
|
||||
|
||||
# Convert to job dicts
|
||||
jobs = []
|
||||
for db_job in db_jobs:
|
||||
job_dict = {
|
||||
"job_id": db_job.job_id,
|
||||
"tenant_id": str(db_job.tenant_id),
|
||||
"tenant_name": db_job.tenant_name,
|
||||
"initiated_by": str(db_job.initiated_by) if db_job.initiated_by else None,
|
||||
"status": db_job.status,
|
||||
"total_items_deleted": db_job.total_items_deleted,
|
||||
"services_completed": db_job.services_completed,
|
||||
"services_failed": db_job.services_failed,
|
||||
"service_results": db_job.service_results or {},
|
||||
"started_at": db_job.started_at.isoformat() if db_job.started_at else None,
|
||||
"completed_at": db_job.completed_at.isoformat() if db_job.completed_at else None,
|
||||
"error_log": db_job.error_log or []
|
||||
}
|
||||
jobs.append(job_dict)
|
||||
|
||||
return jobs
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to list jobs from database", error=str(e))
|
||||
# Fall back to in-memory cache
|
||||
pass
|
||||
|
||||
# Fall back to in-memory cache
|
||||
jobs = list(self.jobs.values())
|
||||
|
||||
# Apply filters
|
||||
|
||||
Reference in New Issue
Block a user