Add POI feature and improve the overall backend implementation
@@ -17,7 +17,7 @@ The **Auth Service** is the security foundation of Bakery-IA, providing robust J
 ### User Management

 - **User Profiles** - Complete user information management
-- **User Onboarding** - Multi-step onboarding progress tracking
+- **User Onboarding** - Multi-step onboarding progress tracking with 15 steps including POI detection
 - **Profile Updates** - Self-service profile editing
 - **Account Deletion** - GDPR-compliant account removal
 - **Login Attempts Tracking** - Brute force protection
@@ -100,9 +100,11 @@ The **Auth Service** is the security foundation of Bakery-IA, providing robust J
 - `DELETE /api/v1/auth/account` - Delete account (GDPR)

 ### User Onboarding
-- `GET /api/v1/auth/onboarding/progress` - Get onboarding status
-- `PUT /api/v1/auth/onboarding/step/{step}` - Complete onboarding step
-- `POST /api/v1/auth/onboarding/complete` - Mark onboarding complete
+- `GET /api/v1/auth/me/onboarding/progress` - Get onboarding status
+- `PUT /api/v1/auth/me/onboarding/step` - Update/complete onboarding step
+- `POST /api/v1/auth/me/onboarding/complete` - Mark onboarding complete
+- `GET /api/v1/auth/me/onboarding/next-step` - Get next incomplete step
+- `GET /api/v1/auth/me/onboarding/can-access/{step_name}` - Check if step is accessible

 ### GDPR Compliance
 - `GET /api/v1/auth/gdpr/consents` - Get user consents
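A minimal client walkthrough of the new `/me/onboarding` routes; the base URL, the bearer token, and the PUT request-body shape are illustrative assumptions, not values from this commit:

```python
# Hedged sketch: exercise the new onboarding endpoints with httpx.
# BASE_URL, the token, and the PUT payload shape are assumptions.
import httpx

BASE_URL = "http://localhost:8000/api/v1/auth"          # assumed service address
headers = {"Authorization": "Bearer <access-token>"}     # placeholder token

with httpx.Client(base_url=BASE_URL, headers=headers) as client:
    progress = client.get("/me/onboarding/progress").json()
    next_step = client.get("/me/onboarding/next-step").json()
    gate = client.get("/me/onboarding/can-access/poi-detection").json()
    # Only the route comes from the endpoint list above; the body is assumed.
    client.put("/me/onboarding/step", json={"step": "poi-detection", "completed": True})
    client.post("/me/onboarding/complete")
```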
@@ -185,16 +187,36 @@ CREATE TABLE user_onboarding_progress (
 CREATE TABLE user_onboarding_summary (
     id UUID PRIMARY KEY,
     user_id UUID REFERENCES users(id) ON DELETE CASCADE,
     total_steps INTEGER NOT NULL,
     completed_steps INTEGER DEFAULT 0,
     is_complete BOOLEAN DEFAULT FALSE,
     completed_at TIMESTAMP,
     current_step VARCHAR(50) NOT NULL DEFAULT 'user_registered',
     next_step VARCHAR(50),
     completion_percentage VARCHAR(50) DEFAULT '0.0',
     fully_completed BOOLEAN DEFAULT FALSE,
     steps_completed_count VARCHAR(50) DEFAULT '0', -- Format: "3/15"
     last_activity_at TIMESTAMP DEFAULT NOW(),
     created_at TIMESTAMP DEFAULT NOW(),
     updated_at TIMESTAMP DEFAULT NOW(),
     UNIQUE(user_id)
 );
 ```

 **Onboarding Steps (15 total):**
 1. `user_registered` - User account created (auto-completed)
 2. `bakery-type-selection` - Choose bakery type
 3. `setup` - Basic bakery setup and tenant creation
 4. `poi-detection` - **POI Detection (Location Context)** - Automatic detection of nearby Points of Interest
 5. `upload-sales-data` - File upload, validation, AI classification
 6. `inventory-review` - Review AI-detected products
 7. `initial-stock-entry` - Capture initial stock levels
 8. `product-categorization` - Advanced categorization (optional)
 9. `suppliers-setup` - Suppliers configuration
 10. `recipes-setup` - Production recipes (optional)
 11. `production-processes` - Finishing processes (optional)
 12. `quality-setup` - Quality standards (optional)
 13. `team-setup` - Team members (optional)
 14. `ml-training` - AI model training (requires POI detection)
 15. `setup-review` - Review all configuration
 16. `completion` - Onboarding completed
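A sketch of how the summary columns above (`steps_completed_count`, `completion_percentage`, `next_step`, `fully_completed`) could be derived from a user's completed steps; the helper and its name are assumptions for illustration, not code from this commit:

```python
# Hedged sketch: derive user_onboarding_summary fields from a set of completed steps.
# The identifiers mirror the list above; the derivation logic itself is assumed.
ONBOARDING_STEPS = [
    "user_registered", "bakery-type-selection", "setup", "poi-detection",
    "upload-sales-data", "inventory-review", "initial-stock-entry",
    "product-categorization", "suppliers-setup", "recipes-setup",
    "production-processes", "quality-setup", "team-setup",
    "ml-training", "setup-review", "completion",
]

def summarize_progress(completed: set[str]) -> dict:
    done = [s for s in ONBOARDING_STEPS if s in completed]
    remaining = [s for s in ONBOARDING_STEPS if s not in completed]
    total = len(ONBOARDING_STEPS)
    return {
        "steps_completed_count": f"{len(done)}/{total}",          # "3/15"-style string
        "completion_percentage": f"{100 * len(done) / total:.1f}",
        "current_step": done[-1] if done else "user_registered",
        "next_step": remaining[0] if remaining else None,
        "fully_completed": not remaining,
    }

print(summarize_progress({"user_registered", "bakery-type-selection", "setup"}))
```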
 **login_attempts**
 ```sql
 CREATE TABLE login_attempts (
@@ -49,15 +49,18 @@ ONBOARDING_STEPS = [
     # Phase 2: Core Setup
     "setup",                   # Basic bakery setup and tenant creation

-    # Phase 2a: AI-Assisted Inventory Setup (REFACTORED - split into 3 focused steps)
+    # Phase 2a: POI Detection (Location Context)
+    "poi-detection",           # Detect nearby POIs for location-based ML features
+
+    # Phase 2b: AI-Assisted Inventory Setup (REFACTORED - split into 3 focused steps)
     "upload-sales-data",       # File upload, validation, and AI classification
     "inventory-review",        # Review and confirm AI-detected products with type selection
     "initial-stock-entry",     # Capture initial stock levels

-    # Phase 2b: Product Categorization (optional advanced categorization)
+    # Phase 2c: Product Categorization (optional advanced categorization)
     "product-categorization",  # Advanced categorization (may be deprecated)

-    # Phase 2c: Suppliers (shared by all paths)
+    # Phase 2d: Suppliers (shared by all paths)
     "suppliers-setup",         # Suppliers configuration

     # Phase 3: Advanced Configuration (all optional)
@@ -81,6 +84,9 @@ STEP_DEPENDENCIES = {
     # Core setup - no longer depends on data-source-choice (removed)
     "setup": ["user_registered", "bakery-type-selection"],

+    # POI Detection - requires tenant creation (setup)
+    "poi-detection": ["user_registered", "setup"],
+
     # AI-Assisted Inventory Setup - REFACTORED into 3 sequential steps
     "upload-sales-data": ["user_registered", "setup"],
     "inventory-review": ["user_registered", "setup", "upload-sales-data"],
@@ -98,8 +104,8 @@ STEP_DEPENDENCIES = {
     "quality-setup": ["user_registered", "setup"],
     "team-setup": ["user_registered", "setup"],

-    # ML Training - requires AI path completion (upload-sales-data with inventory review)
-    "ml-training": ["user_registered", "setup", "upload-sales-data", "inventory-review"],
+    # ML Training - requires AI path completion AND POI detection for location features
+    "ml-training": ["user_registered", "setup", "poi-detection", "upload-sales-data", "inventory-review"],

     # Review and completion
     "setup-review": ["user_registered", "setup"],
@@ -14,6 +14,7 @@ from .users import User
 from .tokens import RefreshToken, LoginAttempt
 from .onboarding import UserOnboardingProgress, UserOnboardingSummary
 from .consent import UserConsent, ConsentHistory
+from .deletion_job import DeletionJob

 __all__ = [
     'User',
@@ -23,5 +24,6 @@ __all__ = [
     'UserOnboardingSummary',
     'UserConsent',
     'ConsentHistory',
+    'DeletionJob',
     "AuditLog",
 ]
services/auth/app/models/deletion_job.py (new file, 64 lines)
@@ -0,0 +1,64 @@
"""
Deletion Job Model
Tracks tenant deletion jobs for persistence and recovery
"""

from sqlalchemy import Column, String, DateTime, Text, JSON, Index, Integer
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.sql import func
import uuid

from shared.database.base import Base


class DeletionJob(Base):
    """
    Persistent storage for tenant deletion jobs
    Enables job recovery and tracking across service restarts
    """
    __tablename__ = "deletion_jobs"

    # Primary identifiers
    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4, index=True)
    job_id = Column(String(100), nullable=False, unique=True, index=True)  # External job ID
    tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)

    # Job Metadata
    tenant_name = Column(String(255), nullable=True)
    initiated_by = Column(UUID(as_uuid=True), nullable=True)  # User ID who started deletion

    # Job Status
    status = Column(String(50), nullable=False, default="pending", index=True)  # pending, in_progress, completed, failed, rolled_back

    # Service Results
    service_results = Column(JSON, nullable=True)  # Dict of service_name -> result details

    # Progress Tracking
    total_items_deleted = Column(Integer, default=0, nullable=False)
    services_completed = Column(Integer, default=0, nullable=False)
    services_failed = Column(Integer, default=0, nullable=False)

    # Error Tracking
    error_log = Column(JSON, nullable=True)  # Array of error messages

    # Timestamps
    started_at = Column(DateTime(timezone=True), nullable=True, index=True)
    completed_at = Column(DateTime(timezone=True), nullable=True)
    created_at = Column(DateTime(timezone=True), server_default=func.now(), nullable=False)
    updated_at = Column(DateTime(timezone=True), server_default=func.now(), onupdate=func.now(), nullable=False)

    # Additional Context
    notes = Column(Text, nullable=True)
    extra_metadata = Column(JSON, nullable=True)  # Additional job-specific data

    # Indexes for performance
    __table_args__ = (
        Index('idx_deletion_job_id', 'job_id'),
        Index('idx_deletion_tenant_id', 'tenant_id'),
        Index('idx_deletion_status', 'status'),
        Index('idx_deletion_started_at', 'started_at'),
        Index('idx_deletion_tenant_status', 'tenant_id', 'status'),
    )

    def __repr__(self):
        return f"<DeletionJob(job_id='{self.job_id}', tenant_id={self.tenant_id}, status='{self.status}')>"
services/auth/app/repositories/deletion_job_repository.py (new file, 110 lines)
@@ -0,0 +1,110 @@
"""
Deletion Job Repository
Database operations for deletion job persistence
"""

from typing import List, Optional
from uuid import UUID
from sqlalchemy import select, and_, desc
from sqlalchemy.ext.asyncio import AsyncSession
import structlog

from app.models.deletion_job import DeletionJob

logger = structlog.get_logger()


class DeletionJobRepository:
    """Repository for deletion job database operations"""

    def __init__(self, session: AsyncSession):
        self.session = session

    async def create(self, deletion_job: DeletionJob) -> DeletionJob:
        """Create a new deletion job record"""
        try:
            self.session.add(deletion_job)
            await self.session.flush()
            await self.session.refresh(deletion_job)
            return deletion_job
        except Exception as e:
            logger.error("Failed to create deletion job", error=str(e))
            raise

    async def get_by_job_id(self, job_id: str) -> Optional[DeletionJob]:
        """Get deletion job by job_id"""
        try:
            query = select(DeletionJob).where(DeletionJob.job_id == job_id)
            result = await self.session.execute(query)
            return result.scalar_one_or_none()
        except Exception as e:
            logger.error("Failed to get deletion job", error=str(e), job_id=job_id)
            raise

    async def get_by_id(self, id: UUID) -> Optional[DeletionJob]:
        """Get deletion job by database ID"""
        try:
            return await self.session.get(DeletionJob, id)
        except Exception as e:
            logger.error("Failed to get deletion job by ID", error=str(e), id=str(id))
            raise

    async def list_by_tenant(
        self,
        tenant_id: UUID,
        status: Optional[str] = None,
        limit: int = 100
    ) -> List[DeletionJob]:
        """List deletion jobs for a tenant"""
        try:
            query = select(DeletionJob).where(DeletionJob.tenant_id == tenant_id)

            if status:
                query = query.where(DeletionJob.status == status)

            query = query.order_by(desc(DeletionJob.started_at)).limit(limit)

            result = await self.session.execute(query)
            return list(result.scalars().all())
        except Exception as e:
            logger.error("Failed to list deletion jobs", error=str(e), tenant_id=str(tenant_id))
            raise

    async def list_all(
        self,
        status: Optional[str] = None,
        limit: int = 100
    ) -> List[DeletionJob]:
        """List all deletion jobs with optional status filter"""
        try:
            query = select(DeletionJob)

            if status:
                query = query.where(DeletionJob.status == status)

            query = query.order_by(desc(DeletionJob.started_at)).limit(limit)

            result = await self.session.execute(query)
            return list(result.scalars().all())
        except Exception as e:
            logger.error("Failed to list all deletion jobs", error=str(e))
            raise

    async def update(self, deletion_job: DeletionJob) -> DeletionJob:
        """Update a deletion job record"""
        try:
            await self.session.flush()
            await self.session.refresh(deletion_job)
            return deletion_job
        except Exception as e:
            logger.error("Failed to update deletion job", error=str(e))
            raise

    async def delete(self, deletion_job: DeletionJob) -> None:
        """Delete a deletion job record"""
        try:
            await self.session.delete(deletion_job)
            await self.session.flush()
        except Exception as e:
            logger.error("Failed to delete deletion job", error=str(e))
            raise
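A usage sketch for the repository; the engine and session-factory names and the DSN are illustrative. Note that the repository only flushes: the caller owns the transaction and commits, just as the orchestrator below does with `await self.db.commit()`:

```python
# Hedged sketch: persist a pending deletion job through the repository.
# SessionLocal and the DSN are assumptions; only the repository calls come from the commit.
import asyncio
import uuid
from sqlalchemy.ext.asyncio import create_async_engine, async_sessionmaker

from app.models.deletion_job import DeletionJob
from app.repositories.deletion_job_repository import DeletionJobRepository

engine = create_async_engine("postgresql+asyncpg://auth:auth@localhost:5432/auth")
SessionLocal = async_sessionmaker(engine, expire_on_commit=False)

async def record_pending_deletion(tenant_id: uuid.UUID) -> str:
    async with SessionLocal() as session:
        repo = DeletionJobRepository(session)
        job = DeletionJob(
            job_id=f"del-{uuid.uuid4().hex[:12]}",  # external job-ID format is assumed
            tenant_id=tenant_id,
            status="pending",
        )
        await repo.create(job)   # adds, flushes, and refreshes
        await session.commit()   # repository never commits; the caller does
        return job.job_id

asyncio.run(record_pending_deletion(uuid.uuid4()))
```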
@@ -9,7 +9,11 @@ from enum import Enum
 import structlog
 import httpx
 import asyncio
-from uuid import uuid4
+from uuid import uuid4, UUID
+from sqlalchemy.ext.asyncio import AsyncSession
+
+from app.models.deletion_job import DeletionJob as DeletionJobModel
+from app.repositories.deletion_job_repository import DeletionJobRepository

 logger = structlog.get_logger()
@@ -136,15 +140,114 @@ class DeletionOrchestrator:
         "notification": "http://notification-service:8000/api/v1/notifications/tenant/{tenant_id}",
     }

-    def __init__(self, auth_token: Optional[str] = None):
+    def __init__(self, auth_token: Optional[str] = None, db: Optional[AsyncSession] = None):
         """
         Initialize orchestrator

         Args:
             auth_token: JWT token for service-to-service authentication
+            db: Database session for persistence (optional for backward compatibility)
         """
         self.auth_token = auth_token
-        self.jobs: Dict[str, DeletionJob] = {}  # In-memory job storage (TODO: move to database)
+        self.db = db
+        self.jobs: Dict[str, DeletionJob] = {}  # In-memory cache for active jobs
+
+    async def _save_job_to_db(self, job: DeletionJob) -> None:
+        """Save or update job to database"""
+        if not self.db:
+            return
+
+        try:
+            repository = DeletionJobRepository(self.db)
+
+            # Check if job exists
+            existing = await repository.get_by_job_id(job.job_id)
+
+            if existing:
+                # Update existing job
+                existing.status = job.status.value
+                existing.service_results = {
+                    name: {
+                        "status": result.status.value,
+                        "deleted_counts": result.deleted_counts,
+                        "total_deleted": result.total_deleted,
+                        "errors": result.errors,
+                        "duration_seconds": result.duration_seconds
+                    }
+                    for name, result in job.service_results.items()
+                }
+                existing.total_items_deleted = job.total_items_deleted
+                existing.services_completed = job.services_completed
+                existing.services_failed = job.services_failed
+                existing.error_log = job.error_log
+                existing.completed_at = datetime.fromisoformat(job.completed_at) if job.completed_at else None
+
+                await repository.update(existing)
+            else:
+                # Create new job
+                db_job = DeletionJobModel(
+                    job_id=job.job_id,
+                    tenant_id=UUID(job.tenant_id),
+                    tenant_name=job.tenant_name,
+                    initiated_by=UUID(job.initiated_by) if job.initiated_by else None,
+                    status=job.status.value,
+                    service_results={},
+                    total_items_deleted=0,
+                    services_completed=0,
+                    services_failed=0,
+                    error_log=job.error_log,
+                    started_at=datetime.fromisoformat(job.started_at) if job.started_at else None,
+                    completed_at=None
+                )
+                await repository.create(db_job)
+
+            await self.db.commit()
+
+        except Exception as e:
+            logger.error("Failed to save job to database", error=str(e), job_id=job.job_id)
+            # Don't fail the job if database save fails
+            pass
+
+    async def _load_job_from_db(self, job_id: str) -> Optional[DeletionJob]:
+        """Load job from database"""
+        if not self.db:
+            return None
+
+        try:
+            repository = DeletionJobRepository(self.db)
+            db_job = await repository.get_by_job_id(job_id)
+
+            if not db_job:
+                return None
+
+            # Convert database model to dataclass
+            job = DeletionJob(
+                job_id=db_job.job_id,
+                tenant_id=str(db_job.tenant_id),
+                tenant_name=db_job.tenant_name,
+                initiated_by=str(db_job.initiated_by) if db_job.initiated_by else None,
+                status=DeletionStatus(db_job.status),
+                started_at=db_job.started_at.isoformat() if db_job.started_at else None,
+                completed_at=db_job.completed_at.isoformat() if db_job.completed_at else None,
+                error_log=db_job.error_log or []
+            )
+
+            # Reconstruct service results
+            if db_job.service_results:
+                for service_name, result_data in db_job.service_results.items():
+                    job.service_results[service_name] = ServiceDeletionResult(
+                        service_name=service_name,
+                        status=ServiceDeletionStatus(result_data["status"]),
+                        deleted_counts=result_data.get("deleted_counts", {}),
+                        errors=result_data.get("errors", []),
+                        duration_seconds=result_data.get("duration_seconds", 0.0)
+                    )
+
+            return job
+
+        except Exception as e:
+            logger.error("Failed to load job from database", error=str(e), job_id=job_id)
+            return None

     async def orchestrate_tenant_deletion(
         self,
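For reference, the per-service shape that `_save_job_to_db` writes into the `service_results` JSON column and `_load_job_from_db` reads back; the concrete values are illustrative:

```python
# Hedged sketch of the persisted service_results payload (values are made up).
# Keys mirror the serialization in _save_job_to_db above; note that total_deleted
# is written but not read back by _load_job_from_db.
example_service_results = {
    "inventory": {
        "status": "completed",               # a ServiceDeletionStatus value
        "deleted_counts": {"products": 42},  # entity name -> rows removed
        "total_deleted": 42,
        "errors": [],
        "duration_seconds": 1.8,
    }
}
```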
@@ -176,6 +279,9 @@ class DeletionOrchestrator:

         self.jobs[job.job_id] = job

+        # Save initial job to database
+        await self._save_job_to_db(job)
+
         logger.info("Starting tenant deletion orchestration",
                     job_id=job.job_id,
                     tenant_id=tenant_id,
@@ -214,6 +320,9 @@ class DeletionOrchestrator:

             job.completed_at = datetime.now(timezone.utc).isoformat()

+            # Save final state to database
+            await self._save_job_to_db(job)
+
         except Exception as e:
             job.status = DeletionStatus.FAILED
             job.error_log.append(f"Fatal orchestration error: {str(e)}")
@@ -224,6 +333,9 @@ class DeletionOrchestrator:
                          tenant_id=tenant_id,
                          error=str(e))

+            # Save error state to database
+            await self._save_job_to_db(job)
+
         return job

     async def _delete_from_all_services(
@@ -385,7 +497,7 @@ class DeletionOrchestrator:
                 duration_seconds=duration
             )

-    def get_job_status(self, job_id: str) -> Optional[Dict[str, Any]]:
+    async def get_job_status(self, job_id: str) -> Optional[Dict[str, Any]]:
         """
         Get status of a deletion job

@@ -395,10 +507,20 @@ class DeletionOrchestrator:
         Returns:
             Job status dict or None if not found
         """
+        # Try in-memory cache first
         job = self.jobs.get(job_id)
-        return job.to_dict() if job else None
+        if job:
+            return job.to_dict()

-    def list_jobs(
+        # Try loading from database
+        job = await self._load_job_from_db(job_id)
+        if job:
+            self.jobs[job_id] = job  # Cache it
+            return job.to_dict()
+
+        return None
+
+    async def list_jobs(
         self,
         tenant_id: Optional[str] = None,
         status: Optional[DeletionStatus] = None,
@@ -415,6 +537,50 @@ class DeletionOrchestrator:
         Returns:
             List of job dicts
         """
+        # If database is available, load from database
+        if self.db:
+            try:
+                repository = DeletionJobRepository(self.db)
+
+                if tenant_id:
+                    db_jobs = await repository.list_by_tenant(
+                        UUID(tenant_id),
+                        status=status.value if status else None,
+                        limit=limit
+                    )
+                else:
+                    db_jobs = await repository.list_all(
+                        status=status.value if status else None,
+                        limit=limit
+                    )
+
+                # Convert to job dicts
+                jobs = []
+                for db_job in db_jobs:
+                    job_dict = {
+                        "job_id": db_job.job_id,
+                        "tenant_id": str(db_job.tenant_id),
+                        "tenant_name": db_job.tenant_name,
+                        "initiated_by": str(db_job.initiated_by) if db_job.initiated_by else None,
+                        "status": db_job.status,
+                        "total_items_deleted": db_job.total_items_deleted,
+                        "services_completed": db_job.services_completed,
+                        "services_failed": db_job.services_failed,
+                        "service_results": db_job.service_results or {},
+                        "started_at": db_job.started_at.isoformat() if db_job.started_at else None,
+                        "completed_at": db_job.completed_at.isoformat() if db_job.completed_at else None,
+                        "error_log": db_job.error_log or []
+                    }
+                    jobs.append(job_dict)
+
+                return jobs
+
+            except Exception as e:
+                logger.error("Failed to list jobs from database", error=str(e))
+                # Fall back to in-memory cache
+                pass
+
+        # Fall back to in-memory cache
         jobs = list(self.jobs.values())

         # Apply filters
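Putting the pieces together, an end-to-end sketch of the persistence-backed orchestrator; the session handling and the exact `orchestrate_tenant_deletion` signature (truncated in this diff) are assumptions:

```python
# Hedged sketch: run a tenant deletion with database persistence, then poll status.
# Only DeletionOrchestrator, get_job_status, and list_jobs come from this commit;
# the session argument and the call signature are assumed.
async def delete_tenant(session, tenant_id: str, service_token: str) -> None:
    orchestrator = DeletionOrchestrator(auth_token=service_token, db=session)

    job = await orchestrator.orchestrate_tenant_deletion(tenant_id)  # signature assumed

    # get_job_status is now async: it checks the in-memory cache, then the
    # database, so job state survives a service restart.
    status = await orchestrator.get_job_status(job.job_id)
    print(status["status"] if status else "job not found")

    # list_jobs is also async and prefers the database when a session was provided.
    history = await orchestrator.list_jobs(tenant_id=tenant_id)
    print(f"{len(history)} deletion job(s) recorded for tenant {tenant_id}")
```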