Initial commit - production deployment

commit c23d00dd92
2026-01-21 17:17:16 +01:00
2289 changed files with 638440 additions and 0 deletions


@@ -0,0 +1,3 @@
from .internal_demo import router as internal_demo_router
__all__ = ["internal_demo_router"]


@@ -0,0 +1,214 @@
"""
User self-service account deletion API for GDPR compliance
Implements Article 17 (Right to erasure / "Right to be forgotten")
"""
from uuid import UUID
from fastapi import APIRouter, Depends, HTTPException, status, Request, BackgroundTasks
from pydantic import BaseModel, Field
from datetime import datetime, timezone
import structlog
from shared.auth.decorators import get_current_user_dep
from app.core.database import get_db
from app.services.admin_delete import AdminUserDeleteService
from app.models.users import User
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select
import httpx
logger = structlog.get_logger()
router = APIRouter()
class AccountDeletionRequest(BaseModel):
"""Request model for account deletion"""
confirm_email: str = Field(..., description="User's email for confirmation")
reason: str = Field(default="", description="Optional reason for deletion")
password: str = Field(..., description="User's password for verification")
class DeletionScheduleResponse(BaseModel):
"""Response for scheduled deletion"""
message: str
user_id: str
scheduled_deletion_date: str
grace_period_days: int = 30
@router.delete("/api/v1/auth/me/account")
async def request_account_deletion(
deletion_request: AccountDeletionRequest,
request: Request,
current_user: dict = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""
Request account deletion (self-service)
GDPR Article 17 - Right to erasure ("right to be forgotten")
This initiates account deletion with a 30-day grace period.
During this period:
- Account is marked for deletion
- User can still log in and cancel deletion
- After 30 days, account is permanently deleted
Requires:
- Email confirmation matching logged-in user
- Current password verification
"""
try:
user_id = UUID(current_user["user_id"])
user_email = current_user.get("email")
if not user_email or deletion_request.confirm_email.lower() != user_email.lower():
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail="Email confirmation does not match your account email"
)
query = select(User).where(User.id == user_id)
result = await db.execute(query)
user = result.scalar_one_or_none()
if not user:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail="User not found"
)
from app.core.security import SecurityManager
if not SecurityManager.verify_password(deletion_request.password, user.hashed_password):
logger.warning(
"account_deletion_invalid_password",
user_id=str(user_id),
ip_address=request.client.host if request.client else None
)
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail="Invalid password"
)
logger.info(
"account_deletion_requested",
user_id=str(user_id),
email=user_email,
reason=deletion_request.reason[:100] if deletion_request.reason else None,
ip_address=request.client.host if request.client else None
)
tenant_id = current_user.get("tenant_id")
if tenant_id:
try:
async with httpx.AsyncClient(timeout=10.0) as client:
cancel_response = await client.get(
f"http://tenant-service:8000/api/v1/tenants/{tenant_id}/subscription/status",
headers={"Authorization": request.headers.get("Authorization")}
)
if cancel_response.status_code == 200:
subscription_data = cancel_response.json()
if subscription_data.get("status") in ["active", "pending_cancellation"]:
cancel_sub_response = await client.delete(
f"http://tenant-service:8000/api/v1/tenants/{tenant_id}/subscription",
headers={"Authorization": request.headers.get("Authorization")}
)
logger.info(
"subscription_cancelled_before_deletion",
user_id=str(user_id),
tenant_id=tenant_id,
subscription_status=subscription_data.get("status"),
cancel_status_code=cancel_sub_response.status_code
)
except Exception as e:
logger.warning(
"subscription_cancellation_failed_during_account_deletion",
user_id=str(user_id),
error=str(e)
)
deletion_service = AdminUserDeleteService(db)
result = await deletion_service.delete_admin_user_complete(
user_id=str(user_id),
requesting_user_id=str(user_id)
)
return {
"message": "Account deleted successfully",
"user_id": str(user_id),
"deletion_date": datetime.now(timezone.utc).isoformat(),
"data_retained": "Audit logs will be anonymized after legal retention period (1 year)",
"gdpr_article": "Article 17 - Right to erasure"
}
except HTTPException:
raise
except Exception as e:
logger.error(
"account_deletion_failed",
user_id=current_user.get("user_id"),
error=str(e)
)
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to process account deletion request"
)
@router.get("/api/v1/auth/me/account/deletion-info")
async def get_deletion_info(
current_user: dict = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""
Get information about what will be deleted
Shows user exactly what data will be deleted when they request
account deletion. Transparency requirement under GDPR.
"""
try:
user_id = UUID(current_user["user_id"])
deletion_service = AdminUserDeleteService(db)
preview = await deletion_service.preview_user_deletion(str(user_id))
return {
"user_info": preview.get("user"),
"what_will_be_deleted": {
"account_data": "Your account, email, name, and profile information",
"sessions": "All active sessions and refresh tokens",
"consents": "Your consent history and preferences",
"security_data": "Login history and security logs",
"tenant_data": preview.get("tenant_associations"),
"estimated_records": preview.get("estimated_deletions")
},
"what_will_be_retained": {
"audit_logs": "Anonymized for 1 year (legal requirement)",
"financial_records": "Anonymized for 7 years (tax law)",
"anonymized_analytics": "Aggregated data without personal identifiers"
},
"process": {
"immediate_deletion": True,
"grace_period": "No grace period - deletion is immediate",
"reversible": False,
"completion_time": "Immediate"
},
"gdpr_rights": {
"article_17": "Right to erasure (right to be forgotten)",
"article_5_1_e": "Storage limitation principle",
"exceptions": "Data required for legal obligations will be retained in anonymized form"
},
"warning": "⚠️ This action is irreversible. All your data will be permanently deleted."
}
except Exception as e:
logger.error(
"deletion_info_failed",
user_id=current_user.get("user_id"),
error=str(e)
)
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to retrieve deletion information"
)
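For reference, a minimal client-side sketch of the self-service deletion call above; the base URL, token, and credentials are placeholders for illustration, while the path and request body follow the router.

# Illustrative client for DELETE /api/v1/auth/me/account (not part of the service code).
import asyncio
import httpx

BASE_URL = "https://api.example.com"  # assumed gateway URL

async def delete_my_account(access_token: str, email: str, password: str) -> dict:
    async with httpx.AsyncClient(timeout=30.0) as client:
        response = await client.request(
            "DELETE",
            f"{BASE_URL}/api/v1/auth/me/account",
            headers={"Authorization": f"Bearer {access_token}"},
            json={
                "confirm_email": email,
                "password": password,
                "reason": "No longer using the service",
            },
        )
        response.raise_for_status()
        return response.json()

if __name__ == "__main__":
    result = asyncio.run(delete_my_account("access-token", "user@example.com", "secret"))
    print(result["message"], result["deletion_date"])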


@@ -0,0 +1,657 @@
"""
Refactored Auth Operations with proper 3DS/3DS2 support
Implements SetupIntent-first architecture for secure registration flows
"""
import logging
from typing import Dict, Any
from fastapi import APIRouter, Depends, HTTPException, status, Request
from app.services.auth_service import auth_service, AuthService
from app.schemas.auth import UserRegistration, UserLogin, UserResponse
from app.models.users import User
from shared.exceptions.auth_exceptions import (
UserCreationError,
RegistrationError,
PaymentOrchestrationError
)
from shared.auth.decorators import get_current_user_dep
# Configure logging
logger = logging.getLogger(__name__)
# Create router
router = APIRouter(prefix="/api/v1/auth", tags=["auth"])
async def get_auth_service() -> AuthService:
"""Dependency injection for auth service"""
return auth_service
@router.post("/start-registration",
response_model=Dict[str, Any],
summary="Start secure registration with payment verification")
async def start_registration(
user_data: UserRegistration,
request: Request,
auth_service: AuthService = Depends(get_auth_service)
) -> Dict[str, Any]:
"""
Start secure registration flow with SetupIntent-first approach
This is the FIRST step in the atomic registration architecture:
1. Creates Stripe customer via tenant service
2. Creates SetupIntent with confirm=True
3. Returns SetupIntent data to frontend
IMPORTANT: No subscription or user is created while payment verification is pending.
Two possible outcomes:
- requires_action=True: 3DS is required; the frontend must confirm the SetupIntent and then call complete-registration, which creates the subscription and user.
- requires_action=False: no 3DS is required; registration is completed in this call and the response includes the created user and subscription.
Creating the subscription only against a verified SetupIntent keeps the flow consistent and prevents duplicate subscriptions.
Args:
user_data: User registration data with payment info
Returns:
SetupIntent result with:
- requires_action: True if 3DS required, False if not
- setup_intent_id: SetupIntent ID for verification
- client_secret: For 3DS authentication (when requires_action=True)
- customer_id: Stripe customer ID
- Other SetupIntent metadata
Raises:
HTTPException: 400 for validation errors, 500 for server errors
"""
try:
logger.info(f"Starting secure registration flow, email={user_data.email}, plan={user_data.subscription_plan}")
# Validate required fields
if not user_data.email or not user_data.email.strip():
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail="Email is required"
)
if not user_data.password or len(user_data.password) < 8:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail="Password must be at least 8 characters long"
)
if not user_data.full_name or not user_data.full_name.strip():
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail="Full name is required"
)
if user_data.subscription_plan and not user_data.payment_method_id:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail="Payment method ID is required for subscription registration"
)
# Start secure registration flow
result = await auth_service.start_secure_registration_flow(user_data)
# Check if 3DS is required
if result.get('requires_action', False):
logger.info(f"Registration requires 3DS verification, email={user_data.email}, setup_intent_id={result.get('setup_intent_id')}")
return {
"requires_action": True,
"action_type": "setup_intent_confirmation",
"client_secret": result.get('client_secret'),
"setup_intent_id": result.get('setup_intent_id'),
"customer_id": result.get('customer_id'),
"payment_customer_id": result.get('payment_customer_id'),
"plan_id": result.get('plan_id'),
"payment_method_id": result.get('payment_method_id'),
"billing_cycle": result.get('billing_cycle'),
"coupon_info": result.get('coupon_info'),
"trial_info": result.get('trial_info'),
"email": result.get('email'),
"message": "Payment verification required. Frontend must confirm SetupIntent to handle 3DS."
}
else:
user = result.get('user')
user_id = user.id if user else None
logger.info(f"Registration completed without 3DS, email={user_data.email}, user_id={user_id}, subscription_id={result.get('subscription_id')}")
# Return complete registration result
user_data_response = None
if user:
user_data_response = {
"id": str(user.id),
"email": user.email,
"full_name": user.full_name,
"is_active": user.is_active
}
return {
"requires_action": False,
"setup_intent_id": result.get('setup_intent_id'),
"user": user_data_response,
"subscription_id": result.get('subscription_id'),
"payment_customer_id": result.get('payment_customer_id'),
"status": result.get('status'),
"message": "Registration completed successfully"
}
except RegistrationError as e:
logger.error(f"Registration flow failed: {str(e)}, email: {user_data.email}",
exc_info=True)
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=f"Registration failed: {str(e)}"
) from e
except PaymentOrchestrationError as e:
logger.error(f"Payment orchestration failed: {str(e)}, email: {user_data.email}",
exc_info=True)
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=f"Payment setup failed: {str(e)}"
) from e
except HTTPException:
# Re-raise explicit HTTP errors (e.g., 400 validation responses) unchanged
raise
except Exception as e:
logger.error(f"Unexpected registration error: {str(e)}, email: {user_data.email}",
exc_info=True)
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=f"Registration error: {str(e)}"
) from e
@router.post("/complete-registration",
response_model=Dict[str, Any],
summary="Complete registration after SetupIntent verification")
async def complete_registration(
verification_data: Dict[str, Any],
request: Request,
auth_service: AuthService = Depends(get_auth_service)
) -> Dict[str, Any]:
"""
Complete registration after frontend confirms SetupIntent
This is the SECOND step in the atomic registration architecture:
1. Called after frontend confirms SetupIntent (with or without 3DS)
2. Verifies SetupIntent status with Stripe
3. Creates subscription with verified payment method (FIRST time subscription is created)
4. Creates user record in auth database
5. Saves onboarding progress
6. Generates auth tokens for auto-login
This endpoint is called after the user confirms the SetupIntent (the requires_action=True / 3DS flow).
When start-registration reports requires_action=False, the user and subscription are already created there and this step is not needed.
Creating the subscription only against a verified SetupIntent prevents duplicate subscriptions.
Args:
verification_data: Must contain:
- setup_intent_id: Verified SetupIntent ID
- user_data: Original user registration data
Returns:
Complete registration result with:
- user: Created user data
- subscription_id: Created subscription ID
- payment_customer_id: Stripe customer ID
- access_token: JWT access token
- refresh_token: JWT refresh token
Raises:
HTTPException: 400 if setup_intent_id is missing, 500 for server errors
"""
try:
# Validate required fields
if not verification_data.get('setup_intent_id'):
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail="SetupIntent ID is required"
)
if not verification_data.get('user_data'):
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail="User data is required"
)
# Extract user data
user_data_dict = verification_data['user_data']
user_data = UserRegistration(**user_data_dict)
logger.info(f"Completing registration after SetupIntent verification, email={user_data.email}, setup_intent_id={verification_data['setup_intent_id']}")
# Complete registration with verified payment
result = await auth_service.complete_registration_with_verified_payment(
verification_data['setup_intent_id'],
user_data
)
logger.info(f"Registration completed successfully after 3DS, user_id={result['user'].id}, email={result['user'].email}, subscription_id={result.get('subscription_id')}")
return {
"success": True,
"user": {
"id": str(result['user'].id),
"email": result['user'].email,
"full_name": result['user'].full_name,
"is_active": result['user'].is_active,
"is_verified": result['user'].is_verified,
"created_at": result['user'].created_at.isoformat() if result['user'].created_at else None,
"role": result['user'].role
},
"subscription_id": result.get('subscription_id'),
"payment_customer_id": result.get('payment_customer_id'),
"status": result.get('status'),
"access_token": result.get('access_token'),
"refresh_token": result.get('refresh_token'),
"message": "Registration completed successfully after 3DS verification"
}
except RegistrationError as e:
logger.error(f"Registration completion after 3DS failed: {str(e)}, setup_intent_id: {verification_data.get('setup_intent_id')}, email: {user_data_dict.get('email') if user_data_dict else 'unknown'}",
exc_info=True)
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=f"Registration completion failed: {str(e)}"
) from e
except HTTPException:
# Re-raise explicit HTTP errors (e.g., 400 validation responses) unchanged
raise
except Exception as e:
logger.error(f"Unexpected registration completion error: {str(e)}, setup_intent_id: {verification_data.get('setup_intent_id')}",
exc_info=True)
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=f"Registration completion error: {str(e)}"
) from e
@router.post("/login",
response_model=Dict[str, Any],
summary="User login with subscription validation")
async def login(
login_data: UserLogin,
request: Request,
auth_service: AuthService = Depends(get_auth_service)
) -> Dict[str, Any]:
"""
User login endpoint with subscription validation
This endpoint:
1. Validates user credentials
2. Checks if user has active subscription (if required)
3. Returns authentication tokens
4. Updates last login timestamp
Args:
login_data: User login credentials (email and password)
Returns:
Authentication tokens and user information
Raises:
HTTPException: 401 for invalid credentials, 403 for subscription issues
"""
try:
logger.info(f"Login attempt, email={login_data.email}")
# Validate required fields
if not login_data.email or not login_data.email.strip():
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail="Email is required"
)
if not login_data.password or len(login_data.password) < 8:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail="Password must be at least 8 characters long"
)
# Call auth service to perform login
result = await auth_service.login_user(login_data)
logger.info(f"Login successful, email={login_data.email}, user_id={result['user'].id}")
# Extract tokens from result for top-level response
tokens = result.get('tokens', {})
return {
"success": True,
"access_token": tokens.get('access_token'),
"refresh_token": tokens.get('refresh_token'),
"token_type": tokens.get('token_type'),
"expires_in": tokens.get('expires_in'),
"user": {
"id": str(result['user'].id),
"email": result['user'].email,
"full_name": result['user'].full_name,
"is_active": result['user'].is_active,
"last_login": result['user'].last_login.isoformat() if result['user'].last_login else None
},
"subscription": result.get('subscription', {}),
"message": "Login successful"
}
except HTTPException:
# Re-raise HTTP exceptions (like 401 for invalid credentials)
raise
except Exception as e:
logger.error(f"Login failed: {str(e)}, email: {login_data.email}",
exc_info=True)
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=f"Login failed: {str(e)}"
) from e
# ============================================================================
# TOKEN MANAGEMENT ENDPOINTS
# ============================================================================
@router.post("/refresh",
response_model=Dict[str, Any],
summary="Refresh access token using refresh token")
async def refresh_token(
request: Request,
refresh_data: Dict[str, Any],
auth_service: AuthService = Depends(get_auth_service)
) -> Dict[str, Any]:
"""
Refresh access token using a valid refresh token
This endpoint:
1. Validates the refresh token
2. Generates new access and refresh tokens
3. Returns the new tokens
Args:
refresh_data: Dictionary containing refresh_token
Returns:
New authentication tokens
Raises:
HTTPException: 401 for invalid refresh tokens
"""
try:
logger.info("Token refresh request initiated")
# Extract refresh token from request
refresh_token = refresh_data.get("refresh_token")
if not refresh_token:
logger.warning("Refresh token missing from request")
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail="Refresh token is required"
)
# Use service layer to refresh tokens
tokens = await auth_service.refresh_auth_tokens(refresh_token)
logger.info("Token refresh successful via service layer")
return {
"success": True,
"access_token": tokens.get("access_token"),
"refresh_token": tokens.get("refresh_token"),
"token_type": "bearer",
"expires_in": 1800, # 30 minutes
"message": "Token refresh successful"
}
except HTTPException:
# Re-raise HTTP exceptions
raise
except Exception as e:
logger.error(f"Token refresh failed: {str(e)}", exc_info=True)
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=f"Token refresh failed: {str(e)}"
) from e
@router.post("/verify",
response_model=Dict[str, Any],
summary="Verify token validity")
async def verify_token(
request: Request,
token_data: Dict[str, Any],
auth_service: AuthService = Depends(get_auth_service)
) -> Dict[str, Any]:
"""
Verify the validity of an access token
Args:
token_data: Dictionary containing access_token
Returns:
Token validation result
"""
try:
logger.info("Token verification request initiated")
# Extract access token from request
access_token = token_data.get("access_token")
if not access_token:
logger.warning("Access token missing from verification request")
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail="Access token is required"
)
# Use service layer to verify token
result = await auth_service.verify_access_token(access_token)
logger.info("Token verification successful via service layer")
return {
"success": True,
"valid": result.get("valid"),
"user_id": result.get("user_id"),
"email": result.get("email"),
"message": "Token is valid"
}
except HTTPException:
# Re-raise HTTP exceptions
raise
except Exception as e:
logger.error(f"Token verification failed: {str(e)}", exc_info=True)
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=f"Token verification failed: {str(e)}"
) from e
@router.post("/logout",
response_model=Dict[str, Any],
summary="Logout and revoke refresh token")
async def logout(
request: Request,
logout_data: Dict[str, Any],
auth_service: AuthService = Depends(get_auth_service)
) -> Dict[str, Any]:
"""
Logout user and revoke refresh token
Args:
logout_data: Dictionary containing refresh_token
Returns:
Logout confirmation
"""
try:
logger.info("Logout request initiated")
# Extract refresh token from request
refresh_token = logout_data.get("refresh_token")
if not refresh_token:
logger.warning("Refresh token missing from logout request")
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail="Refresh token is required"
)
# Use service layer to revoke refresh token
try:
await auth_service.revoke_refresh_token(refresh_token)
logger.info("Logout successful via service layer")
return {
"success": True,
"message": "Logout successful"
}
except Exception as e:
logger.error(f"Error during logout: {str(e)}")
# Don't fail logout if revocation fails
return {
"success": True,
"message": "Logout successful (token revocation failed but user logged out)"
}
except HTTPException:
# Re-raise HTTP exceptions
raise
except Exception as e:
logger.error(f"Logout failed: {str(e)}", exc_info=True)
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=f"Logout failed: {str(e)}"
) from e
@router.post("/change-password",
response_model=Dict[str, Any],
summary="Change user password")
async def change_password(
request: Request,
password_data: Dict[str, Any],
auth_service: AuthService = Depends(get_auth_service)
) -> Dict[str, Any]:
"""
Change user password
Args:
password_data: Dictionary containing current_password and new_password
Returns:
Password change confirmation
"""
try:
logger.info("Password change request initiated")
# Extract user from request state
if not hasattr(request.state, 'user') or not request.state.user:
logger.warning("Unauthorized password change attempt - no user context")
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail="Authentication required"
)
user_id = request.state.user.get("user_id")
if not user_id:
logger.warning("Unauthorized password change attempt - no user_id")
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail="Invalid user context"
)
# Extract password data
current_password = password_data.get("current_password")
new_password = password_data.get("new_password")
if not current_password or not new_password:
logger.warning("Password change missing required fields")
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail="Current password and new password are required"
)
if len(new_password) < 8:
logger.warning("New password too short")
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail="New password must be at least 8 characters long"
)
# Use service layer to change password
await auth_service.change_user_password(user_id, current_password, new_password)
logger.info(f"Password change successful via service layer, user_id={user_id}")
return {
"success": True,
"message": "Password changed successfully"
}
except HTTPException:
# Re-raise HTTP exceptions
raise
except Exception as e:
logger.error(f"Password change failed: {str(e)}", exc_info=True)
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=f"Password change failed: {str(e)}"
) from e
@router.post("/verify-email",
response_model=Dict[str, Any],
summary="Verify user email")
async def verify_email(
request: Request,
email_data: Dict[str, Any],
auth_service: AuthService = Depends(get_auth_service)
) -> Dict[str, Any]:
"""
Verify user email (placeholder implementation)
Args:
email_data: Dictionary containing email and verification_token
Returns:
Email verification confirmation
"""
try:
logger.info("Email verification request initiated")
# Extract email and token
email = email_data.get("email")
verification_token = email_data.get("verification_token")
if not email or not verification_token:
logger.warning("Email verification missing required fields")
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail="Email and verification token are required"
)
# Use service layer to verify email
await auth_service.verify_user_email(email, verification_token)
logger.info("Email verification successful via service layer")
return {
"success": True,
"message": "Email verified successfully"
}
except HTTPException:
# Re-raise HTTP exceptions
raise
except Exception as e:
logger.error(f"Email verification failed: {str(e)}", exc_info=True)
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=f"Email verification failed: {str(e)}"
) from e
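A minimal sketch of the two-step registration handshake from the client side, assuming the gateway URL below; the Stripe confirmation helper is hypothetical and stands in for the frontend's Stripe.js confirmCardSetup step.

# Illustrative client flow for start-registration / complete-registration (sketch only).
import httpx

BASE_URL = "https://api.example.com"  # assumed gateway URL

async def confirm_setup_intent_with_stripe(client_secret: str) -> None:
    # Hypothetical placeholder: in a real frontend this is handled by Stripe.js.
    raise NotImplementedError

async def register(user_data: dict) -> dict:
    async with httpx.AsyncClient(timeout=30.0) as client:
        start_resp = await client.post(f"{BASE_URL}/api/v1/auth/start-registration", json=user_data)
        start_resp.raise_for_status()
        start = start_resp.json()
        if not start.get("requires_action"):
            return start  # no 3DS: user and subscription were created in the first call
        # 3DS required: confirm the SetupIntent, then finish registration with its ID
        await confirm_setup_intent_with_stripe(start["client_secret"])
        complete_resp = await client.post(
            f"{BASE_URL}/api/v1/auth/complete-registration",
            json={"setup_intent_id": start["setup_intent_id"], "user_data": user_data},
        )
        complete_resp.raise_for_status()
        return complete_resp.json()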


@@ -0,0 +1,372 @@
"""
User consent management API endpoints for GDPR compliance
"""
from typing import List, Optional
from uuid import UUID
from fastapi import APIRouter, Depends, HTTPException, status, Request
from pydantic import BaseModel, Field
from datetime import datetime, timezone
import structlog
import hashlib
from shared.auth.decorators import get_current_user_dep
from app.core.database import get_db
from app.models.consent import UserConsent, ConsentHistory
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, and_
logger = structlog.get_logger()
router = APIRouter()
class ConsentRequest(BaseModel):
"""Request model for granting/updating consent"""
terms_accepted: bool = Field(..., description="Accept terms of service")
privacy_accepted: bool = Field(..., description="Accept privacy policy")
marketing_consent: bool = Field(default=False, description="Consent to marketing communications")
analytics_consent: bool = Field(default=False, description="Consent to analytics cookies")
consent_method: str = Field(..., description="How consent was given (registration, settings, cookie_banner)")
consent_version: str = Field(default="1.0", description="Version of terms/privacy policy")
class ConsentResponse(BaseModel):
"""Response model for consent data"""
id: str
user_id: str
terms_accepted: bool
privacy_accepted: bool
marketing_consent: bool
analytics_consent: bool
consent_version: str
consent_method: str
consented_at: str
withdrawn_at: Optional[str]
class ConsentHistoryResponse(BaseModel):
"""Response model for consent history"""
id: str
user_id: str
action: str
consent_snapshot: dict
created_at: str
def hash_text(text: str) -> str:
"""Create a SHA-256 hash of consent text for verification (not yet used by the endpoints below)"""
return hashlib.sha256(text.encode()).hexdigest()
@router.post("/api/v1/auth/me/consent", response_model=ConsentResponse, status_code=status.HTTP_201_CREATED)
async def record_consent(
consent_data: ConsentRequest,
request: Request,
current_user: dict = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""
Record user consent for data processing
GDPR Article 7 - Conditions for consent
"""
try:
user_id = UUID(current_user["user_id"])
ip_address = request.client.host if request.client else None
user_agent = request.headers.get("user-agent")
consent = UserConsent(
user_id=user_id,
terms_accepted=consent_data.terms_accepted,
privacy_accepted=consent_data.privacy_accepted,
marketing_consent=consent_data.marketing_consent,
analytics_consent=consent_data.analytics_consent,
consent_version=consent_data.consent_version,
consent_method=consent_data.consent_method,
ip_address=ip_address,
user_agent=user_agent,
consented_at=datetime.now(timezone.utc)
)
db.add(consent)
await db.flush()
history = ConsentHistory(
user_id=user_id,
consent_id=consent.id,
action="granted",
consent_snapshot=consent_data.dict(),
ip_address=ip_address,
user_agent=user_agent,
consent_method=consent_data.consent_method,
created_at=datetime.now(timezone.utc)
)
db.add(history)
await db.commit()
await db.refresh(consent)
logger.info(
"consent_recorded",
user_id=str(user_id),
consent_version=consent_data.consent_version,
method=consent_data.consent_method
)
return ConsentResponse(
id=str(consent.id),
user_id=str(consent.user_id),
terms_accepted=consent.terms_accepted,
privacy_accepted=consent.privacy_accepted,
marketing_consent=consent.marketing_consent,
analytics_consent=consent.analytics_consent,
consent_version=consent.consent_version,
consent_method=consent.consent_method,
consented_at=consent.consented_at.isoformat(),
withdrawn_at=consent.withdrawn_at.isoformat() if consent.withdrawn_at else None
)
except Exception as e:
await db.rollback()
logger.error("error_recording_consent", error=str(e), user_id=current_user.get("user_id"))
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to record consent"
)
@router.get("/api/v1/auth/me/consent/current", response_model=Optional[ConsentResponse])
async def get_current_consent(
current_user: dict = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""
Get current active consent for user
"""
try:
user_id = UUID(current_user["user_id"])
query = select(UserConsent).where(
and_(
UserConsent.user_id == user_id,
UserConsent.withdrawn_at.is_(None)
)
).order_by(UserConsent.consented_at.desc())
result = await db.execute(query)
consent = result.scalar_one_or_none()
if not consent:
return None
return ConsentResponse(
id=str(consent.id),
user_id=str(consent.user_id),
terms_accepted=consent.terms_accepted,
privacy_accepted=consent.privacy_accepted,
marketing_consent=consent.marketing_consent,
analytics_consent=consent.analytics_consent,
consent_version=consent.consent_version,
consent_method=consent.consent_method,
consented_at=consent.consented_at.isoformat(),
withdrawn_at=consent.withdrawn_at.isoformat() if consent.withdrawn_at else None
)
except Exception as e:
logger.error("error_getting_consent", error=str(e), user_id=current_user.get("user_id"))
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to retrieve consent"
)
@router.get("/api/v1/auth/me/consent/history", response_model=List[ConsentHistoryResponse])
async def get_consent_history(
current_user: dict = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""
Get complete consent history for user
GDPR Article 7(1) - Demonstrating consent
"""
try:
user_id = UUID(current_user["user_id"])
query = select(ConsentHistory).where(
ConsentHistory.user_id == user_id
).order_by(ConsentHistory.created_at.desc())
result = await db.execute(query)
history = result.scalars().all()
return [
ConsentHistoryResponse(
id=str(h.id),
user_id=str(h.user_id),
action=h.action,
consent_snapshot=h.consent_snapshot,
created_at=h.created_at.isoformat()
)
for h in history
]
except Exception as e:
logger.error("error_getting_consent_history", error=str(e), user_id=current_user.get("user_id"))
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to retrieve consent history"
)
@router.put("/api/v1/auth/me/consent", response_model=ConsentResponse)
async def update_consent(
consent_data: ConsentRequest,
request: Request,
current_user: dict = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""
Update user consent preferences
GDPR Article 7(3) - Withdrawal of consent
"""
try:
user_id = UUID(current_user["user_id"])
query = select(UserConsent).where(
and_(
UserConsent.user_id == user_id,
UserConsent.withdrawn_at.is_(None)
)
).order_by(UserConsent.consented_at.desc())
result = await db.execute(query)
current_consent = result.scalar_one_or_none()
if current_consent:
current_consent.withdrawn_at = datetime.now(timezone.utc)
history = ConsentHistory(
user_id=user_id,
consent_id=current_consent.id,
action="updated",
consent_snapshot=current_consent.to_dict(),
ip_address=request.client.host if request.client else None,
user_agent=request.headers.get("user-agent"),
consent_method=consent_data.consent_method,
created_at=datetime.now(timezone.utc)
)
db.add(history)
new_consent = UserConsent(
user_id=user_id,
terms_accepted=consent_data.terms_accepted,
privacy_accepted=consent_data.privacy_accepted,
marketing_consent=consent_data.marketing_consent,
analytics_consent=consent_data.analytics_consent,
consent_version=consent_data.consent_version,
consent_method=consent_data.consent_method,
ip_address=request.client.host if request.client else None,
user_agent=request.headers.get("user-agent"),
consented_at=datetime.now(timezone.utc)
)
db.add(new_consent)
await db.flush()
history = ConsentHistory(
user_id=user_id,
consent_id=new_consent.id,
action="granted" if not current_consent else "updated",
consent_snapshot=consent_data.dict(),
ip_address=request.client.host if request.client else None,
user_agent=request.headers.get("user-agent"),
consent_method=consent_data.consent_method,
created_at=datetime.now(timezone.utc)
)
db.add(history)
await db.commit()
await db.refresh(new_consent)
logger.info(
"consent_updated",
user_id=str(user_id),
consent_version=consent_data.consent_version
)
return ConsentResponse(
id=str(new_consent.id),
user_id=str(new_consent.user_id),
terms_accepted=new_consent.terms_accepted,
privacy_accepted=new_consent.privacy_accepted,
marketing_consent=new_consent.marketing_consent,
analytics_consent=new_consent.analytics_consent,
consent_version=new_consent.consent_version,
consent_method=new_consent.consent_method,
consented_at=new_consent.consented_at.isoformat(),
withdrawn_at=None
)
except Exception as e:
await db.rollback()
logger.error("error_updating_consent", error=str(e), user_id=current_user.get("user_id"))
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to update consent"
)
@router.post("/api/v1/auth/me/consent/withdraw", status_code=status.HTTP_200_OK)
async def withdraw_consent(
request: Request,
current_user: dict = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""
Withdraw all consent
GDPR Article 7(3) - Right to withdraw consent
"""
try:
user_id = UUID(current_user["user_id"])
query = select(UserConsent).where(
and_(
UserConsent.user_id == user_id,
UserConsent.withdrawn_at.is_(None)
)
)
result = await db.execute(query)
consents = result.scalars().all()
for consent in consents:
consent.withdrawn_at = datetime.now(timezone.utc)
history = ConsentHistory(
user_id=user_id,
consent_id=consent.id,
action="withdrawn",
consent_snapshot=consent.to_dict(),
ip_address=request.client.host if request.client else None,
user_agent=request.headers.get("user-agent"),
consent_method="user_withdrawal",
created_at=datetime.now(timezone.utc)
)
db.add(history)
await db.commit()
logger.info("consent_withdrawn", user_id=str(user_id), count=len(consents))
return {
"message": "Consent withdrawn successfully",
"withdrawn_count": len(consents)
}
except Exception as e:
await db.rollback()
logger.error("error_withdrawing_consent", error=str(e), user_id=current_user.get("user_id"))
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to withdraw consent"
)
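A short sketch of a client recording consent against the endpoint above, assuming the gateway URL and token below; the payload fields mirror ConsentRequest.

# Illustrative call to POST /api/v1/auth/me/consent (sketch only).
import httpx

BASE_URL = "https://api.example.com"  # assumed gateway URL

async def record_registration_consent(access_token: str) -> dict:
    payload = {
        "terms_accepted": True,
        "privacy_accepted": True,
        "marketing_consent": False,
        "analytics_consent": True,
        "consent_method": "registration",
        "consent_version": "1.0",
    }
    async with httpx.AsyncClient(timeout=10.0) as client:
        response = await client.post(
            f"{BASE_URL}/api/v1/auth/me/consent",
            headers={"Authorization": f"Bearer {access_token}"},
            json=payload,
        )
        response.raise_for_status()
        return response.json()  # ConsentResponse fields: id, consented_at, flags, ...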


@@ -0,0 +1,121 @@
"""
User data export API endpoints for GDPR compliance
Implements Article 15 (Right to Access) and Article 20 (Right to Data Portability)
"""
from uuid import UUID
from fastapi import APIRouter, Depends, HTTPException, status
from fastapi.responses import JSONResponse
import structlog
from shared.auth.decorators import get_current_user_dep
from app.core.database import get_db
from app.services.data_export_service import DataExportService
logger = structlog.get_logger()
router = APIRouter()
@router.get("/api/v1/auth/me/export")
async def export_my_data(
current_user: dict = Depends(get_current_user_dep),
db = Depends(get_db)
):
"""
Export all personal data for the current user
GDPR Article 15 - Right of access by the data subject
GDPR Article 20 - Right to data portability
Returns complete user data in machine-readable JSON format including:
- Personal information
- Account data
- Consent history
- Security logs
- Audit trail
Response is provided in JSON format for easy data portability.
"""
try:
user_id = UUID(current_user["user_id"])
export_service = DataExportService(db)
data = await export_service.export_user_data(user_id)
logger.info(
"data_export_requested",
user_id=str(user_id),
email=current_user.get("email")
)
return JSONResponse(
content=data,
status_code=status.HTTP_200_OK,
headers={
"Content-Disposition": f'attachment; filename="user_data_export_{user_id}.json"',
"Content-Type": "application/json"
}
)
except Exception as e:
logger.error(
"data_export_failed",
user_id=current_user.get("user_id"),
error=str(e)
)
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to export user data"
)
@router.get("/api/v1/auth/me/export/summary")
async def get_export_summary(
current_user: dict = Depends(get_current_user_dep),
db = Depends(get_db)
):
"""
Get a summary of what data would be exported
Useful for showing users what data we have about them
before they request full export.
"""
try:
user_id = UUID(current_user["user_id"])
export_service = DataExportService(db)
data = await export_service.export_user_data(user_id)
summary = {
"user_id": str(user_id),
"data_categories": {
"personal_data": bool(data.get("personal_data")),
"account_data": bool(data.get("account_data")),
"consent_data": bool(data.get("consent_data")),
"security_data": bool(data.get("security_data")),
"onboarding_data": bool(data.get("onboarding_data")),
"audit_logs": bool(data.get("audit_logs"))
},
"data_counts": {
"active_sessions": data.get("account_data", {}).get("active_sessions_count", 0),
"consent_changes": data.get("consent_data", {}).get("total_consent_changes", 0),
"login_attempts": len(data.get("security_data", {}).get("recent_login_attempts", [])),
"audit_logs": data.get("audit_logs", {}).get("total_logs_exported", 0)
},
"export_format": "JSON",
"gdpr_articles": ["Article 15 (Right to Access)", "Article 20 (Data Portability)"]
}
return summary
except Exception as e:
logger.error(
"export_summary_failed",
user_id=current_user.get("user_id"),
error=str(e)
)
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to generate export summary"
)
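A brief sketch of downloading the Article 15/20 export to a local file, assuming the gateway URL and token below.

# Illustrative download of GET /api/v1/auth/me/export (sketch only).
import httpx

BASE_URL = "https://api.example.com"  # assumed gateway URL

def download_my_data(access_token: str, out_path: str = "user_data_export.json") -> str:
    response = httpx.get(
        f"{BASE_URL}/api/v1/auth/me/export",
        headers={"Authorization": f"Bearer {access_token}"},
        timeout=60.0,
    )
    response.raise_for_status()
    with open(out_path, "wb") as f:
        f.write(response.content)  # machine-readable JSON export
    return out_path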


@@ -0,0 +1,229 @@
"""
Internal Demo Cloning API for Auth Service
Service-to-service endpoint for cloning authentication and user data
"""
from fastapi import APIRouter, Depends, HTTPException, Header
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select
import structlog
import uuid
from datetime import datetime, timezone
from typing import Optional
import os
import sys
from pathlib import Path
import json
# Add shared path
sys.path.insert(0, str(Path(__file__).parent.parent.parent.parent.parent))
from app.core.database import get_db
from app.models.users import User
from app.core.config import settings
logger = structlog.get_logger()
router = APIRouter(prefix="/internal/demo", tags=["internal"])
# Base demo tenant IDs
DEMO_TENANT_PROFESSIONAL = "a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6"
@router.post("/clone")
async def clone_demo_data(
base_tenant_id: str,
virtual_tenant_id: str,
demo_account_type: str,
session_id: Optional[str] = None,
session_created_at: Optional[str] = None,
db: AsyncSession = Depends(get_db)
):
"""
Clone auth service data for a virtual demo tenant
Clones:
- Demo users (owner and staff)
Note: Tenant memberships are handled by the tenant service's internal_demo endpoint
Args:
base_tenant_id: Template tenant UUID to clone from
virtual_tenant_id: Target virtual tenant UUID
demo_account_type: Type of demo account
session_id: Originating session ID for tracing
Returns:
Cloning status and record counts
"""
start_time = datetime.now(timezone.utc)
# Parse session creation time
if session_created_at:
try:
session_time = datetime.fromisoformat(session_created_at.replace('Z', '+00:00'))
except (ValueError, AttributeError):
session_time = start_time
else:
session_time = start_time
logger.info(
"Starting auth data cloning",
base_tenant_id=base_tenant_id,
virtual_tenant_id=virtual_tenant_id,
demo_account_type=demo_account_type,
session_id=session_id,
session_created_at=session_created_at
)
try:
# Validate UUIDs
base_uuid = uuid.UUID(base_tenant_id)
virtual_uuid = uuid.UUID(virtual_tenant_id)
# Note: We don't check for existing users since User model doesn't have demo_session_id
# Demo users are identified by their email addresses from the seed data
# Idempotency is handled by checking if each user email already exists below
# Load demo users from JSON seed file
from shared.utils.seed_data_paths import get_seed_data_path
if demo_account_type == "professional":
json_file = get_seed_data_path("professional", "02-auth.json")
elif demo_account_type == "enterprise":
json_file = get_seed_data_path("enterprise", "02-auth.json")
elif demo_account_type == "enterprise_child":
# Child locations don't have separate auth data - they share parent's users
logger.info("enterprise_child uses parent tenant auth, skipping user cloning", virtual_tenant_id=virtual_tenant_id)
return {
"service": "auth",
"status": "completed",
"records_cloned": 0,
"duration_ms": int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000),
"details": {"users": 0, "note": "Child locations share parent auth"}
}
else:
raise ValueError(f"Invalid demo account type: {demo_account_type}")
# Load JSON data (json is already imported at module level)
with open(json_file, 'r', encoding='utf-8') as f:
seed_data = json.load(f)
# Get demo users for this account type
demo_users_data = seed_data.get("users", [])
records_cloned = 0
# Create users and tenant memberships
for user_data in demo_users_data:
user_id = uuid.UUID(user_data["id"])
# Create user if not exists
user_result = await db.execute(
select(User).where(User.id == user_id)
)
existing_user = user_result.scalars().first()
if not existing_user:
# Apply date adjustments to created_at and updated_at
from shared.utils.demo_dates import adjust_date_for_demo
# Adjust created_at date
created_at_str = user_data.get("created_at", session_time.isoformat())
if isinstance(created_at_str, str):
try:
original_created_at = datetime.fromisoformat(created_at_str.replace('Z', '+00:00'))
adjusted_created_at = adjust_date_for_demo(original_created_at, session_time)
except ValueError:
adjusted_created_at = session_time
else:
adjusted_created_at = session_time
# Adjust updated_at date (same as created_at for demo users)
adjusted_updated_at = adjusted_created_at
# Get full_name from either "name" or "full_name" field
full_name = user_data.get("full_name") or user_data.get("name", "Demo User")
# For demo users, use a placeholder hashed password (they won't actually log in)
# In production, this would be properly hashed
demo_hashed_password = "$2b$12$LQv3c1yqBWVHxkd0LHAkCOYz6TtxMQJqhN8/LewY5GyYqNlI.eFKW" # "demo_password"
user = User(
id=user_id,
email=user_data["email"],
full_name=full_name,
hashed_password=demo_hashed_password,
is_active=user_data.get("is_active", True),
is_verified=True,
role=user_data.get("role", "member"),
language=user_data.get("language", "es"),
timezone=user_data.get("timezone", "Europe/Madrid"),
created_at=adjusted_created_at,
updated_at=adjusted_updated_at
)
db.add(user)
records_cloned += 1
# Note: Tenant memberships are handled by tenant service
# Only create users in auth service
await db.commit()
duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
logger.info(
"Auth data cloning completed",
virtual_tenant_id=virtual_tenant_id,
session_id=session_id,
records_cloned=records_cloned,
duration_ms=duration_ms
)
return {
"service": "auth",
"status": "completed",
"records_cloned": records_cloned,
"base_tenant_id": str(base_tenant_id),
"virtual_tenant_id": str(virtual_tenant_id),
"session_id": session_id,
"demo_account_type": demo_account_type,
"duration_ms": duration_ms
}
except ValueError as e:
logger.error("Invalid UUID format", error=str(e), virtual_tenant_id=virtual_tenant_id)
raise HTTPException(status_code=400, detail=f"Invalid UUID: {str(e)}")
except Exception as e:
logger.error(
"Failed to clone auth data",
error=str(e),
virtual_tenant_id=virtual_tenant_id,
exc_info=True
)
# Rollback on error
await db.rollback()
return {
"service": "auth",
"status": "failed",
"records_cloned": 0,
"duration_ms": int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000),
"error": str(e)
}
@router.get("/clone/health")
async def clone_health_check():
"""
Health check for internal cloning endpoint
Used by orchestrator to verify service availability
"""
return {
"service": "auth",
"clone_endpoint": "available",
"version": "1.0.0"
}
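A minimal sketch of how an orchestrator might call this internal endpoint; the auth service hostname is an assumption, and the scalar arguments of clone_demo_data are passed as query parameters.

# Illustrative service-to-service call to POST /internal/demo/clone (sketch only).
import uuid
import httpx

AUTH_SERVICE_URL = "http://auth-service:8000"  # assumed internal hostname

async def clone_auth_demo_data(base_tenant_id: str, session_id: str) -> dict:
    params = {
        "base_tenant_id": base_tenant_id,
        "virtual_tenant_id": str(uuid.uuid4()),
        "demo_account_type": "professional",
        "session_id": session_id,
    }
    async with httpx.AsyncClient(timeout=30.0) as client:
        response = await client.post(f"{AUTH_SERVICE_URL}/internal/demo/clone", params=params)
        response.raise_for_status()
        return response.json()  # {"service": "auth", "status": ..., "records_cloned": ...}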

File diff suppressed because it is too large.


@@ -0,0 +1,308 @@
# services/auth/app/api/password_reset.py
"""
Password reset API endpoints
Handles forgot password and password reset functionality
"""
import logging
from datetime import datetime, timedelta, timezone
from typing import Dict, Any
from fastapi import APIRouter, Depends, HTTPException, status, BackgroundTasks
from app.services.auth_service import auth_service, AuthService
from app.schemas.auth import PasswordReset, PasswordResetConfirm
from app.core.security import SecurityManager
from app.core.config import settings
from app.repositories.password_reset_repository import PasswordResetTokenRepository
from app.repositories.user_repository import UserRepository
from app.models.users import User
from shared.clients.notification_client import NotificationServiceClient
import structlog
# Configure logging
logger = structlog.get_logger()
# Create router
router = APIRouter(prefix="/api/v1/auth", tags=["password-reset"])
async def get_auth_service() -> AuthService:
"""Dependency injection for auth service"""
return auth_service
def generate_reset_token() -> str:
"""Generate a secure password reset token"""
import secrets
return secrets.token_urlsafe(32)
async def send_password_reset_email(email: str, reset_token: str, user_full_name: str):
"""Send password reset email in background using notification service"""
try:
# Construct reset link (this should match your frontend URL)
# Use FRONTEND_URL from settings if available, otherwise fall back to gateway URL
frontend_url = getattr(settings, 'FRONTEND_URL', settings.GATEWAY_URL)
reset_link = f"{frontend_url}/reset-password?token={reset_token}"
# Create HTML content for the password reset email in Spanish
html_content = f"""
<!DOCTYPE html>
<html lang="es">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Restablecer Contraseña</title>
<style>
body {{
font-family: Arial, sans-serif;
line-height: 1.6;
color: #333;
max-width: 600px;
margin: 0 auto;
padding: 20px;
background-color: #f9f9f9;
}}
.header {{
text-align: center;
margin-bottom: 30px;
background: linear-gradient(135deg, #4F46E5 0%, #7C3AED 100%);
color: white;
padding: 20px;
border-radius: 8px;
}}
.content {{
background: white;
padding: 30px;
border-radius: 8px;
box-shadow: 0 2px 10px rgba(0,0,0,0.1);
}}
.button {{
display: inline-block;
padding: 12px 30px;
background-color: #4F46E5;
color: white;
text-decoration: none;
border-radius: 5px;
margin: 20px 0;
font-weight: bold;
}}
.footer {{
margin-top: 40px;
text-align: center;
font-size: 0.9em;
color: #666;
padding-top: 20px;
border-top: 1px solid #eee;
}}
</style>
</head>
<body>
<div class="header">
<h1>Restablecer Contraseña</h1>
</div>
<div class="content">
<p>Hola {user_full_name},</p>
<p>Recibimos una solicitud para restablecer tu contraseña. Haz clic en el botón de abajo para crear una nueva contraseña:</p>
<p style="text-align: center; margin: 30px 0;">
<a href="{reset_link}" class="button">Restablecer Contraseña</a>
</p>
<p>Si no solicitaste un restablecimiento de contraseña, puedes ignorar este correo electrónico de forma segura.</p>
<p>Este enlace expirará en 1 hora por razones de seguridad.</p>
</div>
<div class="footer">
<p>Este es un mensaje automático de BakeWise. Por favor, no respondas a este correo electrónico.</p>
</div>
</body>
</html>
"""
# Create text content as fallback
text_content = f"""
Hola {user_full_name},
Recibimos una solicitud para restablecer tu contraseña. Haz clic en el siguiente enlace para crear una nueva contraseña:
{reset_link}
Si no solicitaste un restablecimiento de contraseña, puedes ignorar este correo electrónico de forma segura.
Este enlace expirará en 1 hora por razones de seguridad.
Este es un mensaje automático de BakeWise. Por favor, no respondas a este correo electrónico.
"""
# Send email using the notification service
notification_client = NotificationServiceClient(settings)
# Send the notification using the send_email method
await notification_client.send_email(
tenant_id="system", # Using system tenant for password resets
to_email=email,
subject="Restablecer Contraseña",
message=text_content,
html_content=html_content,
priority="high"
)
logger.info(f"Password reset email sent successfully to {email}")
except Exception as e:
logger.error(f"Failed to send password reset email to {email}: {str(e)}")
@router.post("/password/reset-request",
summary="Request password reset",
description="Send a password reset link to the user's email")
async def request_password_reset(
reset_request: PasswordReset,
background_tasks: BackgroundTasks,
auth_service: AuthService = Depends(get_auth_service)
) -> Dict[str, Any]:
"""
Request a password reset
This endpoint:
1. Finds the user by email
2. Generates a password reset token
3. Stores the token in the database
4. Sends a password reset email to the user
"""
try:
logger.info(f"Password reset request for email: {reset_request.email}")
# Find user by email
async with auth_service.database_manager.get_session() as session:
user_repo = UserRepository(User, session)
user = await user_repo.get_by_field("email", reset_request.email)
if not user:
# Don't reveal if email exists to prevent enumeration attacks
logger.info(f"Password reset request for non-existent email: {reset_request.email}")
return {"message": "If an account with this email exists, a reset link has been sent."}
# Generate a secure reset token
reset_token = generate_reset_token()
# Set token expiration (e.g., 1 hour)
expires_at = datetime.now(timezone.utc) + timedelta(hours=1)
# Store the reset token in the database
token_repo = PasswordResetTokenRepository(session)
# Remove expired reset tokens before issuing a new one
await token_repo.cleanup_expired_tokens()
# Create new reset token
await token_repo.create_token(
user_id=str(user.id),
token=reset_token,
expires_at=expires_at
)
# Commit the transaction
await session.commit()
# Send password reset email in background
background_tasks.add_task(
send_password_reset_email,
user.email,
reset_token,
user.full_name
)
logger.info(f"Password reset token created for user: {user.email}")
return {"message": "If an account with this email exists, a reset link has been sent."}
except HTTPException:
raise
except Exception as e:
logger.error(f"Password reset request failed: {str(e)}", exc_info=True)
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Password reset request failed"
)
@router.post("/password/reset",
summary="Reset password with token",
description="Reset user password using a valid reset token")
async def reset_password(
reset_confirm: PasswordResetConfirm,
auth_service: AuthService = Depends(get_auth_service)
) -> Dict[str, Any]:
"""
Reset password using a valid reset token
This endpoint:
1. Validates the reset token
2. Checks if the token is valid and not expired
3. Updates the user's password
4. Marks the token as used
"""
try:
logger.info(f"Password reset attempt with token: {reset_confirm.token[:10]}...")
# Validate password strength
if not SecurityManager.validate_password(reset_confirm.new_password):
errors = SecurityManager.get_password_validation_errors(reset_confirm.new_password)
logger.warning(f"Password validation failed: {errors}")
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=f"Password does not meet requirements: {'; '.join(errors)}"
)
# Find the reset token in the database
async with auth_service.database_manager.get_session() as session:
token_repo = PasswordResetTokenRepository(session)
reset_token_obj = await token_repo.get_token_by_value(reset_confirm.token)
if not reset_token_obj:
logger.warning(f"Invalid or expired password reset token: {reset_confirm.token[:10]}...")
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail="Invalid or expired reset token"
)
# Get the user associated with this token
user_repo = UserRepository(User, session)
user = await user_repo.get_by_id(str(reset_token_obj.user_id))
if not user:
logger.error(f"User not found for reset token: {reset_confirm.token[:10]}...")
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail="Invalid reset token"
)
# Hash the new password
hashed_password = SecurityManager.hash_password(reset_confirm.new_password)
# Update user's password
await user_repo.update(str(user.id), {
"hashed_password": hashed_password
})
# Mark the reset token as used
await token_repo.mark_token_as_used(str(reset_token_obj.id))
# Commit the transactions
await session.commit()
logger.info(f"Password successfully reset for user: {user.email}")
return {"message": "Password has been reset successfully"}
except HTTPException:
raise
except Exception as e:
logger.error(f"Password reset failed: {str(e)}", exc_info=True)
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Password reset failed"
)
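A short sketch of the two-step reset flow from a client, assuming the gateway URL below; in practice the token reaches the user via the emailed link rather than in code.

# Illustrative client calls for the password reset endpoints (sketch only).
import httpx

BASE_URL = "https://api.example.com"  # assumed gateway URL

async def request_reset(email: str) -> None:
    async with httpx.AsyncClient(timeout=10.0) as client:
        resp = await client.post(f"{BASE_URL}/api/v1/auth/password/reset-request", json={"email": email})
        resp.raise_for_status()  # responds 200 regardless, to avoid account enumeration

async def confirm_reset(token: str, new_password: str) -> None:
    async with httpx.AsyncClient(timeout=10.0) as client:
        resp = await client.post(
            f"{BASE_URL}/api/v1/auth/password/reset",
            json={"token": token, "new_password": new_password},
        )
        resp.raise_for_status()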


@@ -0,0 +1,662 @@
"""
User management API routes
"""
from fastapi import APIRouter, Depends, HTTPException, status, BackgroundTasks, Path, Body
from sqlalchemy.ext.asyncio import AsyncSession
from typing import Dict, Any
import structlog
import uuid
from datetime import datetime, timezone
from app.core.database import get_db, get_background_db_session
from app.schemas.auth import UserResponse, PasswordChange
from app.schemas.users import UserUpdate, BatchUserRequest, OwnerUserCreate
from app.services.user_service import EnhancedUserService
from app.models.users import User
from app.services.admin_delete import AdminUserDeleteService
from app.models import AuditLog
# Import unified authentication from shared library
from shared.auth.decorators import (
get_current_user_dep,
require_admin_role_dep
)
from shared.security import create_audit_logger, AuditSeverity, AuditAction
logger = structlog.get_logger()
router = APIRouter(tags=["users"])
# Initialize audit logger
audit_logger = create_audit_logger("auth-service", AuditLog)
@router.delete("/api/v1/auth/users/{user_id}")
async def delete_admin_user(
background_tasks: BackgroundTasks,
user_id: str = Path(..., description="User ID"),
current_user = Depends(require_admin_role_dep),
db: AsyncSession = Depends(get_db)
):
"""
Delete an admin user and all associated data across all services.
This operation will:
1. Cancel any active training jobs for user's tenants
2. Delete all trained models and artifacts
3. Delete all forecasts and predictions
4. Delete notification preferences and logs
5. Handle tenant ownership (transfer or delete)
6. Delete user account and authentication data
**Warning: This operation is irreversible!**
"""
# Validate user_id format
try:
uuid.UUID(user_id)
except ValueError:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail="Invalid user ID format"
)
# Quick validation that user exists before starting background task
deletion_service = AdminUserDeleteService(db)
user_info = await deletion_service._validate_admin_user(user_id)
if not user_info:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail=f"Admin user {user_id} not found"
)
# Log audit event for user deletion
try:
# Get tenant_id from current_user or use a placeholder for system-level operations
tenant_id_str = current_user.get("tenant_id", "00000000-0000-0000-0000-000000000000")
await audit_logger.log_deletion(
db_session=db,
tenant_id=tenant_id_str,
user_id=current_user["user_id"],
resource_type="user",
resource_id=user_id,
resource_data=user_info,
description=f"Admin {current_user.get('email', current_user['user_id'])} initiated deletion of user {user_info.get('email', user_id)}",
endpoint="/delete/{user_id}",
method="DELETE"
)
except Exception as audit_error:
logger.warning("Failed to log audit event", error=str(audit_error))
# Start deletion as background task for better performance
background_tasks.add_task(
execute_admin_user_deletion,
user_id=user_id,
requesting_user_id=current_user["user_id"]
)
return {
"success": True,
"message": f"Admin user deletion for {user_id} has been initiated",
"status": "processing",
"user_info": user_info,
"initiated_at": datetime.utcnow().isoformat(),
"note": "Deletion is processing in the background. Check logs for completion status."
}
# Background task that performs the complete admin user deletion after the HTTP response is returned
async def execute_admin_user_deletion(user_id: str, requesting_user_id: str):
"""
Background task using shared infrastructure
"""
# ✅ Use the shared background session
async with get_background_db_session() as session:
deletion_service = AdminUserDeleteService(session)
result = await deletion_service.delete_admin_user_complete(
user_id=user_id,
requesting_user_id=requesting_user_id
)
logger.info("Background admin user deletion completed successfully",
user_id=user_id,
requesting_user=requesting_user_id,
result=result)
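# Illustrative sketch (assumption, not part of this router): how an ops script or another
# service might call the deletion endpoint above. The base URL and bearer token are
# placeholders; the checked field mirrors the dict returned by delete_admin_user.
async def request_admin_deletion_example(user_id: str, admin_token: str) -> dict:
    import httpx
    async with httpx.AsyncClient(base_url="http://auth-service:8000", timeout=10.0) as client:
        response = await client.delete(
            f"/api/v1/auth/users/{user_id}",
            headers={"Authorization": f"Bearer {admin_token}"},
        )
        response.raise_for_status()
        payload = response.json()
        # The endpoint responds immediately; the deletion itself runs in the background task above
        assert payload.get("status") == "processing"
        return payload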
@router.get("/api/v1/auth/users/{user_id}/deletion-preview")
async def preview_user_deletion(
user_id: str = Path(..., description="User ID"),
db: AsyncSession = Depends(get_db)
):
"""
Preview what data would be deleted for an admin user.
This endpoint provides a dry-run preview of the deletion operation
without actually deleting any data.
"""
try:
uuid.UUID(user_id)
except ValueError:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail="Invalid user ID format"
)
deletion_service = AdminUserDeleteService(db)
# Get user info
user_info = await deletion_service._validate_admin_user(user_id)
if not user_info:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail=f"Admin user {user_id} not found"
)
# Get tenant associations
tenant_info = await deletion_service._get_user_tenant_info(user_id)
# Build preview
preview = {
"user": user_info,
"tenant_associations": tenant_info,
"estimated_deletions": {
"training_models": "All models for associated tenants",
"forecasts": "All forecasts for associated tenants",
"notifications": "All user notification data",
"tenant_memberships": tenant_info['total_tenants'],
"owned_tenants": f"{tenant_info['owned_tenants']} (will be transferred or deleted)"
},
"warning": "This operation is irreversible and will permanently delete all associated data"
}
return preview
@router.get("/api/v1/auth/users/{user_id}", response_model=UserResponse)
async def get_user_by_id(
user_id: str = Path(..., description="User ID"),
db: AsyncSession = Depends(get_db)
):
"""
Get user information by user ID.
This endpoint is for internal service-to-service communication.
It returns user details needed by other services (e.g., tenant service for enriching member data).
"""
try:
# Validate UUID format
try:
uuid.UUID(user_id)
except ValueError:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail="Invalid user ID format"
)
# Fetch user from database
from app.repositories import UserRepository
user_repo = UserRepository(User, db)
user = await user_repo.get_by_id(user_id)
if not user:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail=f"User {user_id} not found"
)
logger.debug("Retrieved user by ID", user_id=user_id, email=user.email)
return UserResponse(
id=str(user.id),
email=user.email,
full_name=user.full_name,
is_active=user.is_active,
is_verified=user.is_verified,
phone=user.phone,
language=user.language or "es",
timezone=user.timezone or "Europe/Madrid",
created_at=user.created_at,
last_login=user.last_login,
role=user.role,
tenant_id=None,
payment_customer_id=user.payment_customer_id,
default_payment_method_id=user.default_payment_method_id
)
except HTTPException:
raise
except Exception as e:
logger.error("Get user by ID error", user_id=user_id, error=str(e))
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to get user information"
)
@router.put("/api/v1/auth/users/{user_id}", response_model=UserResponse)
async def update_user_profile(
user_id: str = Path(..., description="User ID"),
update_data: UserUpdate = Body(..., description="User profile update data"),
current_user = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""
Update user profile information.
This endpoint allows users to update their profile information including:
- Full name
- Phone number
- Language preference
- Timezone
**Permissions:** Users can update their own profile, admins can update any user's profile
"""
try:
# Validate UUID format
try:
uuid.UUID(user_id)
except ValueError:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail="Invalid user ID format"
)
# Check permissions - user can update their own profile, admins can update any
if current_user["user_id"] != user_id:
# Check if current user has admin privileges
user_role = current_user.get("role", "user")
if user_role not in ["admin", "super_admin", "manager"]:
raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN,
detail="Insufficient permissions to update this user's profile"
)
# Fetch user from database
from app.repositories import UserRepository
user_repo = UserRepository(User, db)
user = await user_repo.get_by_id(user_id)
if not user:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail=f"User {user_id} not found"
)
# Prepare update data (only include fields that are provided)
update_fields = update_data.dict(exclude_unset=True)
if not update_fields:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail="No update data provided"
)
# Update user
updated_user = await user_repo.update(user_id, update_fields)
logger.info("User profile updated", user_id=user_id, updated_fields=list(update_fields.keys()))
# Log audit event for user profile update
try:
# Get tenant_id from current_user or use a placeholder for system-level operations
tenant_id_str = current_user.get("tenant_id", "00000000-0000-0000-0000-000000000000")
await audit_logger.log_event(
db_session=db,
tenant_id=tenant_id_str,
user_id=current_user["user_id"],
action=AuditAction.UPDATE.value,
resource_type="user",
resource_id=user_id,
severity=AuditSeverity.MEDIUM.value,
description=f"User {current_user.get('email', current_user['user_id'])} updated profile for user {user.email}",
changes={"updated_fields": list(update_fields.keys())},
audit_metadata={"updated_data": update_fields},
endpoint="/users/{user_id}",
method="PUT"
)
except Exception as audit_error:
logger.warning("Failed to log audit event", error=str(audit_error))
return UserResponse(
id=str(updated_user.id),
email=updated_user.email,
full_name=updated_user.full_name,
is_active=updated_user.is_active,
is_verified=updated_user.is_verified,
phone=updated_user.phone,
language=updated_user.language or "es",
timezone=updated_user.timezone or "Europe/Madrid",
created_at=updated_user.created_at,
last_login=updated_user.last_login,
role=updated_user.role,
tenant_id=None,
payment_customer_id=updated_user.payment_customer_id,
default_payment_method_id=updated_user.default_payment_method_id
)
except HTTPException:
raise
except Exception as e:
logger.error("Update user profile error", user_id=user_id, error=str(e))
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to update user profile"
)
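# Illustrative sketch (assumption): a partial profile update against the endpoint above.
# Field names follow the docstring (full_name, phone, language, timezone); omitted fields
# stay untouched because the handler uses update_data.dict(exclude_unset=True).
def update_profile_example(user_id: str, token: str) -> dict:
    import httpx
    body = {"full_name": "Maria Garcia", "language": "es"}  # only these two fields change
    response = httpx.put(
        f"http://auth-service:8000/api/v1/auth/users/{user_id}",
        json=body,
        headers={"Authorization": f"Bearer {token}"},
        timeout=10.0,
    )
    response.raise_for_status()
    return response.json()  # serialized UserResponse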
@router.post("/api/v1/auth/users/create-by-owner", response_model=UserResponse)
async def create_user_by_owner(
user_data: OwnerUserCreate,
current_user: Dict[str, Any] = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""
Create a new user account (owner/admin only - for pilot phase).
This endpoint allows tenant owners to directly create user accounts
with passwords during the pilot phase. In production, this will be
replaced with an invitation-based flow.
**Permissions:** Owner or Admin role required
**Security:** Password is hashed server-side before storage
"""
try:
# Verify caller has admin or owner privileges
# In pilot phase, we allow 'admin' role from auth service
user_role = current_user.get("role", "user")
if user_role not in ["admin", "super_admin", "manager"]:
raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN,
detail="Only administrators can create users directly"
)
# Validate email uniqueness
from app.repositories import UserRepository
user_repo = UserRepository(User, db)
existing_user = await user_repo.get_by_email(user_data.email)
if existing_user:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=f"User with email {user_data.email} already exists"
)
# Hash password
from app.core.security import SecurityManager
hashed_password = SecurityManager.hash_password(user_data.password)
# Create user
create_data = {
"email": user_data.email,
"full_name": user_data.full_name,
"hashed_password": hashed_password,
"phone": user_data.phone,
"role": user_data.role,
"language": user_data.language or "es",
"timezone": user_data.timezone or "Europe/Madrid",
"is_active": True,
"is_verified": False # Can be verified later if needed
}
new_user = await user_repo.create_user(create_data)
logger.info(
"User created by owner",
created_user_id=str(new_user.id),
created_user_email=new_user.email,
created_by=current_user.get("user_id"),
created_by_email=current_user.get("email")
)
# Return user response
return UserResponse(
id=str(new_user.id),
email=new_user.email,
full_name=new_user.full_name,
is_active=new_user.is_active,
is_verified=new_user.is_verified,
phone=new_user.phone,
language=new_user.language,
timezone=new_user.timezone,
created_at=new_user.created_at,
last_login=new_user.last_login,
role=new_user.role,
tenant_id=None # Will be set when added to tenant
)
except HTTPException:
raise
except Exception as e:
logger.error(
"Failed to create user by owner",
email=user_data.email,
error=str(e),
created_by=current_user.get("user_id")
)
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to create user account"
)
@router.post("/api/v1/auth/users/batch", response_model=Dict[str, Any])
async def get_users_batch(
request: BatchUserRequest,
db: AsyncSession = Depends(get_db)
):
"""
Get multiple users by their IDs in a single request.
This endpoint is for internal service-to-service communication.
It efficiently fetches multiple user records needed by other services
(e.g., tenant service for enriching member lists).
Returns a dict mapping user_id -> user data, with null for non-existent users.
"""
try:
# Validate all UUIDs
validated_ids = []
for user_id in request.user_ids:
try:
uuid.UUID(user_id)
validated_ids.append(user_id)
except ValueError:
logger.warning(f"Invalid user ID format in batch request: {user_id}")
continue
if not validated_ids:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail="No valid user IDs provided"
)
# Fetch users from database
from app.repositories import UserRepository
user_repo = UserRepository(User, db)
# Build response map
user_map = {}
for user_id in validated_ids:
user = await user_repo.get_by_id(user_id)
if user:
user_map[user_id] = {
"id": str(user.id),
"email": user.email,
"full_name": user.full_name,
"is_active": user.is_active,
"is_verified": user.is_verified,
"phone": user.phone,
"language": user.language or "es",
"timezone": user.timezone or "Europe/Madrid",
"created_at": user.created_at.isoformat() if user.created_at else None,
"last_login": user.last_login.isoformat() if user.last_login else None,
"role": user.role
}
else:
user_map[user_id] = None
logger.debug(
"Batch user fetch completed",
requested_count=len(request.user_ids),
found_count=sum(1 for v in user_map.values() if v is not None)
)
return {
"users": user_map,
"requested_count": len(request.user_ids),
"found_count": sum(1 for v in user_map.values() if v is not None)
}
except HTTPException:
raise
except Exception as e:
logger.error("Batch user fetch error", error=str(e))
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to fetch users"
)
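# Illustrative sketch (assumption): batch lookup as used for service-to-service enrichment.
# BatchUserRequest is assumed to carry a "user_ids" list, matching request.user_ids above;
# users that do not exist come back as null/None and should be filtered out by the caller.
def fetch_users_batch_example(user_ids: list, service_token: str) -> dict:
    import httpx
    response = httpx.post(
        "http://auth-service:8000/api/v1/auth/users/batch",
        json={"user_ids": user_ids},
        headers={"Authorization": f"Bearer {service_token}"},
        timeout=10.0,
    )
    response.raise_for_status()
    data = response.json()
    return {uid: info for uid, info in data["users"].items() if info is not None}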
@router.get("/api/v1/auth/users/{user_id}/activity")
async def get_user_activity(
user_id: str = Path(..., description="User ID"),
current_user = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""
Get user activity information.
This endpoint returns detailed activity information for a user including:
- Last login timestamp
- Account creation date
- Active session count
- Last activity timestamp
- User status information
**Permissions:** User can view their own activity, admins can view any user's activity
"""
try:
# Validate UUID format
try:
uuid.UUID(user_id)
except ValueError:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail="Invalid user ID format"
)
# Check permissions - user can view their own activity, admins can view any
if current_user["user_id"] != user_id:
# Check if current user has admin privileges
user_role = current_user.get("role", "user")
if user_role not in ["admin", "super_admin", "manager"]:
raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN,
detail="Insufficient permissions to view this user's activity"
)
# Initialize enhanced user service
from app.core.config import settings
from shared.database.base import create_database_manager
        database_manager = create_database_manager(settings.DATABASE_URL, "auth-service")
user_service = EnhancedUserService(database_manager)
# Get user activity data
activity_data = await user_service.get_user_activity(user_id)
if "error" in activity_data:
if activity_data["error"] == "User not found":
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail="User not found"
)
else:
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=f"Failed to get user activity: {activity_data['error']}"
)
logger.debug("Retrieved user activity", user_id=user_id)
return activity_data
except HTTPException:
raise
except Exception as e:
logger.error("Get user activity error", user_id=user_id, error=str(e))
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to get user activity information"
)
@router.patch("/api/v1/auth/users/{user_id}/tenant")
async def update_user_tenant(
user_id: str = Path(..., description="User ID"),
tenant_data: Dict[str, Any] = Body(..., description="Tenant data containing tenant_id"),
db: AsyncSession = Depends(get_db)
):
"""
Update user's tenant_id after tenant registration
This endpoint is called by the tenant service after a user creates their tenant.
It links the user to their newly created tenant.
"""
try:
# Log the incoming request data for debugging
logger.debug("Received tenant update request",
user_id=user_id,
tenant_data=tenant_data)
tenant_id = tenant_data.get("tenant_id")
if not tenant_id:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail="tenant_id is required"
)
logger.info("Updating user tenant_id",
user_id=user_id,
tenant_id=tenant_id)
user_service = EnhancedUserService(db)
user = await user_service.get_user_by_id(uuid.UUID(user_id), session=db)
if not user:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail="User not found"
)
# DEPRECATED: User-tenant relationships are now managed by tenant service
# This endpoint is kept for backward compatibility but does nothing
# The tenant service should manage user-tenant relationships internally
logger.warning("DEPRECATED: update_user_tenant endpoint called - user-tenant relationships are now managed by tenant service",
user_id=user_id,
tenant_id=tenant_id)
# Return success for backward compatibility, but don't actually update anything
return {
"success": True,
"user_id": str(user.id),
"tenant_id": tenant_id,
"message": "User-tenant relationships are now managed by tenant service. This endpoint is deprecated."
}
except HTTPException:
raise
except Exception as e:
logger.error("Failed to update user tenant_id",
user_id=user_id,
error=str(e))
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to update user tenant_id"
)

View File

View File

@@ -0,0 +1,132 @@
"""
Authentication dependency for auth service
services/auth/app/core/auth.py
"""
from fastapi import Depends, HTTPException, status
from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select
from jose import JWTError, jwt
import structlog
from app.core.config import settings
from app.core.database import get_db
from app.models.users import User
logger = structlog.get_logger()
security = HTTPBearer()
async def get_current_user(
credentials: HTTPAuthorizationCredentials = Depends(security),
db: AsyncSession = Depends(get_db)
) -> User:
"""
Dependency to get the current authenticated user
"""
credentials_exception = HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail="Could not validate credentials",
headers={"WWW-Authenticate": "Bearer"},
)
try:
# Decode JWT token
payload = jwt.decode(
credentials.credentials,
settings.JWT_SECRET_KEY,
algorithms=[settings.JWT_ALGORITHM]
)
# Get user identifier from token
user_id: str = payload.get("sub")
if user_id is None:
logger.warning("Token payload missing 'sub' field")
raise credentials_exception
logger.info(f"Authenticating user: {user_id}")
except JWTError as e:
logger.warning(f"JWT decode error: {e}")
raise credentials_exception
try:
# Get user from database
result = await db.execute(
select(User).where(User.id == user_id)
)
user = result.scalar_one_or_none()
if user is None:
logger.warning(f"User not found for ID: {user_id}")
raise credentials_exception
if not user.is_active:
logger.warning(f"Inactive user attempted access: {user_id}")
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail="Inactive user"
)
logger.info(f"User authenticated: {user.email}")
return user
    except HTTPException:
        # Re-raise the explicit "Inactive user" response instead of masking it as a credentials error
        raise
    except Exception as e:
        logger.error(f"Error getting user: {e}")
        raise credentials_exception
async def get_current_active_user(
current_user: User = Depends(get_current_user)
) -> User:
"""
Dependency to get the current active user
"""
if not current_user.is_active:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail="Inactive user"
)
return current_user
def verify_password(plain_password: str, hashed_password: str) -> bool:
"""Verify a password against its hash"""
from passlib.context import CryptContext
pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
return pwd_context.verify(plain_password, hashed_password)
def get_password_hash(password: str) -> str:
"""Generate password hash"""
from passlib.context import CryptContext
pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
return pwd_context.hash(password)
def create_access_token(data: dict, expires_delta=None):
"""Create JWT access token"""
from datetime import datetime, timedelta
to_encode = data.copy()
if expires_delta:
expire = datetime.utcnow() + expires_delta
else:
expire = datetime.utcnow() + timedelta(minutes=settings.ACCESS_TOKEN_EXPIRE_MINUTES)
to_encode.update({"exp": expire})
encoded_jwt = jwt.encode(to_encode, settings.JWT_SECRET_KEY, algorithm=settings.JWT_ALGORITHM)
return encoded_jwt
def create_refresh_token(data: dict):
"""Create JWT refresh token"""
from datetime import datetime, timedelta
to_encode = data.copy()
expire = datetime.utcnow() + timedelta(days=settings.REFRESH_TOKEN_EXPIRE_DAYS)
to_encode.update({"exp": expire})
encoded_jwt = jwt.encode(to_encode, settings.JWT_SECRET_KEY, algorithm=settings.JWT_ALGORITHM)
return encoded_jwt
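
A minimal sketch of how these dependencies are meant to be consumed; the route path below is illustrative only and does not exist elsewhere in this service, and in a real router module get_current_active_user and User would be imported from app.core.auth and app.models.users.

from fastapi import APIRouter, Depends

example_router = APIRouter()

@example_router.get("/api/v1/auth/whoami")
async def whoami(current_user: User = Depends(get_current_active_user)):
    # The dependency chain decodes the bearer token, loads the User row and rejects inactive accounts
    return {"id": str(current_user.id), "email": current_user.email, "role": current_user.role}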

View File

@@ -0,0 +1,70 @@
# ================================================================
# AUTH SERVICE CONFIGURATION
# services/auth/app/core/config.py
# ================================================================
"""
Authentication service configuration
User management and JWT token handling
"""
from shared.config.base import BaseServiceSettings
import os
class AuthSettings(BaseServiceSettings):
"""Auth service specific settings"""
# Service Identity
APP_NAME: str = "Authentication Service"
SERVICE_NAME: str = "auth-service"
DESCRIPTION: str = "User authentication and authorization service"
# Database configuration (secure approach - build from components)
@property
def DATABASE_URL(self) -> str:
"""Build database URL from secure components"""
# Try complete URL first (for backward compatibility)
complete_url = os.getenv("AUTH_DATABASE_URL")
if complete_url:
return complete_url
# Build from components (secure approach)
user = os.getenv("AUTH_DB_USER", "auth_user")
password = os.getenv("AUTH_DB_PASSWORD", "auth_pass123")
host = os.getenv("AUTH_DB_HOST", "localhost")
port = os.getenv("AUTH_DB_PORT", "5432")
name = os.getenv("AUTH_DB_NAME", "auth_db")
return f"postgresql+asyncpg://{user}:{password}@{host}:{port}/{name}"
# Redis Database (dedicated for auth)
REDIS_DB: int = 0
# Enhanced Password Requirements for Spain
PASSWORD_MIN_LENGTH: int = 8
PASSWORD_REQUIRE_UPPERCASE: bool = True
PASSWORD_REQUIRE_LOWERCASE: bool = True
PASSWORD_REQUIRE_NUMBERS: bool = True
PASSWORD_REQUIRE_SYMBOLS: bool = False
# Spanish GDPR Compliance
GDPR_COMPLIANCE_ENABLED: bool = True
DATA_RETENTION_DAYS: int = int(os.getenv("AUTH_DATA_RETENTION_DAYS", "365"))
CONSENT_REQUIRED: bool = True
PRIVACY_POLICY_URL: str = os.getenv("PRIVACY_POLICY_URL", "/privacy")
# Account Security
ACCOUNT_LOCKOUT_ENABLED: bool = True
MAX_LOGIN_ATTEMPTS: int = 5
LOCKOUT_DURATION_MINUTES: int = 30
PASSWORD_HISTORY_COUNT: int = 5
# Session Management
SESSION_TIMEOUT_MINUTES: int = int(os.getenv("SESSION_TIMEOUT_MINUTES", "60"))
CONCURRENT_SESSIONS_LIMIT: int = int(os.getenv("CONCURRENT_SESSIONS_LIMIT", "3"))
# Email Verification
EMAIL_VERIFICATION_REQUIRED: bool = os.getenv("EMAIL_VERIFICATION_REQUIRED", "true").lower() == "true"
EMAIL_VERIFICATION_EXPIRE_HOURS: int = int(os.getenv("EMAIL_VERIFICATION_EXPIRE_HOURS", "24"))
settings = AuthSettings()
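
A quick sketch of how the component-based configuration resolves; the credential values are placeholders, and the printed URL assumes AUTH_DATABASE_URL is not set.

import os

os.environ.update({
    "AUTH_DB_USER": "auth_user",
    "AUTH_DB_PASSWORD": "s3cret",
    "AUTH_DB_HOST": "postgres",
    "AUTH_DB_PORT": "5432",
    "AUTH_DB_NAME": "auth_db",
})

# DATABASE_URL is a property, so it re-reads the environment on each access:
print(settings.DATABASE_URL)
# -> postgresql+asyncpg://auth_user:s3cret@postgres:5432/auth_db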

View File

@@ -0,0 +1,290 @@
# ================================================================
# services/auth/app/core/database.py
# ================================================================
"""
Database configuration for authentication service
Uses shared database infrastructure for consistency
"""
import structlog
from typing import AsyncGenerator
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import text
from shared.database.base import DatabaseManager, Base
from app.core.config import settings
logger = structlog.get_logger()
# The shared DatabaseManager below supplies the engine, session factory, get_db dependency
# and create_tables, so no service-local engine or sessionmaker is defined in this module.
# ✅ Initialize database manager using shared infrastructure
database_manager = DatabaseManager(settings.DATABASE_URL)
# ✅ Alias for convenience - matches the existing interface
get_db = database_manager.get_db
# ✅ Use the shared background session method
get_background_db_session = database_manager.get_background_session
async def get_db_health() -> bool:
"""
Health check function for database connectivity
"""
try:
async with database_manager.async_engine.begin() as conn:
await conn.execute(text("SELECT 1"))
logger.debug("Database health check passed")
return True
except Exception as e:
logger.error(f"Database health check failed: {str(e)}")
return False
async def create_tables():
"""Create database tables using shared infrastructure"""
await database_manager.create_tables()
logger.info("Auth database tables created successfully")
# ✅ Auth service specific database utilities
class AuthDatabaseUtils:
"""Auth service specific database utilities"""
@staticmethod
async def cleanup_old_refresh_tokens(days_old: int = 30):
"""Clean up old refresh tokens"""
try:
async with database_manager.get_background_session() as session:
if settings.DATABASE_URL.startswith("sqlite"):
query = text(
"DELETE FROM refresh_tokens "
"WHERE created_at < datetime('now', :days_param)"
)
params = {"days_param": f"-{days_old} days"}
else:
# PostgreSQL
query = text(
"DELETE FROM refresh_tokens "
"WHERE created_at < NOW() - INTERVAL :days_param"
)
params = {"days_param": f"{days_old} days"}
result = await session.execute(query, params)
# No need to commit - get_background_session() handles it
logger.info("Cleaned up old refresh tokens",
deleted_count=result.rowcount,
days_old=days_old)
return result.rowcount
except Exception as e:
logger.error("Failed to cleanup old refresh tokens",
error=str(e))
return 0
@staticmethod
async def get_auth_statistics(tenant_id: str = None) -> dict:
"""Get authentication statistics"""
try:
async with database_manager.get_background_session() as session:
# Base query for users
users_query = text("SELECT COUNT(*) as count FROM users WHERE is_active = :is_active")
params = {}
if tenant_id:
# If tenant filtering is needed (though auth service might not have tenant_id in users table)
# This is just an example - adjust based on your actual schema
pass
# Get active users count
active_users_result = await session.execute(
users_query,
{**params, "is_active": True}
)
active_users = active_users_result.scalar() or 0
# Get inactive users count
inactive_users_result = await session.execute(
users_query,
{**params, "is_active": False}
)
inactive_users = inactive_users_result.scalar() or 0
# Get refresh tokens count
tokens_query = text("SELECT COUNT(*) as count FROM refresh_tokens")
tokens_result = await session.execute(tokens_query)
active_tokens = tokens_result.scalar() or 0
return {
"active_users": active_users,
"inactive_users": inactive_users,
"total_users": active_users + inactive_users,
"active_tokens": active_tokens
}
except Exception as e:
logger.error(f"Failed to get auth statistics: {str(e)}")
return {
"active_users": 0,
"inactive_users": 0,
"total_users": 0,
"active_tokens": 0
}
@staticmethod
async def check_user_exists(user_id: str) -> bool:
"""Check if user exists"""
try:
async with database_manager.get_background_session() as session:
query = text(
"SELECT COUNT(*) as count "
"FROM users "
"WHERE id = :user_id "
"LIMIT 1"
)
result = await session.execute(query, {"user_id": user_id})
count = result.scalar() or 0
return count > 0
except Exception as e:
logger.error("Failed to check user existence",
user_id=user_id, error=str(e))
return False
@staticmethod
async def get_user_token_count(user_id: str) -> int:
"""Get count of active refresh tokens for a user"""
try:
async with database_manager.get_background_session() as session:
query = text(
"SELECT COUNT(*) as count "
"FROM refresh_tokens "
"WHERE user_id = :user_id"
)
result = await session.execute(query, {"user_id": user_id})
count = result.scalar() or 0
return count
except Exception as e:
logger.error("Failed to get user token count",
user_id=user_id, error=str(e))
return 0
# Enhanced database session dependency with better error handling
async def get_db_session() -> AsyncGenerator[AsyncSession, None]:
"""
Enhanced database session dependency with better logging and error handling
"""
async with database_manager.async_session_local() as session:
try:
logger.debug("Database session created")
yield session
except Exception as e:
logger.error(f"Database session error: {str(e)}", exc_info=True)
await session.rollback()
raise
finally:
await session.close()
logger.debug("Database session closed")
# Database initialization for auth service
async def initialize_auth_database():
"""Initialize database tables for auth service"""
try:
logger.info("Initializing auth service database")
# Import models to ensure they're registered
from app.models.users import User
from app.models.refresh_tokens import RefreshToken
# Create tables using shared infrastructure
await database_manager.create_tables()
logger.info("Auth service database initialized successfully")
except Exception as e:
logger.error(f"Failed to initialize auth service database: {str(e)}")
raise
# Database cleanup for auth service
async def cleanup_auth_database():
"""Cleanup database connections for auth service"""
try:
logger.info("Cleaning up auth service database connections")
# Close engine connections
if hasattr(database_manager, 'async_engine') and database_manager.async_engine:
await database_manager.async_engine.dispose()
logger.info("Auth service database cleanup completed")
except Exception as e:
logger.error(f"Failed to cleanup auth service database: {str(e)}")
# Export the commonly used items to maintain compatibility
__all__ = [
'Base',
'database_manager',
'get_db',
'get_background_db_session',
'get_db_session',
'get_db_health',
'AuthDatabaseUtils',
'initialize_auth_database',
'cleanup_auth_database',
'create_tables'
]
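
A sketch of how the background-session helpers above could be driven from a one-off maintenance entry point; how it is scheduled (cron, a Celery beat, a Kubernetes CronJob) is an assumption outside this module.

import asyncio

async def nightly_auth_maintenance() -> None:
    deleted = await AuthDatabaseUtils.cleanup_old_refresh_tokens(days_old=30)
    stats = await AuthDatabaseUtils.get_auth_statistics()
    logger.info("Nightly auth maintenance finished", deleted_tokens=deleted, **stats)

if __name__ == "__main__":
    asyncio.run(nightly_auth_maintenance())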

View File

@@ -0,0 +1,453 @@
# services/auth/app/core/security.py - FIXED VERSION
"""
Security utilities for authentication service
FIXED VERSION - Consistent password hashing using passlib
"""
import re
import hashlib
from datetime import datetime, timedelta, timezone
from typing import Optional, Dict, Any, List
from shared.redis_utils import get_redis_client
from fastapi import HTTPException, status
import structlog
from passlib.context import CryptContext
from app.core.config import settings
from shared.auth.jwt_handler import JWTHandler
logger = structlog.get_logger()
# ✅ FIX: Use passlib for consistent password hashing
pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
# Initialize JWT handler with SAME configuration as gateway
jwt_handler = JWTHandler(settings.JWT_SECRET_KEY, settings.JWT_ALGORITHM)
# Note: Redis client is now accessed via get_redis_client() from shared.redis_utils
class SecurityManager:
"""Security utilities for authentication - FIXED VERSION"""
@staticmethod
def validate_password(password: str) -> bool:
"""Validate password strength"""
if len(password) < settings.PASSWORD_MIN_LENGTH:
return False
if len(password) > 128: # Max length from Pydantic schema
return False
if settings.PASSWORD_REQUIRE_UPPERCASE and not re.search(r'[A-Z]', password):
return False
if settings.PASSWORD_REQUIRE_LOWERCASE and not re.search(r'[a-z]', password):
return False
if settings.PASSWORD_REQUIRE_NUMBERS and not re.search(r'\d', password):
return False
if settings.PASSWORD_REQUIRE_SYMBOLS and not re.search(r'[!@#$%^&*(),.?":{}|<>]', password):
return False
return True
@staticmethod
def get_password_validation_errors(password: str) -> List[str]:
"""Get detailed password validation errors for better UX"""
errors = []
if len(password) < settings.PASSWORD_MIN_LENGTH:
errors.append(f"Password must be at least {settings.PASSWORD_MIN_LENGTH} characters long")
if len(password) > 128:
errors.append("Password cannot exceed 128 characters")
if settings.PASSWORD_REQUIRE_UPPERCASE and not re.search(r'[A-Z]', password):
errors.append("Password must contain at least one uppercase letter")
if settings.PASSWORD_REQUIRE_LOWERCASE and not re.search(r'[a-z]', password):
errors.append("Password must contain at least one lowercase letter")
if settings.PASSWORD_REQUIRE_NUMBERS and not re.search(r'\d', password):
errors.append("Password must contain at least one number")
if settings.PASSWORD_REQUIRE_SYMBOLS and not re.search(r'[!@#$%^&*(),.?":{}|<>]', password):
errors.append("Password must contain at least one symbol (!@#$%^&*(),.?\":{}|<>)")
return errors
@staticmethod
def hash_password(password: str) -> str:
"""Hash password using passlib bcrypt - FIXED"""
return pwd_context.hash(password)
@staticmethod
def verify_password(password: str, hashed_password: str) -> bool:
"""Verify password against hash using passlib - FIXED"""
try:
return pwd_context.verify(password, hashed_password)
except Exception as e:
logger.error(f"Password verification error: {e}")
return False
@staticmethod
def create_access_token(user_data: Dict[str, Any]) -> str:
"""
Create JWT ACCESS token with proper payload structure
✅ FIXED: Only creates access tokens
"""
# Validate required fields for access token
if "user_id" not in user_data:
raise ValueError("user_id required for access token creation")
if "email" not in user_data:
raise ValueError("email required for access token creation")
try:
expire = datetime.now(timezone.utc) + timedelta(minutes=settings.JWT_ACCESS_TOKEN_EXPIRE_MINUTES)
# ✅ FIX 1: ACCESS TOKEN payload structure
payload = {
"sub": user_data["user_id"],
"user_id": user_data["user_id"],
"email": user_data["email"],
"type": "access", # ✅ EXPLICITLY set as access token
"exp": expire,
"iat": datetime.now(timezone.utc),
"iss": "bakery-auth"
}
# Add optional fields for access tokens
if "full_name" in user_data:
payload["full_name"] = user_data["full_name"]
if "is_verified" in user_data:
payload["is_verified"] = user_data["is_verified"]
if "is_active" in user_data:
payload["is_active"] = user_data["is_active"]
# ✅ CRITICAL FIX: Include role in access token!
if "role" in user_data:
payload["role"] = user_data["role"]
else:
payload["role"] = "admin" # Default role if not specified
# NEW: Add subscription data to JWT payload
if "tenant_id" in user_data:
payload["tenant_id"] = user_data["tenant_id"]
if "tenant_role" in user_data:
payload["tenant_role"] = user_data["tenant_role"]
if "subscription" in user_data:
payload["subscription"] = user_data["subscription"]
if "tenant_access" in user_data:
# Limit tenant_access to 10 entries to prevent JWT size explosion
tenant_access = user_data["tenant_access"]
if tenant_access and len(tenant_access) > 10:
tenant_access = tenant_access[:10]
logger.warning(f"Truncated tenant_access to 10 entries for user {user_data['user_id']}")
payload["tenant_access"] = tenant_access
logger.debug(f"Creating access token with payload keys: {list(payload.keys())}")
# ✅ FIX 2: Use JWT handler to create access token
token = jwt_handler.create_access_token_from_payload(payload)
logger.debug(f"Access token created successfully for user {user_data['email']}")
return token
except Exception as e:
logger.error(f"Access token creation failed for {user_data.get('email', 'unknown')}: {e}")
raise ValueError(f"Failed to create access token: {str(e)}")
@staticmethod
def create_refresh_token(user_data: Dict[str, Any]) -> str:
"""
Create JWT REFRESH token with minimal payload structure
✅ FIXED: Only creates refresh tokens, different from access tokens
"""
# Validate required fields for refresh token
if "user_id" not in user_data:
raise ValueError("user_id required for refresh token creation")
if not user_data.get("user_id"):
raise ValueError("user_id cannot be empty")
try:
expire = datetime.now(timezone.utc) + timedelta(days=settings.JWT_REFRESH_TOKEN_EXPIRE_DAYS)
# ✅ FIX 3: REFRESH TOKEN payload structure (minimal, different from access)
payload = {
"sub": user_data["user_id"],
"user_id": user_data["user_id"],
"type": "refresh", # ✅ EXPLICITLY set as refresh token
"exp": expire,
"iat": datetime.now(timezone.utc),
"iss": "bakery-auth"
}
# Add unique JTI for refresh tokens to prevent duplicates
if "jti" in user_data:
payload["jti"] = user_data["jti"]
else:
import uuid
payload["jti"] = str(uuid.uuid4())
# Include email only if available (optional for refresh tokens)
if "email" in user_data and user_data["email"]:
payload["email"] = user_data["email"]
logger.debug(f"Creating refresh token with payload keys: {list(payload.keys())}")
# ✅ FIX 4: Use JWT handler to create REFRESH token (not access token!)
token = jwt_handler.create_refresh_token_from_payload(payload)
logger.debug(f"Refresh token created successfully for user {user_data['user_id']}")
return token
except Exception as e:
logger.error(f"Refresh token creation failed for {user_data.get('user_id', 'unknown')}: {e}")
raise ValueError(f"Failed to create refresh token: {str(e)}")
@staticmethod
def verify_token(token: str) -> Optional[Dict[str, Any]]:
"""Verify JWT token with enhanced error handling"""
try:
payload = jwt_handler.verify_token(token)
if payload:
logger.debug(f"Token verified successfully for user: {payload.get('email', 'unknown')}")
return payload
except Exception as e:
logger.warning(f"Token verification failed: {e}")
return None
@staticmethod
def decode_token(token: str) -> Dict[str, Any]:
"""Decode JWT token without verification (for refresh token handling)"""
try:
payload = jwt_handler.decode_token_no_verify(token)
return payload
except Exception as e:
logger.error(f"Token decoding failed: {e}")
raise ValueError("Invalid token format")
@staticmethod
def generate_secure_hash(data: str) -> str:
"""Generate secure hash for token storage"""
return hashlib.sha256(data.encode()).hexdigest()
@staticmethod
def create_service_token(service_name: str, tenant_id: Optional[str] = None) -> str:
"""
Create JWT service token for inter-service communication
✅ UNIFIED: Uses shared JWT handler for consistent token creation
✅ ENHANCED: Supports tenant context for tenant-scoped operations
Args:
service_name: Name of the service (e.g., 'auth-service', 'tenant-service')
tenant_id: Optional tenant ID for tenant-scoped service operations
Returns:
Encoded JWT service token
"""
try:
# Use unified JWT handler to create service token
token = jwt_handler.create_service_token(
service_name=service_name,
tenant_id=tenant_id
)
logger.debug(f"Created service token for {service_name}", tenant_id=tenant_id)
return token
except Exception as e:
logger.error(f"Failed to create service token for {service_name}: {e}")
raise ValueError(f"Failed to create service token: {str(e)}")
@staticmethod
def is_token_expired(token: str) -> bool:
"""Check if token is expired"""
try:
payload = SecurityManager.decode_token(token)
exp_timestamp = payload.get("exp")
if exp_timestamp:
exp_datetime = datetime.fromtimestamp(exp_timestamp, tz=timezone.utc)
return datetime.now(timezone.utc) > exp_datetime
return True
except Exception:
return True
@staticmethod
async def track_login_attempt(email: str, ip_address: str, success: bool) -> None:
"""Track login attempts for security monitoring"""
try:
redis_client = await get_redis_client()
key = f"login_attempts:{email}:{ip_address}"
if success:
# Clear failed attempts on successful login
await redis_client.delete(key)
else:
# Increment failed attempts
attempts = await redis_client.incr(key)
if attempts == 1:
# Set expiration on first failed attempt
await redis_client.expire(key, settings.LOCKOUT_DURATION_MINUTES * 60)
if attempts >= settings.MAX_LOGIN_ATTEMPTS:
logger.warning(f"Account locked for {email} from {ip_address}")
raise HTTPException(
status_code=status.HTTP_429_TOO_MANY_REQUESTS,
detail=f"Too many failed login attempts. Try again in {settings.LOCKOUT_DURATION_MINUTES} minutes."
)
except HTTPException:
raise # Re-raise HTTPException
except Exception as e:
logger.error(f"Failed to track login attempt: {e}")
@staticmethod
async def is_account_locked(email: str, ip_address: str) -> bool:
"""Check if account is locked due to failed login attempts"""
try:
redis_client = await get_redis_client()
key = f"login_attempts:{email}:{ip_address}"
attempts = await redis_client.get(key)
if attempts:
attempts = int(attempts)
return attempts >= settings.MAX_LOGIN_ATTEMPTS
except Exception as e:
logger.error(f"Failed to check account lock status: {e}")
return False
@staticmethod
def hash_api_key(api_key: str) -> str:
"""Hash API key for storage"""
return hashlib.sha256(api_key.encode()).hexdigest()
@staticmethod
def generate_secure_token(length: int = 32) -> str:
"""Generate secure random token"""
import secrets
return secrets.token_urlsafe(length)
@staticmethod
def generate_reset_token() -> str:
"""Generate a secure password reset token"""
import secrets
return secrets.token_urlsafe(32)
@staticmethod
def mask_sensitive_data(data: str, visible_chars: int = 4) -> str:
"""Mask sensitive data for logging"""
if not data or len(data) <= visible_chars:
return "*" * len(data) if data else ""
return data[:visible_chars] + "*" * (len(data) - visible_chars)
@staticmethod
async def check_login_attempts(email: str) -> bool:
"""Check if user has exceeded login attempts"""
try:
redis_client = await get_redis_client()
key = f"login_attempts:{email}"
attempts = await redis_client.get(key)
if attempts is None:
return True
return int(attempts) < settings.MAX_LOGIN_ATTEMPTS
except Exception as e:
logger.error(f"Error checking login attempts: {e}")
return True # Allow on error
@staticmethod
async def increment_login_attempts(email: str) -> None:
"""Increment login attempts for email"""
try:
redis_client = await get_redis_client()
key = f"login_attempts:{email}"
await redis_client.incr(key)
await redis_client.expire(key, settings.LOCKOUT_DURATION_MINUTES * 60)
except Exception as e:
logger.error(f"Error incrementing login attempts: {e}")
@staticmethod
async def clear_login_attempts(email: str) -> None:
"""Clear login attempts for email after successful login"""
try:
redis_client = await get_redis_client()
key = f"login_attempts:{email}"
await redis_client.delete(key)
logger.debug(f"Cleared login attempts for {email}")
except Exception as e:
logger.error(f"Error clearing login attempts: {e}")
@staticmethod
async def store_refresh_token(user_id: str, token: str) -> None:
"""Store refresh token in Redis"""
try:
redis_client = await get_redis_client()
token_hash = SecurityManager.hash_api_key(token) # Reuse hash method
key = f"refresh_token:{user_id}:{token_hash}"
# Store with expiration matching JWT refresh token expiry
expire_seconds = settings.JWT_REFRESH_TOKEN_EXPIRE_DAYS * 24 * 60 * 60
await redis_client.setex(key, expire_seconds, "valid")
except Exception as e:
logger.error(f"Error storing refresh token: {e}")
@staticmethod
async def is_refresh_token_valid(user_id: str, token: str) -> bool:
"""Check if refresh token is still valid in Redis"""
try:
redis_client = await get_redis_client()
token_hash = SecurityManager.hash_api_key(token)
key = f"refresh_token:{user_id}:{token_hash}"
exists = await redis_client.exists(key)
return bool(exists)
except Exception as e:
logger.error(f"Error checking refresh token validity: {e}")
return False
@staticmethod
async def revoke_refresh_token(user_id: str, token: str) -> None:
"""Revoke refresh token by removing from Redis"""
try:
redis_client = await get_redis_client()
token_hash = SecurityManager.hash_api_key(token)
key = f"refresh_token:{user_id}:{token_hash}"
await redis_client.delete(key)
logger.debug(f"Revoked refresh token for user {user_id}")
except Exception as e:
logger.error(f"Error revoking refresh token: {e}")

services/auth/app/main.py
View File

@@ -0,0 +1,225 @@
"""
Authentication Service Main Application
"""
from fastapi import FastAPI
from sqlalchemy import text
import structlog
from app.core.config import settings
from app.core.database import database_manager
from app.api import auth_operations, users, onboarding_progress, consent, data_export, account_deletion, internal_demo, password_reset
from shared.service_base import StandardFastAPIService
from shared.messaging import UnifiedEventPublisher
class AuthService(StandardFastAPIService):
"""Authentication Service with standardized setup"""
async def on_startup(self, app):
"""Custom startup logic including migration verification and Redis initialization"""
self.logger.info("Starting auth service on_startup")
await self.verify_migrations()
# Initialize Redis if not already done during service creation
if not self.redis_initialized:
try:
from shared.redis_utils import initialize_redis, get_redis_client
await initialize_redis(settings.REDIS_URL_WITH_DB, db=settings.REDIS_DB, max_connections=getattr(settings, 'REDIS_MAX_CONNECTIONS', 50))
self.redis_client = await get_redis_client()
self.redis_initialized = True
self.logger.info("Connected to Redis for token management")
except Exception as e:
self.logger.error(f"Failed to connect to Redis during startup: {e}")
raise
await super().on_startup(app)
    async def on_shutdown(self, app):
        """Custom shutdown logic for Auth Service"""
        await super().on_shutdown(app)
        # Close Redis
        from shared.redis_utils import close_redis
        await close_redis()
        self.logger.info("Redis connection closed")
        self.logger.info("Authentication Service shutdown complete")
async def verify_migrations(self):
"""Verify database schema matches the latest migrations."""
try:
async with self.database_manager.get_session() as session:
# Check if alembic_version table exists
result = await session.execute(text("""
SELECT EXISTS (
SELECT FROM information_schema.tables
WHERE table_schema = 'public'
AND table_name = 'alembic_version'
)
"""))
table_exists = result.scalar()
if table_exists:
# If table exists, check the version
result = await session.execute(text("SELECT version_num FROM alembic_version"))
version = result.scalar()
self.logger.info(f"Migration verification successful: {version}")
else:
# If table doesn't exist, migrations might not have run yet
# This is OK - the migration job should create it
self.logger.warning("alembic_version table does not exist yet - migrations may not have run")
except Exception as e:
self.logger.warning(f"Migration verification failed (this may be expected during initial setup): {e}")
def __init__(self):
# Initialize Redis during service creation so it's available when needed
try:
import asyncio
# We need to run the async initialization in a sync context
try:
# Check if there's already a running event loop
loop = asyncio.get_running_loop()
# If there is, we'll initialize Redis later in on_startup
self.redis_initialized = False
self.redis_client = None
except RuntimeError:
# No event loop running, safe to run the async function
import asyncio
import nest_asyncio
nest_asyncio.apply() # Allow nested event loops
async def init_redis():
from shared.redis_utils import initialize_redis, get_redis_client
await initialize_redis(settings.REDIS_URL_WITH_DB, db=settings.REDIS_DB, max_connections=getattr(settings, 'REDIS_MAX_CONNECTIONS', 50))
return await get_redis_client()
self.redis_client = asyncio.run(init_redis())
self.redis_initialized = True
self.logger.info("Connected to Redis for token management")
except Exception as e:
self.logger.error(f"Failed to initialize Redis during service creation: {e}")
self.redis_initialized = False
self.redis_client = None
# Define expected database tables for health checks
auth_expected_tables = [
'users', 'refresh_tokens', 'user_onboarding_progress',
'user_onboarding_summary', 'login_attempts', 'user_consents',
'consent_history', 'audit_logs'
]
# Define custom metrics for auth service
auth_custom_metrics = {
"registration_total": {
"type": "counter",
"description": "Total user registrations by status",
"labels": ["status"]
},
"login_success_total": {
"type": "counter",
"description": "Total successful user logins"
},
"login_failure_total": {
"type": "counter",
"description": "Total failed user logins by reason",
"labels": ["reason"]
},
"token_refresh_total": {
"type": "counter",
"description": "Total token refreshes by status",
"labels": ["status"]
},
"token_verify_total": {
"type": "counter",
"description": "Total token verifications by status",
"labels": ["status"]
},
"logout_total": {
"type": "counter",
"description": "Total user logouts by status",
"labels": ["status"]
},
"registration_duration_seconds": {
"type": "histogram",
"description": "Registration request duration"
},
"login_duration_seconds": {
"type": "histogram",
"description": "Login request duration"
},
"token_refresh_duration_seconds": {
"type": "histogram",
"description": "Token refresh duration"
}
}
super().__init__(
service_name="auth-service",
app_name="Authentication Service",
description="Handles user authentication and authorization for bakery forecasting platform",
version="1.0.0",
log_level=settings.LOG_LEVEL,
api_prefix="", # Empty because RouteBuilder already includes /api/v1
database_manager=database_manager,
expected_tables=auth_expected_tables,
enable_messaging=True,
custom_metrics=auth_custom_metrics
)
async def _setup_messaging(self):
"""Setup messaging for auth service"""
from shared.messaging import RabbitMQClient
try:
self.rabbitmq_client = RabbitMQClient(settings.RABBITMQ_URL, service_name="auth-service")
await self.rabbitmq_client.connect()
# Create event publisher
self.event_publisher = UnifiedEventPublisher(self.rabbitmq_client, "auth-service")
self.logger.info("Auth service messaging setup completed")
except Exception as e:
self.logger.error("Failed to setup auth messaging", error=str(e))
raise
async def _cleanup_messaging(self):
"""Cleanup messaging for auth service"""
try:
if self.rabbitmq_client:
await self.rabbitmq_client.disconnect()
self.logger.info("Auth service messaging cleanup completed")
except Exception as e:
self.logger.error("Error during auth messaging cleanup", error=str(e))
def get_service_features(self):
"""Return auth-specific features"""
return [
"user_authentication",
"token_management",
"user_onboarding",
"role_based_access",
"messaging_integration"
]
# Create service instance
service = AuthService()
# Create FastAPI app with standardized setup
app = service.create_app(
docs_url="/docs",
redoc_url="/redoc"
)
# Setup standard endpoints
service.setup_standard_endpoints()
# Include routers with specific configurations
# Note: Routes now use RouteBuilder which includes full paths, so no prefix needed
service.add_router(auth_operations.router, tags=["authentication"])
service.add_router(users.router, tags=["users"])
service.add_router(onboarding_progress.router, tags=["onboarding"])
service.add_router(consent.router, tags=["gdpr", "consent"])
service.add_router(data_export.router, tags=["gdpr", "data-export"])
service.add_router(account_deletion.router, tags=["gdpr", "account-deletion"])
service.add_router(internal_demo.router, tags=["internal-demo"])
service.add_router(password_reset.router, tags=["password-reset"])

View File

@@ -0,0 +1,31 @@
# services/auth/app/models/__init__.py
"""
Models export for auth service
"""
# Import AuditLog model for this service
from shared.security import create_audit_log_model
from shared.database.base import Base
# Create audit log model for this service
AuditLog = create_audit_log_model(Base)
from .users import User
from .tokens import RefreshToken, LoginAttempt
from .onboarding import UserOnboardingProgress, UserOnboardingSummary
from .consent import UserConsent, ConsentHistory
from .deletion_job import DeletionJob
from .password_reset_tokens import PasswordResetToken
__all__ = [
'User',
'RefreshToken',
'LoginAttempt',
'UserOnboardingProgress',
'UserOnboardingSummary',
'UserConsent',
'ConsentHistory',
'DeletionJob',
'PasswordResetToken',
"AuditLog",
]

View File

@@ -0,0 +1,110 @@
"""
User consent tracking models for GDPR compliance
"""
from sqlalchemy import Column, String, Boolean, DateTime, Text, ForeignKey, Index
from sqlalchemy.dialects.postgresql import UUID, JSON
from datetime import datetime, timezone
import uuid
from shared.database.base import Base
class UserConsent(Base):
"""
Tracks user consent for various data processing activities
GDPR Article 7 - Conditions for consent
"""
__tablename__ = "user_consents"
id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
user_id = Column(UUID(as_uuid=True), ForeignKey("users.id", ondelete="CASCADE"), nullable=False, index=True)
# Consent types
terms_accepted = Column(Boolean, nullable=False, default=False)
privacy_accepted = Column(Boolean, nullable=False, default=False)
marketing_consent = Column(Boolean, nullable=False, default=False)
analytics_consent = Column(Boolean, nullable=False, default=False)
# Consent metadata
consent_version = Column(String(20), nullable=False, default="1.0")
consent_method = Column(String(50), nullable=False) # registration, settings_update, cookie_banner
ip_address = Column(String(45), nullable=True)
user_agent = Column(Text, nullable=True)
# Consent text at time of acceptance
terms_text_hash = Column(String(64), nullable=True)
privacy_text_hash = Column(String(64), nullable=True)
# Timestamps
consented_at = Column(DateTime(timezone=True), nullable=False, default=lambda: datetime.now(timezone.utc))
withdrawn_at = Column(DateTime(timezone=True), nullable=True)
# Additional metadata (renamed from 'metadata' to avoid SQLAlchemy reserved word)
extra_data = Column(JSON, nullable=True)
__table_args__ = (
Index('idx_user_consent_user_id', 'user_id'),
Index('idx_user_consent_consented_at', 'consented_at'),
)
def __repr__(self):
return f"<UserConsent(user_id={self.user_id}, version={self.consent_version})>"
def to_dict(self):
return {
"id": str(self.id),
"user_id": str(self.user_id),
"terms_accepted": self.terms_accepted,
"privacy_accepted": self.privacy_accepted,
"marketing_consent": self.marketing_consent,
"analytics_consent": self.analytics_consent,
"consent_version": self.consent_version,
"consent_method": self.consent_method,
"consented_at": self.consented_at.isoformat() if self.consented_at else None,
"withdrawn_at": self.withdrawn_at.isoformat() if self.withdrawn_at else None,
}
class ConsentHistory(Base):
"""
Historical record of all consent changes
Provides audit trail for GDPR compliance
"""
__tablename__ = "consent_history"
id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
user_id = Column(UUID(as_uuid=True), nullable=False, index=True)
consent_id = Column(UUID(as_uuid=True), ForeignKey("user_consents.id", ondelete="SET NULL"), nullable=True)
# Action type
action = Column(String(50), nullable=False) # granted, updated, withdrawn, revoked
# Consent state at time of action
consent_snapshot = Column(JSON, nullable=False)
# Context
ip_address = Column(String(45), nullable=True)
user_agent = Column(Text, nullable=True)
consent_method = Column(String(50), nullable=True)
# Timestamp
created_at = Column(DateTime(timezone=True), nullable=False, default=lambda: datetime.now(timezone.utc), index=True)
__table_args__ = (
Index('idx_consent_history_user_id', 'user_id'),
Index('idx_consent_history_created_at', 'created_at'),
Index('idx_consent_history_action', 'action'),
)
def __repr__(self):
return f"<ConsentHistory(user_id={self.user_id}, action={self.action})>"
def to_dict(self):
return {
"id": str(self.id),
"user_id": str(self.user_id),
"action": self.action,
"consent_snapshot": self.consent_snapshot,
"created_at": self.created_at.isoformat() if self.created_at else None,
}
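
A sketch of how a service layer might persist a consent decision together with its audit-trail row; the AsyncSession handling and the calling code are assumptions, only the columns come from the models above.

async def record_registration_consent(session, user_id, ip_address=None):
    consent = UserConsent(
        user_id=user_id,
        terms_accepted=True,
        privacy_accepted=True,
        consent_method="registration",
        ip_address=ip_address,
    )
    session.add(consent)
    await session.flush()  # populate consent.id before the history row references it
    session.add(ConsentHistory(
        user_id=user_id,
        consent_id=consent.id,
        action="granted",
        consent_snapshot=consent.to_dict(),
        ip_address=ip_address,
        consent_method="registration",
    ))
    await session.commit()
    return consent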

View File

@@ -0,0 +1,64 @@
"""
Deletion Job Model
Tracks tenant deletion jobs for persistence and recovery
"""
from sqlalchemy import Column, String, DateTime, Text, JSON, Index, Integer
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.sql import func
import uuid
from shared.database.base import Base
class DeletionJob(Base):
"""
Persistent storage for tenant deletion jobs
Enables job recovery and tracking across service restarts
"""
__tablename__ = "deletion_jobs"
# Primary identifiers
id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4, index=True)
job_id = Column(String(100), nullable=False, unique=True, index=True) # External job ID
tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)
# Job Metadata
tenant_name = Column(String(255), nullable=True)
initiated_by = Column(UUID(as_uuid=True), nullable=True) # User ID who started deletion
# Job Status
status = Column(String(50), nullable=False, default="pending", index=True) # pending, in_progress, completed, failed, rolled_back
# Service Results
service_results = Column(JSON, nullable=True) # Dict of service_name -> result details
# Progress Tracking
total_items_deleted = Column(Integer, default=0, nullable=False)
services_completed = Column(Integer, default=0, nullable=False)
services_failed = Column(Integer, default=0, nullable=False)
# Error Tracking
error_log = Column(JSON, nullable=True) # Array of error messages
# Timestamps
started_at = Column(DateTime(timezone=True), nullable=True, index=True)
completed_at = Column(DateTime(timezone=True), nullable=True)
created_at = Column(DateTime(timezone=True), server_default=func.now(), nullable=False)
updated_at = Column(DateTime(timezone=True), server_default=func.now(), onupdate=func.now(), nullable=False)
# Additional Context
notes = Column(Text, nullable=True)
extra_metadata = Column(JSON, nullable=True) # Additional job-specific data
# Indexes for performance
__table_args__ = (
Index('idx_deletion_job_id', 'job_id'),
Index('idx_deletion_tenant_id', 'tenant_id'),
Index('idx_deletion_status', 'status'),
Index('idx_deletion_started_at', 'started_at'),
Index('idx_deletion_tenant_status', 'tenant_id', 'status'),
)
def __repr__(self):
return f"<DeletionJob(job_id='{self.job_id}', tenant_id={self.tenant_id}, status='{self.status}')>"

View File

@@ -0,0 +1,91 @@
# services/auth/app/models/onboarding.py
"""
User onboarding progress models
"""
from sqlalchemy import Column, String, Boolean, DateTime, Text, ForeignKey, JSON, UniqueConstraint
from sqlalchemy.dialects.postgresql import UUID
from datetime import datetime, timezone
import uuid
from shared.database.base import Base
class UserOnboardingProgress(Base):
"""User onboarding progress tracking model"""
__tablename__ = "user_onboarding_progress"
id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
user_id = Column(UUID(as_uuid=True), ForeignKey("users.id", ondelete="CASCADE"), nullable=False, index=True)
# Step tracking
step_name = Column(String(50), nullable=False)
completed = Column(Boolean, default=False, nullable=False)
completed_at = Column(DateTime(timezone=True))
# Additional step data (JSON field for flexibility)
step_data = Column(JSON, default=dict)
# Timestamps
created_at = Column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc))
updated_at = Column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc), onupdate=lambda: datetime.now(timezone.utc))
# Unique constraint to prevent duplicate step entries per user
__table_args__ = (
UniqueConstraint('user_id', 'step_name', name='uq_user_step'),
{'extend_existing': True}
)
def __repr__(self):
return f"<UserOnboardingProgress(id={self.id}, user_id={self.user_id}, step={self.step_name}, completed={self.completed})>"
def to_dict(self):
"""Convert to dictionary"""
return {
"id": str(self.id),
"user_id": str(self.user_id),
"step_name": self.step_name,
"completed": self.completed,
"completed_at": self.completed_at.isoformat() if self.completed_at else None,
"step_data": self.step_data or {},
"created_at": self.created_at.isoformat() if self.created_at else None,
"updated_at": self.updated_at.isoformat() if self.updated_at else None
}
class UserOnboardingSummary(Base):
"""User onboarding summary for quick lookups"""
__tablename__ = "user_onboarding_summary"
id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
user_id = Column(UUID(as_uuid=True), ForeignKey("users.id", ondelete="CASCADE"), nullable=False, unique=True, index=True)
# Summary fields
current_step = Column(String(50), nullable=False, default="user_registered")
next_step = Column(String(50))
completion_percentage = Column(String(50), default="0.0") # Store as string for precision
fully_completed = Column(Boolean, default=False)
# Progress tracking
steps_completed_count = Column(String(50), default="0") # Store as string: "3/5"
# Timestamps
created_at = Column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc))
updated_at = Column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc), onupdate=lambda: datetime.now(timezone.utc))
last_activity_at = Column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc))
def __repr__(self):
return f"<UserOnboardingSummary(user_id={self.user_id}, current_step={self.current_step}, completion={self.completion_percentage}%)>"
def to_dict(self):
"""Convert to dictionary"""
return {
"id": str(self.id),
"user_id": str(self.user_id),
"current_step": self.current_step,
"next_step": self.next_step,
"completion_percentage": float(self.completion_percentage) if self.completion_percentage else 0.0,
"fully_completed": self.fully_completed,
"steps_completed_count": self.steps_completed_count,
"created_at": self.created_at.isoformat() if self.created_at else None,
"updated_at": self.updated_at.isoformat() if self.updated_at else None,
"last_activity_at": self.last_activity_at.isoformat() if self.last_activity_at else None
}
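# --- Illustrative sketch (assumption; this helper is not part of the original file) ---
# Example of deriving the string-based summary fields above from a list of
# UserOnboardingProgress rows (e.g. completion_percentage="60.0", steps "3/5"):
def summarize_progress(steps: list) -> dict:
    """Sketch: compute summary values in the same string formats the model stores."""
    completed = sum(1 for s in steps if s.completed)
    total = len(steps)
    pct = (completed / total * 100) if total else 0.0
    return {
        "completion_percentage": f"{pct:.1f}",
        "steps_completed_count": f"{completed}/{total}",
        "fully_completed": total > 0 and completed == total,
    }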

View File

@@ -0,0 +1,39 @@
# services/auth/app/models/password_reset_tokens.py
"""
Password reset token model for authentication service
"""
import uuid
from datetime import datetime, timezone
from sqlalchemy import Column, String, DateTime, Boolean, Index
from sqlalchemy.dialects.postgresql import UUID
from shared.database.base import Base
class PasswordResetToken(Base):
"""
Password reset token model
Stores temporary tokens for password reset functionality
"""
__tablename__ = "password_reset_tokens"
id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
user_id = Column(UUID(as_uuid=True), nullable=False, index=True)
token = Column(String(255), nullable=False, unique=True, index=True)
expires_at = Column(DateTime(timezone=True), nullable=False)
is_used = Column(Boolean, default=False, nullable=False)
created_at = Column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc))
used_at = Column(DateTime(timezone=True), nullable=True)
# Add indexes for better performance
__table_args__ = (
Index('ix_password_reset_tokens_user_id', 'user_id'),
Index('ix_password_reset_tokens_token', 'token'),
Index('ix_password_reset_tokens_expires_at', 'expires_at'),
Index('ix_password_reset_tokens_is_used', 'is_used'),
)
def __repr__(self):
return f"<PasswordResetToken(id={self.id}, user_id={self.user_id}, token={self.token[:10]}..., is_used={self.is_used})>"

View File

@@ -0,0 +1,92 @@
# ================================================================
# services/auth/app/models/tokens.py
# ================================================================
"""
Token models for authentication service
"""
import hashlib
import uuid
from datetime import datetime, timezone
from sqlalchemy import Column, String, Boolean, DateTime, Text, Index
from sqlalchemy.dialects.postgresql import UUID
from shared.database.base import Base
class RefreshToken(Base):
"""
Refresh token model - FIXED to prevent duplicate constraint violations
"""
__tablename__ = "refresh_tokens"
id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
user_id = Column(UUID(as_uuid=True), nullable=False, index=True)
# ✅ FIX 1: Use TEXT instead of VARCHAR to handle longer tokens
token = Column(Text, nullable=False)
# ✅ FIX 2: Add token hash for uniqueness instead of full token
token_hash = Column(String(255), nullable=True, unique=True)
expires_at = Column(DateTime(timezone=True), nullable=False)
is_revoked = Column(Boolean, default=False, nullable=False)
created_at = Column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc))
revoked_at = Column(DateTime(timezone=True), nullable=True)
# ✅ FIX 3: Add indexes for better performance
__table_args__ = (
Index('ix_refresh_tokens_user_id_active', 'user_id', 'is_revoked'),
Index('ix_refresh_tokens_expires_at', 'expires_at'),
Index('ix_refresh_tokens_token_hash', 'token_hash'),
)
def __init__(self, **kwargs):
"""Initialize refresh token with automatic hash generation"""
super().__init__(**kwargs)
if self.token and not self.token_hash:
self.token_hash = self._generate_token_hash(self.token)
@staticmethod
def _generate_token_hash(token: str) -> str:
"""Generate a hash of the token for uniqueness checking"""
return hashlib.sha256(token.encode()).hexdigest()
def update_token(self, new_token: str):
"""Update token and regenerate hash"""
self.token = new_token
self.token_hash = self._generate_token_hash(new_token)
@classmethod
async def create_refresh_token(cls, user_id: uuid.UUID, token: str, expires_at: datetime):
"""
Create a new refresh token with proper hash generation
"""
return cls(
id=uuid.uuid4(),
user_id=user_id,
token=token,
token_hash=cls._generate_token_hash(token),
expires_at=expires_at,
is_revoked=False,
created_at=datetime.now(timezone.utc)
)
def __repr__(self):
return f"<RefreshToken(id={self.id}, user_id={self.user_id}, expires_at={self.expires_at})>"
class LoginAttempt(Base):
"""Login attempt tracking model"""
__tablename__ = "login_attempts"
id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
email = Column(String(255), nullable=False, index=True)
ip_address = Column(String(45), nullable=False)
user_agent = Column(Text)
success = Column(Boolean, default=False)
failure_reason = Column(String(255))
created_at = Column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc))
def __repr__(self):
return f"<LoginAttempt(id={self.id}, email={self.email}, success={self.success})>"

View File

@@ -0,0 +1,61 @@
# services/auth/app/models/users.py - FIXED VERSION
"""
User models for authentication service - FIXED
Removed tenant relationships to eliminate cross-service dependencies
"""
from sqlalchemy import Column, String, Boolean, DateTime, Text, ForeignKey
from sqlalchemy.dialects.postgresql import UUID
from datetime import datetime, timezone
import uuid
from shared.database.base import Base
class User(Base):
"""User model - FIXED without cross-service relationships"""
__tablename__ = "users"
id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
email = Column(String(255), unique=True, index=True, nullable=False)
hashed_password = Column(String(255), nullable=False)
full_name = Column(String(255), nullable=False)
is_active = Column(Boolean, default=True)
is_verified = Column(Boolean, default=False)
# Timezone-aware datetime fields
created_at = Column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc))
updated_at = Column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc), onupdate=lambda: datetime.now(timezone.utc))
last_login = Column(DateTime(timezone=True))
# Profile fields
phone = Column(String(20))
language = Column(String(10), default="es")
timezone = Column(String(50), default="Europe/Madrid")
role = Column(String(20), nullable=False)
# Payment integration fields
payment_customer_id = Column(String(255), nullable=True, index=True)
default_payment_method_id = Column(String(255), nullable=True)
# REMOVED: All tenant relationships - these are handled by tenant service
# No tenant_memberships, tenants relationships
def __repr__(self):
return f"<User(id={self.id}, email={self.email})>"
def to_dict(self):
"""Convert user to dictionary"""
return {
"id": str(self.id),
"email": self.email,
"full_name": self.full_name,
"is_active": self.is_active,
"is_verified": self.is_verified,
"phone": self.phone,
"language": self.language,
"timezone": self.timezone,
"role": self.role,
"created_at": self.created_at.isoformat() if self.created_at else None,
"updated_at": self.updated_at.isoformat() if self.updated_at else None,
"last_login": self.last_login.isoformat() if self.last_login else None
}

View File

@@ -0,0 +1,16 @@
"""
Auth Service Repositories
Repository implementations for authentication service
"""
from .base import AuthBaseRepository
from .user_repository import UserRepository
from .token_repository import TokenRepository
from .onboarding_repository import OnboardingRepository
__all__ = [
"AuthBaseRepository",
"UserRepository",
"TokenRepository",
"OnboardingRepository"
]

View File

@@ -0,0 +1,101 @@
"""
Base Repository for Auth Service
Service-specific repository base class with auth service utilities
"""
from typing import Optional, List, Dict, Any, Type
from sqlalchemy.ext.asyncio import AsyncSession
from datetime import datetime, timezone
import structlog
from shared.database.repository import BaseRepository
from shared.database.exceptions import DatabaseError, ValidationError
logger = structlog.get_logger()
class AuthBaseRepository(BaseRepository):
"""Base repository for auth service with common auth operations"""
def __init__(self, model: Type, session: AsyncSession, cache_ttl: Optional[int] = 600):
# Auth data benefits from longer caching (10 minutes)
super().__init__(model, session, cache_ttl)
async def get_active_records(self, skip: int = 0, limit: int = 100) -> List:
"""Get active records (if model has is_active field)"""
if hasattr(self.model, 'is_active'):
return await self.get_multi(
skip=skip,
limit=limit,
filters={"is_active": True},
order_by="created_at",
order_desc=True
)
return await self.get_multi(skip=skip, limit=limit)
async def get_by_email(self, email: str) -> Optional:
"""Get record by email (if model has email field)"""
if hasattr(self.model, 'email'):
return await self.get_by_field("email", email)
return None
async def get_by_username(self, username: str) -> Optional:
"""Get record by username (if model has username field)"""
if hasattr(self.model, 'username'):
return await self.get_by_field("username", username)
return None
async def deactivate_record(self, record_id: Any) -> Optional:
"""Deactivate a record instead of deleting it"""
if hasattr(self.model, 'is_active'):
return await self.update(record_id, {"is_active": False})
return await self.delete(record_id)
async def activate_record(self, record_id: Any) -> Optional:
"""Activate a record"""
if hasattr(self.model, 'is_active'):
return await self.update(record_id, {"is_active": True})
return await self.get_by_id(record_id)
async def cleanup_expired_records(self, field_name: str = "expires_at") -> int:
"""Clean up expired records (for tokens, sessions, etc.)"""
try:
if not hasattr(self.model, field_name):
logger.warning(f"Model {self.model.__name__} has no {field_name} field for cleanup")
return 0
# This would need custom implementation with raw SQL for date comparison
# For now, return 0 to indicate no cleanup performed
logger.info(f"Cleanup requested for {self.model.__name__} but not implemented")
return 0
except Exception as e:
logger.error("Failed to cleanup expired records",
model=self.model.__name__,
error=str(e))
raise DatabaseError(f"Cleanup failed: {str(e)}")
def _validate_auth_data(self, data: Dict[str, Any], required_fields: List[str]) -> Dict[str, Any]:
"""Validate authentication-related data"""
errors = []
for field in required_fields:
if field not in data or not data[field]:
errors.append(f"Missing required field: {field}")
# Validate email format if present
if "email" in data and data["email"]:
email = data["email"]
if "@" not in email or "." not in email.split("@")[-1]:
errors.append("Invalid email format")
# Validate password strength if present
if "password" in data and data["password"]:
password = data["password"]
if len(password) < 8:
errors.append("Password must be at least 8 characters long")
return {
"is_valid": len(errors) == 0,
"errors": errors
}
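# --- Illustrative sketch (assumption): one way cleanup_expired_records could be completed,
# --- mirroring the raw-SQL cleanup used by the token repositories in this service.
# from sqlalchemy import text
# result = await self.session.execute(
#     text(f"DELETE FROM {self.model.__tablename__} WHERE {field_name} < :now"),
#     {"now": datetime.now(timezone.utc)},
# )
# return result.rowcount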

View File

@@ -0,0 +1,110 @@
"""
Deletion Job Repository
Database operations for deletion job persistence
"""
from typing import List, Optional
from uuid import UUID
from sqlalchemy import select, and_, desc
from sqlalchemy.ext.asyncio import AsyncSession
import structlog
from app.models.deletion_job import DeletionJob
logger = structlog.get_logger()
class DeletionJobRepository:
"""Repository for deletion job database operations"""
def __init__(self, session: AsyncSession):
self.session = session
async def create(self, deletion_job: DeletionJob) -> DeletionJob:
"""Create a new deletion job record"""
try:
self.session.add(deletion_job)
await self.session.flush()
await self.session.refresh(deletion_job)
return deletion_job
except Exception as e:
logger.error("Failed to create deletion job", error=str(e))
raise
async def get_by_job_id(self, job_id: str) -> Optional[DeletionJob]:
"""Get deletion job by job_id"""
try:
query = select(DeletionJob).where(DeletionJob.job_id == job_id)
result = await self.session.execute(query)
return result.scalar_one_or_none()
except Exception as e:
logger.error("Failed to get deletion job", error=str(e), job_id=job_id)
raise
async def get_by_id(self, id: UUID) -> Optional[DeletionJob]:
"""Get deletion job by database ID"""
try:
return await self.session.get(DeletionJob, id)
except Exception as e:
logger.error("Failed to get deletion job by ID", error=str(e), id=str(id))
raise
async def list_by_tenant(
self,
tenant_id: UUID,
status: Optional[str] = None,
limit: int = 100
) -> List[DeletionJob]:
"""List deletion jobs for a tenant"""
try:
query = select(DeletionJob).where(DeletionJob.tenant_id == tenant_id)
if status:
query = query.where(DeletionJob.status == status)
query = query.order_by(desc(DeletionJob.started_at)).limit(limit)
result = await self.session.execute(query)
return list(result.scalars().all())
except Exception as e:
logger.error("Failed to list deletion jobs", error=str(e), tenant_id=str(tenant_id))
raise
async def list_all(
self,
status: Optional[str] = None,
limit: int = 100
) -> List[DeletionJob]:
"""List all deletion jobs with optional status filter"""
try:
query = select(DeletionJob)
if status:
query = query.where(DeletionJob.status == status)
query = query.order_by(desc(DeletionJob.started_at)).limit(limit)
result = await self.session.execute(query)
return list(result.scalars().all())
except Exception as e:
logger.error("Failed to list all deletion jobs", error=str(e))
raise
async def update(self, deletion_job: DeletionJob) -> DeletionJob:
"""Update a deletion job record"""
try:
await self.session.flush()
await self.session.refresh(deletion_job)
return deletion_job
except Exception as e:
logger.error("Failed to update deletion job", error=str(e))
raise
async def delete(self, deletion_job: DeletionJob) -> None:
"""Delete a deletion job record"""
try:
await self.session.delete(deletion_job)
await self.session.flush()
except Exception as e:
logger.error("Failed to delete deletion job", error=str(e))
raise
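# Note (assumption, inferred from the flush()-only calls above): this repository leaves
# transaction commits to the caller (e.g. a request-scoped session or unit of work), so
# create/update/delete only flush and refresh within the current transaction.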

View File

@@ -0,0 +1,313 @@
# services/auth/app/repositories/onboarding_repository.py
"""
Onboarding Repository for database operations
"""
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, update, delete, and_, func

from sqlalchemy.dialects.postgresql import insert
from typing import List, Dict, Any, Optional
from datetime import datetime, timezone
import structlog
from app.models.onboarding import UserOnboardingProgress, UserOnboardingSummary
logger = structlog.get_logger()
class OnboardingRepository:
"""Repository for onboarding progress operations"""
def __init__(self, db: AsyncSession):
self.db = db
async def get_user_progress_steps(self, user_id: str) -> List[UserOnboardingProgress]:
"""Get all onboarding steps for a user"""
try:
result = await self.db.execute(
select(UserOnboardingProgress)
.where(UserOnboardingProgress.user_id == user_id)
.order_by(UserOnboardingProgress.created_at)
)
return result.scalars().all()
except Exception as e:
logger.error(f"Error getting user progress steps for {user_id}: {e}")
return []
async def get_user_step(self, user_id: str, step_name: str) -> Optional[UserOnboardingProgress]:
"""Get a specific step for a user"""
try:
result = await self.db.execute(
select(UserOnboardingProgress)
.where(
and_(
UserOnboardingProgress.user_id == user_id,
UserOnboardingProgress.step_name == step_name
)
)
)
return result.scalars().first()
except Exception as e:
logger.error(f"Error getting step {step_name} for user {user_id}: {e}")
return None
async def upsert_user_step(
self,
user_id: str,
step_name: str,
completed: bool,
step_data: Dict[str, Any] = None,
auto_commit: bool = True
) -> UserOnboardingProgress:
"""Insert or update a user's onboarding step
Args:
user_id: User ID
step_name: Name of the step
completed: Whether the step is completed
step_data: Additional data for the step
auto_commit: Whether to auto-commit (set to False when used within UnitOfWork)
"""
try:
completed_at = datetime.now(timezone.utc) if completed else None
step_data = step_data or {}
# Use PostgreSQL UPSERT (INSERT ... ON CONFLICT ... DO UPDATE)
stmt = insert(UserOnboardingProgress).values(
user_id=user_id,
step_name=step_name,
completed=completed,
completed_at=completed_at,
step_data=step_data,
updated_at=datetime.now(timezone.utc)
)
# On conflict, update the existing record
stmt = stmt.on_conflict_do_update(
index_elements=['user_id', 'step_name'],
set_=dict(
completed=stmt.excluded.completed,
completed_at=stmt.excluded.completed_at,
step_data=stmt.excluded.step_data,
updated_at=stmt.excluded.updated_at
)
)
# Return the updated record
stmt = stmt.returning(UserOnboardingProgress)
result = await self.db.execute(stmt)
# Only commit if auto_commit is True (not within a UnitOfWork)
if auto_commit:
await self.db.commit()
else:
# Flush to ensure the statement is executed
await self.db.flush()
return result.scalars().first()
except Exception as e:
logger.error(f"Error upserting step {step_name} for user {user_id}: {e}")
if auto_commit:
await self.db.rollback()
raise
async def get_user_summary(self, user_id: str) -> Optional[UserOnboardingSummary]:
"""Get user's onboarding summary"""
try:
result = await self.db.execute(
select(UserOnboardingSummary)
.where(UserOnboardingSummary.user_id == user_id)
)
return result.scalars().first()
except Exception as e:
logger.error(f"Error getting onboarding summary for user {user_id}: {e}")
return None
async def upsert_user_summary(
self,
user_id: str,
current_step: str,
next_step: Optional[str],
completion_percentage: float,
fully_completed: bool,
steps_completed_count: str
) -> UserOnboardingSummary:
"""Insert or update user's onboarding summary"""
try:
# Use PostgreSQL UPSERT
stmt = insert(UserOnboardingSummary).values(
user_id=user_id,
current_step=current_step,
next_step=next_step,
completion_percentage=str(completion_percentage),
fully_completed=fully_completed,
steps_completed_count=steps_completed_count,
updated_at=datetime.now(timezone.utc),
last_activity_at=datetime.now(timezone.utc)
)
# On conflict, update the existing record
stmt = stmt.on_conflict_do_update(
index_elements=['user_id'],
set_=dict(
current_step=stmt.excluded.current_step,
next_step=stmt.excluded.next_step,
completion_percentage=stmt.excluded.completion_percentage,
fully_completed=stmt.excluded.fully_completed,
steps_completed_count=stmt.excluded.steps_completed_count,
updated_at=stmt.excluded.updated_at,
last_activity_at=stmt.excluded.last_activity_at
)
)
# Return the updated record
stmt = stmt.returning(UserOnboardingSummary)
result = await self.db.execute(stmt)
await self.db.commit()
return result.scalars().first()
except Exception as e:
logger.error(f"Error upserting summary for user {user_id}: {e}")
await self.db.rollback()
raise
async def delete_user_progress(self, user_id: str) -> bool:
"""Delete all onboarding progress for a user"""
try:
# Delete steps
await self.db.execute(
delete(UserOnboardingProgress)
.where(UserOnboardingProgress.user_id == user_id)
)
# Delete summary
await self.db.execute(
delete(UserOnboardingSummary)
.where(UserOnboardingSummary.user_id == user_id)
)
await self.db.commit()
return True
except Exception as e:
logger.error(f"Error deleting progress for user {user_id}: {e}")
await self.db.rollback()
return False
async def save_step_data(
self,
user_id: str,
step_name: str,
step_data: Dict[str, Any],
auto_commit: bool = True
) -> UserOnboardingProgress:
"""Save data for a specific step without marking it as completed
Args:
user_id: User ID
step_name: Name of the step
step_data: Data to save
auto_commit: Whether to auto-commit (set to False when used within UnitOfWork)
"""
try:
# Get existing step or create new one
existing_step = await self.get_user_step(user_id, step_name)
if existing_step:
# Update existing step data (merge with existing data)
merged_data = {**(existing_step.step_data or {}), **step_data}
stmt = update(UserOnboardingProgress).where(
and_(
UserOnboardingProgress.user_id == user_id,
UserOnboardingProgress.step_name == step_name
)
).values(
step_data=merged_data,
updated_at=datetime.now(timezone.utc)
).returning(UserOnboardingProgress)
result = await self.db.execute(stmt)
if auto_commit:
await self.db.commit()
else:
await self.db.flush()
return result.scalars().first()
else:
# Create new step with data but not completed
return await self.upsert_user_step(
user_id=user_id,
step_name=step_name,
completed=False,
step_data=step_data,
auto_commit=auto_commit
)
except Exception as e:
logger.error(f"Error saving step data for {step_name}, user {user_id}: {e}")
if auto_commit:
await self.db.rollback()
raise
async def get_step_data(self, user_id: str, step_name: str) -> Optional[Dict[str, Any]]:
"""Get data for a specific step"""
try:
step = await self.get_user_step(user_id, step_name)
return step.step_data if step else None
except Exception as e:
logger.error(f"Error getting step data for {step_name}, user {user_id}: {e}")
return None
async def get_subscription_parameters(self, user_id: str) -> Optional[Dict[str, Any]]:
"""Get subscription parameters saved during onboarding for tenant creation"""
try:
step_data = await self.get_step_data(user_id, "user_registered")
if step_data:
# Extract subscription-related parameters
subscription_params = {
"subscription_plan": step_data.get("subscription_plan", "starter"),
"billing_cycle": step_data.get("billing_cycle", "monthly"),
"coupon_code": step_data.get("coupon_code"),
"payment_method_id": step_data.get("payment_method_id"),
"payment_customer_id": step_data.get("payment_customer_id"),
"saved_at": step_data.get("saved_at")
}
return subscription_params
return None
except Exception as e:
logger.error(f"Error getting subscription parameters for user {user_id}: {e}")
return None
async def get_completion_stats(self) -> Dict[str, Any]:
"""Get completion statistics across all users"""
try:
# Get total users with onboarding data
total_result = await self.db.execute(
select(func.count()).select_from(UserOnboardingSummary)
)
total_users = total_result.scalar() or 0
# Get completed users
completed_result = await self.db.execute(
select(func.count())
.select_from(UserOnboardingSummary)
.where(UserOnboardingSummary.fully_completed == True)
)
completed_users = completed_result.scalar() or 0
return {
"total_users_in_onboarding": total_users,
"fully_completed_users": completed_users,
"completion_rate": (completed_users / total_users * 100) if total_users > 0 else 0
}
except Exception as e:
logger.error(f"Error getting completion stats: {e}")
return {
"total_users_in_onboarding": 0,
"fully_completed_users": 0,
"completion_rate": 0
}
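# --- Illustrative note (assumption): the ON CONFLICT upserts above compile to PostgreSQL roughly like:
# INSERT INTO user_onboarding_progress (user_id, step_name, completed, completed_at, step_data, updated_at)
# VALUES (:user_id, :step_name, :completed, :completed_at, :step_data, :updated_at)
# ON CONFLICT (user_id, step_name) DO UPDATE SET
#     completed = EXCLUDED.completed,
#     completed_at = EXCLUDED.completed_at,
#     step_data = EXCLUDED.step_data,
#     updated_at = EXCLUDED.updated_at
# RETURNING *;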

View File

@@ -0,0 +1,124 @@
# services/auth/app/repositories/password_reset_repository.py
"""
Password reset token repository
Repository for password reset token operations
"""
from typing import Optional, Dict, Any
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, and_, text
from datetime import datetime, timezone
import structlog
import uuid
from .base import AuthBaseRepository
from app.models.password_reset_tokens import PasswordResetToken
from shared.database.exceptions import DatabaseError
logger = structlog.get_logger()
class PasswordResetTokenRepository(AuthBaseRepository):
"""Repository for password reset token operations"""
def __init__(self, session: AsyncSession):
super().__init__(PasswordResetToken, session)
async def create_token(self, user_id: str, token: str, expires_at: datetime) -> PasswordResetToken:
"""Create a new password reset token"""
try:
token_data = {
"user_id": user_id,
"token": token,
"expires_at": expires_at,
"is_used": False
}
reset_token = await self.create(token_data)
logger.debug("Password reset token created",
user_id=user_id,
token_id=reset_token.id,
expires_at=expires_at)
return reset_token
except Exception as e:
logger.error("Failed to create password reset token",
user_id=user_id,
error=str(e))
raise DatabaseError(f"Failed to create password reset token: {str(e)}")
async def get_token_by_value(self, token: str) -> Optional[PasswordResetToken]:
"""Get password reset token by token value"""
try:
stmt = select(PasswordResetToken).where(
and_(
PasswordResetToken.token == token,
PasswordResetToken.is_used == False,
PasswordResetToken.expires_at > datetime.now(timezone.utc)
)
)
result = await self.session.execute(stmt)
return result.scalar_one_or_none()
except Exception as e:
logger.error("Failed to get password reset token by value", error=str(e))
raise DatabaseError(f"Failed to get password reset token: {str(e)}")
async def mark_token_as_used(self, token_id: str) -> Optional[PasswordResetToken]:
"""Mark a password reset token as used"""
try:
return await self.update(token_id, {
"is_used": True,
"used_at": datetime.now(timezone.utc)
})
except Exception as e:
logger.error("Failed to mark password reset token as used",
token_id=token_id,
error=str(e))
raise DatabaseError(f"Failed to mark token as used: {str(e)}")
async def cleanup_expired_tokens(self) -> int:
"""Clean up expired password reset tokens"""
try:
now = datetime.now(timezone.utc)
# Delete expired tokens
query = text("""
DELETE FROM password_reset_tokens
WHERE expires_at < :now OR is_used = true
""")
result = await self.session.execute(query, {"now": now})
deleted_count = result.rowcount
logger.info("Cleaned up expired password reset tokens",
deleted_count=deleted_count)
return deleted_count
except Exception as e:
logger.error("Failed to cleanup expired password reset tokens", error=str(e))
raise DatabaseError(f"Token cleanup failed: {str(e)}")
async def get_valid_token_for_user(self, user_id: str) -> Optional[PasswordResetToken]:
"""Get a valid (unused, not expired) password reset token for a user"""
try:
stmt = select(PasswordResetToken).where(
and_(
PasswordResetToken.user_id == user_id,
PasswordResetToken.is_used == False,
PasswordResetToken.expires_at > datetime.now(timezone.utc)
)
).order_by(PasswordResetToken.created_at.desc())
result = await self.session.execute(stmt)
return result.scalar_one_or_none()
except Exception as e:
logger.error("Failed to get valid token for user",
user_id=user_id,
error=str(e))
raise DatabaseError(f"Failed to get valid token for user: {str(e)}")

View File

@@ -0,0 +1,305 @@
"""
Token Repository
Repository for refresh token operations
"""
from typing import Optional, List, Dict, Any
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, and_, text
from datetime import datetime, timezone, timedelta
import structlog
from .base import AuthBaseRepository
from app.models.tokens import RefreshToken
from shared.database.exceptions import DatabaseError
logger = structlog.get_logger()
class TokenRepository(AuthBaseRepository):
"""Repository for refresh token operations"""
def __init__(self, model, session: AsyncSession, cache_ttl: Optional[int] = 300):
# Tokens change frequently, shorter cache time
super().__init__(model, session, cache_ttl)
async def create_token(self, token_data: Dict[str, Any]) -> RefreshToken:
"""Create a new refresh token from dictionary data"""
return await self.create(token_data)
async def create_refresh_token(
self,
user_id: str,
token: str,
expires_at: datetime
) -> RefreshToken:
"""Create a new refresh token"""
try:
token_data = {
"user_id": user_id,
"token": token,
"expires_at": expires_at,
"is_revoked": False
}
refresh_token = await self.create(token_data)
logger.debug("Refresh token created",
user_id=user_id,
token_id=refresh_token.id,
expires_at=expires_at)
return refresh_token
except Exception as e:
logger.error("Failed to create refresh token",
user_id=user_id,
error=str(e))
raise DatabaseError(f"Failed to create refresh token: {str(e)}")
async def get_token_by_value(self, token: str) -> Optional[RefreshToken]:
"""Get refresh token by token value"""
try:
return await self.get_by_field("token", token)
except Exception as e:
logger.error("Failed to get token by value", error=str(e))
raise DatabaseError(f"Failed to get token: {str(e)}")
async def get_active_tokens_for_user(self, user_id: str) -> List[RefreshToken]:
"""Get all active (non-revoked, non-expired) tokens for a user"""
try:
now = datetime.now(timezone.utc)
# Use raw query for complex filtering
query = text("""
SELECT * FROM refresh_tokens
WHERE user_id = :user_id
AND is_revoked = false
AND expires_at > :now
ORDER BY created_at DESC
""")
result = await self.session.execute(query, {
"user_id": user_id,
"now": now
})
# Convert rows to RefreshToken objects
tokens = []
for row in result.fetchall():
token = RefreshToken(
id=row.id,
user_id=row.user_id,
token=row.token,
expires_at=row.expires_at,
is_revoked=row.is_revoked,
created_at=row.created_at,
revoked_at=row.revoked_at
)
tokens.append(token)
return tokens
except Exception as e:
logger.error("Failed to get active tokens for user",
user_id=user_id,
error=str(e))
raise DatabaseError(f"Failed to get active tokens: {str(e)}")
async def revoke_token(self, token_id: str) -> Optional[RefreshToken]:
"""Revoke a refresh token"""
try:
return await self.update(token_id, {
"is_revoked": True,
"revoked_at": datetime.now(timezone.utc)
})
except Exception as e:
logger.error("Failed to revoke token",
token_id=token_id,
error=str(e))
raise DatabaseError(f"Failed to revoke token: {str(e)}")
async def revoke_all_user_tokens(self, user_id: str) -> int:
"""Revoke all tokens for a user"""
try:
# Use bulk update for efficiency
now = datetime.now(timezone.utc)
query = text("""
UPDATE refresh_tokens
SET is_revoked = true, revoked_at = :revoked_at
WHERE user_id = :user_id AND is_revoked = false
""")
result = await self.session.execute(query, {
"user_id": user_id,
"revoked_at": now
})
revoked_count = result.rowcount
logger.info("Revoked all user tokens",
user_id=user_id,
revoked_count=revoked_count)
return revoked_count
except Exception as e:
logger.error("Failed to revoke all user tokens",
user_id=user_id,
error=str(e))
raise DatabaseError(f"Failed to revoke user tokens: {str(e)}")
async def is_token_valid(self, token: str) -> bool:
"""Check if a token is valid (exists, not revoked, not expired)"""
try:
refresh_token = await self.get_token_by_value(token)
if not refresh_token:
return False
if refresh_token.is_revoked:
return False
if refresh_token.expires_at < datetime.now(timezone.utc):
return False
return True
except Exception as e:
logger.error("Failed to validate token", error=str(e))
return False
async def validate_refresh_token(self, token: str, user_id: str) -> bool:
"""Validate refresh token for a specific user"""
try:
refresh_token = await self.get_token_by_value(token)
if not refresh_token:
logger.debug("Refresh token not found", token_prefix=token[:10] + "...")
return False
# Convert both to strings for comparison to handle UUID vs string mismatch
token_user_id = str(refresh_token.user_id)
expected_user_id = str(user_id)
if token_user_id != expected_user_id:
logger.warning("Refresh token user_id mismatch",
expected_user_id=expected_user_id,
actual_user_id=token_user_id)
return False
if refresh_token.is_revoked:
logger.debug("Refresh token is revoked", user_id=user_id)
return False
if refresh_token.expires_at < datetime.now(timezone.utc):
logger.debug("Refresh token is expired", user_id=user_id)
return False
logger.debug("Refresh token is valid", user_id=user_id)
return True
except Exception as e:
logger.error("Failed to validate refresh token",
user_id=user_id,
error=str(e))
return False
async def cleanup_expired_tokens(self) -> int:
"""Clean up expired refresh tokens"""
try:
now = datetime.now(timezone.utc)
# Delete expired tokens
query = text("""
DELETE FROM refresh_tokens
WHERE expires_at < :now
""")
result = await self.session.execute(query, {"now": now})
deleted_count = result.rowcount
logger.info("Cleaned up expired tokens",
deleted_count=deleted_count)
return deleted_count
except Exception as e:
logger.error("Failed to cleanup expired tokens", error=str(e))
raise DatabaseError(f"Token cleanup failed: {str(e)}")
async def cleanup_old_revoked_tokens(self, days_old: int = 30) -> int:
"""Clean up old revoked tokens"""
try:
cutoff_date = datetime.now(timezone.utc) - timedelta(days=days_old)
query = text("""
DELETE FROM refresh_tokens
WHERE is_revoked = true
AND revoked_at < :cutoff_date
""")
result = await self.session.execute(query, {
"cutoff_date": cutoff_date
})
deleted_count = result.rowcount
logger.info("Cleaned up old revoked tokens",
deleted_count=deleted_count,
days_old=days_old)
return deleted_count
except Exception as e:
logger.error("Failed to cleanup old revoked tokens",
days_old=days_old,
error=str(e))
raise DatabaseError(f"Revoked token cleanup failed: {str(e)}")
async def get_token_statistics(self) -> Dict[str, Any]:
"""Get token statistics"""
try:
now = datetime.now(timezone.utc)
# Get counts with raw queries
stats_query = text("""
SELECT
COUNT(*) as total_tokens,
COUNT(CASE WHEN is_revoked = false AND expires_at > :now THEN 1 END) as active_tokens,
COUNT(CASE WHEN is_revoked = true THEN 1 END) as revoked_tokens,
COUNT(CASE WHEN expires_at <= :now THEN 1 END) as expired_tokens,
COUNT(DISTINCT user_id) as users_with_tokens
FROM refresh_tokens
""")
result = await self.session.execute(stats_query, {"now": now})
row = result.fetchone()
if row:
return {
"total_tokens": row.total_tokens,
"active_tokens": row.active_tokens,
"revoked_tokens": row.revoked_tokens,
"expired_tokens": row.expired_tokens,
"users_with_tokens": row.users_with_tokens
}
return {
"total_tokens": 0,
"active_tokens": 0,
"revoked_tokens": 0,
"expired_tokens": 0,
"users_with_tokens": 0
}
except Exception as e:
logger.error("Failed to get token statistics", error=str(e))
return {
"total_tokens": 0,
"active_tokens": 0,
"revoked_tokens": 0,
"expired_tokens": 0,
"users_with_tokens": 0
}
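# --- Illustrative usage sketch (assumption; the rotation flow shown is hypothetical, not in this file) ---
# A typical refresh-token rotation on top of this repository might look like:
# if await repo.validate_refresh_token(old_token, user_id):
#     old = await repo.get_token_by_value(old_token)
#     await repo.revoke_token(str(old.id))
#     await repo.create_refresh_token(user_id, new_token, expires_at)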

View File

@@ -0,0 +1,277 @@
"""
User Repository
Repository for user operations with authentication-specific queries
"""
from typing import Optional, List, Dict, Any
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, and_, or_, func, desc, text
from datetime import datetime, timezone, timedelta
import structlog
from .base import AuthBaseRepository
from app.models.users import User
from shared.database.exceptions import DatabaseError, ValidationError, DuplicateRecordError
logger = structlog.get_logger()
class UserRepository(AuthBaseRepository):
"""Repository for user operations"""
def __init__(self, model, session: AsyncSession, cache_ttl: Optional[int] = 600):
super().__init__(model, session, cache_ttl)
async def create_user(self, user_data: Dict[str, Any]) -> User:
"""Create a new user with validation"""
try:
# Validate user data
validation_result = self._validate_auth_data(
user_data,
["email", "hashed_password", "full_name", "role"]
)
if not validation_result["is_valid"]:
raise ValidationError(f"Invalid user data: {validation_result['errors']}")
# Check if user already exists
existing_user = await self.get_by_email(user_data["email"])
if existing_user:
raise DuplicateRecordError(f"User with email {user_data['email']} already exists")
# Create user
user = await self.create(user_data)
logger.info("User created successfully",
user_id=user.id,
email=user.email,
role=user.role)
return user
except (ValidationError, DuplicateRecordError):
raise
except Exception as e:
logger.error("Failed to create user",
email=user_data.get("email"),
error=str(e))
raise DatabaseError(f"Failed to create user: {str(e)}")
async def get_user_by_email(self, email: str) -> Optional[User]:
"""Get user by email address"""
return await self.get_by_email(email)
async def get_active_users(self, skip: int = 0, limit: int = 100) -> List[User]:
"""Get all active users"""
return await self.get_active_records(skip=skip, limit=limit)
async def authenticate_user(self, email: str, password: str) -> Optional[User]:
"""Authenticate user with email and plain password"""
try:
user = await self.get_by_email(email)
if not user:
logger.debug("User not found for authentication", email=email)
return None
if not user.is_active:
logger.debug("User account is inactive", email=email)
return None
# Verify password using security manager
from app.core.security import SecurityManager
if SecurityManager.verify_password(password, user.hashed_password):
# Update last login
await self.update_last_login(user.id)
logger.info("User authenticated successfully",
user_id=user.id,
email=email)
return user
logger.debug("Invalid password for user", email=email)
return None
except Exception as e:
logger.error("Authentication failed",
email=email,
error=str(e))
raise DatabaseError(f"Authentication failed: {str(e)}")
async def update_last_login(self, user_id: str) -> Optional[User]:
"""Update user's last login timestamp"""
try:
return await self.update(user_id, {
"last_login": datetime.now(timezone.utc)
})
except Exception as e:
logger.error("Failed to update last login",
user_id=user_id,
error=str(e))
# Don't raise here - last login update is not critical
return None
async def update_user_profile(self, user_id: str, profile_data: Dict[str, Any]) -> Optional[User]:
"""Update user profile information"""
try:
# Remove sensitive fields that shouldn't be updated via profile
profile_data.pop("id", None)
profile_data.pop("hashed_password", None)
profile_data.pop("created_at", None)
profile_data.pop("is_active", None)
# Validate email if being updated
if "email" in profile_data:
validation_result = self._validate_auth_data(
profile_data,
["email"]
)
if not validation_result["is_valid"]:
raise ValidationError(f"Invalid profile data: {validation_result['errors']}")
# Check for email conflicts
existing_user = await self.get_by_email(profile_data["email"])
if existing_user and str(existing_user.id) != str(user_id):
raise DuplicateRecordError(f"Email {profile_data['email']} is already in use")
updated_user = await self.update(user_id, profile_data)
if updated_user:
logger.info("User profile updated",
user_id=user_id,
updated_fields=list(profile_data.keys()))
return updated_user
except (ValidationError, DuplicateRecordError):
raise
except Exception as e:
logger.error("Failed to update user profile",
user_id=user_id,
error=str(e))
raise DatabaseError(f"Failed to update profile: {str(e)}")
async def change_password(self, user_id: str, new_password_hash: str) -> bool:
"""Change user password"""
try:
updated_user = await self.update(user_id, {
"hashed_password": new_password_hash
})
if updated_user:
logger.info("Password changed successfully", user_id=user_id)
return True
return False
except Exception as e:
logger.error("Failed to change password",
user_id=user_id,
error=str(e))
raise DatabaseError(f"Failed to change password: {str(e)}")
async def verify_user_email(self, user_id: str) -> Optional[User]:
"""Mark user email as verified"""
try:
return await self.update(user_id, {
"is_verified": True
})
except Exception as e:
logger.error("Failed to verify user email",
user_id=user_id,
error=str(e))
raise DatabaseError(f"Failed to verify email: {str(e)}")
async def deactivate_user(self, user_id: str) -> Optional[User]:
"""Deactivate user account"""
return await self.deactivate_record(user_id)
async def activate_user(self, user_id: str) -> Optional[User]:
"""Activate user account"""
return await self.activate_record(user_id)
async def get_users_by_role(self, role: str, skip: int = 0, limit: int = 100) -> List[User]:
"""Get users by role"""
try:
return await self.get_multi(
skip=skip,
limit=limit,
filters={"role": role, "is_active": True},
order_by="created_at",
order_desc=True
)
except Exception as e:
logger.error("Failed to get users by role",
role=role,
error=str(e))
raise DatabaseError(f"Failed to get users by role: {str(e)}")
async def search_users(self, search_term: str, skip: int = 0, limit: int = 50) -> List[User]:
"""Search users by email or full name"""
try:
return await self.search(
search_term=search_term,
search_fields=["email", "full_name"],
skip=skip,
limit=limit
)
except Exception as e:
logger.error("Failed to search users",
search_term=search_term,
error=str(e))
raise DatabaseError(f"Failed to search users: {str(e)}")
async def get_user_statistics(self) -> Dict[str, Any]:
"""Get user statistics"""
try:
# Get basic counts
total_users = await self.count()
active_users = await self.count(filters={"is_active": True})
verified_users = await self.count(filters={"is_verified": True})
# Get users by role using raw query
role_query = text("""
SELECT role, COUNT(*) as count
FROM users
WHERE is_active = true
GROUP BY role
ORDER BY count DESC
""")
result = await self.session.execute(role_query)
role_stats = {row.role: row.count for row in result.fetchall()}
# Recent activity (users created in last 30 days)
thirty_days_ago = datetime.now(timezone.utc) - timedelta(days=30)
recent_users_query = text("""
SELECT COUNT(*) as count
FROM users
WHERE created_at >= :thirty_days_ago
""")
recent_result = await self.session.execute(
recent_users_query,
{"thirty_days_ago": thirty_days_ago}
)
recent_users = recent_result.scalar() or 0
return {
"total_users": total_users,
"active_users": active_users,
"inactive_users": total_users - active_users,
"verified_users": verified_users,
"unverified_users": active_users - verified_users,
"recent_registrations": recent_users,
"users_by_role": role_stats
}
except Exception as e:
logger.error("Failed to get user statistics", error=str(e))
return {
"total_users": 0,
"active_users": 0,
"inactive_users": 0,
"verified_users": 0,
"unverified_users": 0,
"recent_registrations": 0,
"users_by_role": {}
}
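# --- Illustrative usage sketch (assumption; the surrounding login flow is hypothetical) ---
# user = await user_repo.authenticate_user(email, password)
# if user is None:
#     raise HTTPException(status_code=401, detail="Invalid credentials")
# # token issuance is handled by the auth service layer, not by this repository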

View File

View File

@@ -0,0 +1,230 @@
# services/auth/app/schemas/auth.py - UPDATED WITH UNIFIED TOKEN RESPONSE
"""
Authentication schemas - Updated with unified token response format
Following industry best practices from Firebase, Cognito, etc.
"""
from pydantic import BaseModel, EmailStr, Field
from typing import Optional, Dict, Any
from datetime import datetime
# ================================================================
# REQUEST SCHEMAS
# ================================================================
class UserRegistration(BaseModel):
"""User registration request"""
email: EmailStr
password: str = Field(..., min_length=8, max_length=128)
full_name: str = Field(..., min_length=1, max_length=255)
tenant_name: Optional[str] = Field(None, max_length=255)
role: Optional[str] = Field("admin", pattern=r'^(user|admin|manager|super_admin)$')
subscription_plan: Optional[str] = Field("starter", description="Selected subscription plan (starter, professional, enterprise)")
billing_cycle: Optional[str] = Field("monthly", description="Billing cycle (monthly, yearly)")
coupon_code: Optional[str] = Field(None, description="Discount coupon code")
payment_method_id: Optional[str] = Field(None, description="Stripe payment method ID")
# GDPR Consent fields
terms_accepted: Optional[bool] = Field(True, description="Accept terms of service")
privacy_accepted: Optional[bool] = Field(True, description="Accept privacy policy")
marketing_consent: Optional[bool] = Field(False, description="Consent to marketing communications")
analytics_consent: Optional[bool] = Field(False, description="Consent to analytics cookies")
class UserLogin(BaseModel):
"""User login request"""
email: EmailStr
password: str
class RefreshTokenRequest(BaseModel):
"""Refresh token request"""
refresh_token: str
class PasswordChange(BaseModel):
"""Password change request"""
current_password: str
new_password: str = Field(..., min_length=8, max_length=128)
class PasswordReset(BaseModel):
"""Password reset request"""
email: EmailStr
class PasswordResetConfirm(BaseModel):
"""Password reset confirmation"""
token: str
new_password: str = Field(..., min_length=8, max_length=128)
# ================================================================
# RESPONSE SCHEMAS
# ================================================================
class UserData(BaseModel):
"""User data embedded in token responses"""
id: str
email: str
full_name: str
is_active: bool
is_verified: bool
created_at: str # ISO format datetime string
tenant_id: Optional[str] = None
role: Optional[str] = "admin"
class TokenResponse(BaseModel):
"""
Unified token response for both registration and login
Follows industry standards (Firebase, AWS Cognito, etc.)
"""
access_token: str
refresh_token: Optional[str] = None
token_type: str = "bearer"
expires_in: int = 3600 # seconds
user: Optional[UserData] = None
subscription_id: Optional[str] = Field(None, description="Subscription ID if created during registration")
# Payment action fields (3DS, SetupIntent, etc.)
requires_action: Optional[bool] = Field(None, description="Whether payment action is required (3DS, SetupIntent confirmation)")
action_type: Optional[str] = Field(None, description="Type of action required (setup_intent_confirmation, payment_intent_confirmation)")
client_secret: Optional[str] = Field(None, description="Client secret for payment confirmation")
payment_intent_id: Optional[str] = Field(None, description="Payment intent ID for 3DS authentication")
setup_intent_id: Optional[str] = Field(None, description="SetupIntent ID for payment method verification")
customer_id: Optional[str] = Field(None, description="Stripe customer ID")
# Additional fields for post-confirmation subscription completion
plan_id: Optional[str] = Field(None, description="Subscription plan ID")
payment_method_id: Optional[str] = Field(None, description="Payment method ID")
trial_period_days: Optional[int] = Field(None, description="Trial period in days")
user_id: Optional[str] = Field(None, description="User ID for post-confirmation processing")
billing_interval: Optional[str] = Field(None, description="Billing interval (monthly, yearly)")
message: Optional[str] = Field(None, description="Additional message about payment action required")
class Config:
schema_extra = {
"example": {
"access_token": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9...",
"refresh_token": "def502004b8b7f8f...",
"token_type": "bearer",
"expires_in": 3600,
"user": {
"id": "123e4567-e89b-12d3-a456-426614174000",
"email": "user@example.com",
"full_name": "John Doe",
"is_active": True,
"is_verified": False,
"created_at": "2025-07-22T10:00:00Z",
"role": "user"
},
"subscription_id": "sub_1234567890",
"requires_action": True,
"action_type": "setup_intent_confirmation",
"client_secret": "seti_1234_secret_5678",
"payment_intent_id": None,
"setup_intent_id": "seti_1234567890",
"customer_id": "cus_1234567890"
}
}
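# --- Illustrative sketch (assumption): how a client might branch on the payment action fields above.
# resp = TokenResponse(**payload)
# if resp.requires_action and resp.action_type == "setup_intent_confirmation":
#     # confirm the SetupIntent on the frontend with resp.client_secret (e.g. Stripe.js),
#     # then call the post-confirmation endpoint to finish subscription creation
#     ...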
class UserResponse(BaseModel):
"""User response for user management endpoints - FIXED"""
id: str
email: str
full_name: str
is_active: bool
is_verified: bool
created_at: datetime # ✅ Changed from str to datetime
last_login: Optional[datetime] = None # ✅ Added missing field
phone: Optional[str] = None # ✅ Added missing field
language: Optional[str] = None # ✅ Added missing field
timezone: Optional[str] = None # ✅ Added missing field
tenant_id: Optional[str] = None
role: Optional[str] = "admin"
payment_customer_id: Optional[str] = None # ✅ Added payment integration field
default_payment_method_id: Optional[str] = None # ✅ Added payment integration field
class Config:
from_attributes = True # ✅ Enable ORM mode for SQLAlchemy objects
class TokenVerification(BaseModel):
"""Token verification response"""
valid: bool
user_id: Optional[str] = None
email: Optional[str] = None
exp: Optional[int] = None
message: Optional[str] = None
class PasswordResetResponse(BaseModel):
"""Password reset response"""
message: str
reset_token: Optional[str] = None
class LogoutResponse(BaseModel):
"""Logout response"""
message: str
success: bool = True
# ================================================================
# ERROR SCHEMAS
# ================================================================
class ErrorDetail(BaseModel):
"""Error detail for API responses"""
message: str
code: Optional[str] = None
field: Optional[str] = None
class ErrorResponse(BaseModel):
"""Standardized error response"""
success: bool = False
error: ErrorDetail
timestamp: str
class Config:
schema_extra = {
"example": {
"success": False,
"error": {
"message": "Invalid credentials",
"code": "AUTH_001"
},
"timestamp": "2025-07-22T10:00:00Z"
}
}
# ================================================================
# VALIDATION SCHEMAS
# ================================================================
class EmailVerificationRequest(BaseModel):
"""Email verification request"""
email: EmailStr
class EmailVerificationConfirm(BaseModel):
"""Email verification confirmation"""
token: str
class ProfileUpdate(BaseModel):
"""Profile update request"""
full_name: Optional[str] = Field(None, min_length=1, max_length=255)
email: Optional[EmailStr] = None
# ================================================================
# INTERNAL SCHEMAS (for service communication)
# ================================================================
class UserContext(BaseModel):
"""User context for internal service communication"""
user_id: str
email: str
tenant_id: Optional[str] = None
roles: list[str] = ["admin"]
is_verified: bool = False
class TokenClaims(BaseModel):
"""JWT token claims structure"""
sub: str # subject (user_id)
email: str
full_name: str
user_id: str
is_verified: bool
tenant_id: Optional[str] = None
iat: int # issued at
exp: int # expires at
iss: str = "bakery-auth" # issuer
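# --- Illustrative sketch (assumption; PyJWT usage shown here is not part of this file) ---
# import jwt
# claims = TokenClaims(**jwt.decode(token, SECRET_KEY, algorithms=["HS256"], issuer="bakery-auth"))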

View File

@@ -0,0 +1,63 @@
# ================================================================
# services/auth/app/schemas/users.py
# ================================================================
"""
User schemas
"""
from pydantic import BaseModel, EmailStr, Field, validator
from typing import Optional, List
from datetime import datetime
from shared.utils.validation import validate_spanish_phone
class UserUpdate(BaseModel):
"""User update schema"""
full_name: Optional[str] = Field(None, min_length=2, max_length=100)
phone: Optional[str] = None
language: Optional[str] = Field(None, pattern="^(es|en)$")
timezone: Optional[str] = None
@validator('phone')
def validate_phone(cls, v):
"""Validate phone number"""
if v and not validate_spanish_phone(v):
raise ValueError('Invalid Spanish phone number')
return v
class UserProfile(BaseModel):
"""User profile schema"""
id: str
email: str
full_name: str
phone: Optional[str]
language: str
timezone: str
is_active: bool
is_verified: bool
created_at: datetime
last_login: Optional[datetime]
class Config:
from_attributes = True
class BatchUserRequest(BaseModel):
"""Request schema for batch user fetch"""
user_ids: List[str] = Field(..., description="List of user IDs to fetch", min_items=1, max_items=100)
class OwnerUserCreate(BaseModel):
"""Schema for owner-created users (pilot phase)"""
email: EmailStr = Field(..., description="User email address")
full_name: str = Field(..., min_length=2, max_length=100, description="Full name of the user")
password: str = Field(..., min_length=8, max_length=128, description="Initial password for the user")
phone: Optional[str] = Field(None, description="Phone number")
role: str = Field("user", pattern="^(user|admin|manager)$", description="User role in the system")
language: Optional[str] = Field("es", pattern="^(es|en|eu)$", description="Preferred language")
timezone: Optional[str] = Field("Europe/Madrid", description="User timezone")
@validator('phone')
def validate_phone_number(cls, v):
"""Validate phone number"""
if v and not validate_spanish_phone(v):
raise ValueError('Invalid Spanish phone number format')
return v

View File

@@ -0,0 +1,18 @@
"""
Auth Service Layer
Business logic services for authentication and user management
"""
from .auth_service import AuthService
from .auth_service import EnhancedAuthService
from .user_service import EnhancedUserService
from .auth_service_clients import AuthServiceClientFactory
from .admin_delete import AdminUserDeleteService
__all__ = [
"AuthService",
"EnhancedAuthService",
"EnhancedUserService",
"AuthServiceClientFactory",
"AdminUserDeleteService"
]

View File

@@ -0,0 +1,624 @@
# ================================================================
# Admin User Delete API - Complete Implementation
# ================================================================
"""
Complete admin user deletion API that handles all associated data
across all microservices in the bakery forecasting platform.
This implementation ensures proper cascade deletion of:
1. User account and authentication data
2. Tenant ownership and memberships
3. All training models and artifacts
4. Forecasts and predictions
5. Notification preferences and logs
6. Refresh tokens and sessions
"""
from fastapi import APIRouter, Depends, HTTPException, status, BackgroundTasks
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, delete, text
from typing import Dict, List, Any, Optional
import structlog
import uuid
from datetime import datetime, timezone
from shared.auth.decorators import get_current_user_dep
from app.core.database import get_db
from app.services.auth_service_clients import AuthServiceClientFactory
from app.core.config import settings
logger = structlog.get_logger()
router = APIRouter()
class AdminUserDeleteService:
"""Service to handle complete admin user deletion across all microservices"""
def __init__(self, db: AsyncSession, event_publisher=None):
self.db = db
self.clients = AuthServiceClientFactory(settings)
self.event_publisher = event_publisher
async def delete_admin_user_complete(self, user_id: str, requesting_user_id: str) -> Dict[str, Any]:
"""
Complete admin user deletion with all associated data using inter-service clients
Args:
user_id: ID of the admin user to delete
requesting_user_id: ID of the user performing the deletion
Returns:
Dictionary with deletion results from all services
"""
deletion_results = {
'user_id': user_id,
'requested_by': requesting_user_id,
'started_at': datetime.utcnow().isoformat(),
'services_processed': {},
'errors': [],
'summary': {}
}
try:
# Step 1: Validate user exists and is admin
user_info = await self._validate_admin_user(user_id)
if not user_info:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail=f"Admin user {user_id} not found"
)
deletion_results['user_info'] = user_info
# Step 2: Get all tenant associations using tenant client
tenant_info = await self._get_user_tenant_info(user_id)
deletion_results['tenant_associations'] = tenant_info
# Step 3: Delete in proper order to respect dependencies
# 3.1 Stop all active training jobs and delete models
training_result = await self._delete_training_data(tenant_info['tenant_ids'])
deletion_results['services_processed']['training'] = training_result
# 3.2 Delete all forecasts and predictions
forecasting_result = await self._delete_forecasting_data(tenant_info['tenant_ids'])
deletion_results['services_processed']['forecasting'] = forecasting_result
# 3.3 Delete notification preferences and logs
notification_result = await self._delete_notification_data(user_id)
deletion_results['services_processed']['notification'] = notification_result
# 3.4 Delete tenant memberships and handle owned tenants
tenant_result = await self._delete_tenant_data(user_id, tenant_info)
deletion_results['services_processed']['tenant'] = tenant_result
# 3.5 Finally delete user account and auth data
auth_result = await self._delete_auth_data(user_id)
deletion_results['services_processed']['auth'] = auth_result
# Step 4: Generate summary
deletion_results['summary'] = await self._generate_deletion_summary(deletion_results)
deletion_results['completed_at'] = datetime.utcnow().isoformat()
deletion_results['status'] = 'success'
# Step 5: Publish deletion event
await self._publish_user_deleted_event(user_id, deletion_results)
# Step 6: Send notification to admins
await self._notify_admins_of_deletion(user_info, deletion_results)
logger.info("Admin user deletion completed successfully",
user_id=user_id,
tenants_affected=len(tenant_info['tenant_ids']))
return deletion_results
except Exception as e:
deletion_results['status'] = 'failed'
deletion_results['error'] = str(e)
deletion_results['completed_at'] = datetime.utcnow().isoformat()
logger.error("Admin user deletion failed",
user_id=user_id,
error=str(e))
# Attempt to publish failure event
try:
await self._publish_user_deletion_failed_event(user_id, str(e))
except Exception:
pass  # best-effort only; the HTTPException below still surfaces the original error
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=f"User deletion failed: {str(e)}"
)
async def _validate_admin_user(self, user_id: str) -> Optional[Dict[str, Any]]:
"""Validate user exists and get basic info from local database"""
try:
from app.models.users import User
from app.models.tokens import RefreshToken
# Query user from local auth database
query = select(User).where(User.id == uuid.UUID(user_id))
result = await self.db.execute(query)
user = result.scalar_one_or_none()
if not user:
return None
return {
'id': str(user.id),
'email': user.email,
'full_name': user.full_name,
'created_at': user.created_at.isoformat() if user.created_at else None,
'is_active': user.is_active,
'is_verified': user.is_verified
}
except Exception as e:
logger.error("Failed to validate admin user", user_id=user_id, error=str(e))
raise
async def _get_user_tenant_info(self, user_id: str) -> Dict[str, Any]:
"""Get all tenant associations for the user using tenant client"""
try:
# Use tenant service client to get memberships
memberships = await self.clients.tenant_client.get_user_tenants(user_id)
if not memberships:
return {
'tenant_ids': [],
'total_tenants': 0,
'owned_tenants': 0,
'memberships': []
}
tenant_ids = [m['tenant_id'] for m in memberships]
owned_tenants = [m for m in memberships if m.get('role') == 'owner']
return {
'tenant_ids': tenant_ids,
'total_tenants': len(tenant_ids),
'owned_tenants': len(owned_tenants),
'memberships': memberships
}
except Exception as e:
logger.error("Failed to get tenant info", user_id=user_id, error=str(e))
return {'tenant_ids': [], 'total_tenants': 0, 'owned_tenants': 0, 'memberships': []}
async def _delete_training_data(self, tenant_ids: List[str]) -> Dict[str, Any]:
"""Delete all training models, jobs, and artifacts for user's tenants"""
result = {
'models_deleted': 0,
'jobs_cancelled': 0,
'artifacts_deleted': 0,
'total_tenants_processed': 0,
'errors': []
}
try:
for tenant_id in tenant_ids:
try:
# Cancel active training jobs using training client
cancel_result = await self.clients.training_client.cancel_tenant_training_jobs(tenant_id)
if cancel_result:
result['jobs_cancelled'] += cancel_result.get('jobs_cancelled', 0)
if cancel_result.get('errors'):
result['errors'].extend(cancel_result['errors'])
# Delete all models and artifacts using training client
delete_result = await self.clients.training_client.delete_tenant_models(tenant_id)
if delete_result:
result['models_deleted'] += delete_result.get('models_deleted', 0)
result['artifacts_deleted'] += delete_result.get('artifacts_deleted', 0)
if delete_result.get('errors'):
result['errors'].extend(delete_result['errors'])
result['total_tenants_processed'] += 1
logger.debug("Training data deleted for tenant",
tenant_id=tenant_id,
models=delete_result.get('models_deleted', 0) if delete_result else 0)
except Exception as e:
error_msg = f"Error deleting training data for tenant {tenant_id}: {str(e)}"
result['errors'].append(error_msg)
logger.error(error_msg)
except Exception as e:
result['errors'].append(f"Training service communication error: {str(e)}")
return result
async def _delete_forecasting_data(self, tenant_ids: List[str]) -> Dict[str, Any]:
"""Delete all forecasts, predictions, and caches for user's tenants"""
result = {
'forecasts_deleted': 0,
'predictions_deleted': 0,
'cache_cleared': 0,
'batches_cancelled': 0,
'total_tenants_processed': 0,
'errors': []
}
try:
for tenant_id in tenant_ids:
try:
# Cancel any active prediction batches
batch_result = await self.clients.forecasting_client.cancel_tenant_prediction_batches(tenant_id)
if batch_result:
result['batches_cancelled'] += batch_result.get('batches_cancelled', 0)
if batch_result.get('errors'):
result['errors'].extend(batch_result['errors'])
# Clear prediction cache
cache_result = await self.clients.forecasting_client.clear_tenant_prediction_cache(tenant_id)
if cache_result:
result['cache_cleared'] += cache_result.get('cache_cleared', 0)
if cache_result.get('errors'):
result['errors'].extend(cache_result['errors'])
# Delete all forecasts for tenant
delete_result = await self.clients.forecasting_client.delete_tenant_forecasts(tenant_id)
if delete_result:
result['forecasts_deleted'] += delete_result.get('forecasts_deleted', 0)
result['predictions_deleted'] += delete_result.get('predictions_deleted', 0)
if delete_result.get('errors'):
result['errors'].extend(delete_result['errors'])
result['total_tenants_processed'] += 1
logger.debug("Forecasting data deleted for tenant",
tenant_id=tenant_id,
forecasts=delete_result.get('forecasts_deleted', 0) if delete_result else 0)
except Exception as e:
error_msg = f"Error deleting forecasting data for tenant {tenant_id}: {str(e)}"
result['errors'].append(error_msg)
logger.error(error_msg)
except Exception as e:
result['errors'].append(f"Forecasting service communication error: {str(e)}")
return result
async def _delete_notification_data(self, user_id: str) -> Dict[str, Any]:
"""Delete notification preferences, logs, and pending notifications"""
result = {
'preferences_deleted': 0,
'notifications_deleted': 0,
'notifications_cancelled': 0,
'logs_deleted': 0,
'errors': []
}
try:
# Cancel pending notifications first
cancel_result = await self.clients.notification_client.cancel_pending_user_notifications(user_id)
if cancel_result:
result['notifications_cancelled'] = cancel_result.get('notifications_cancelled', 0)
if cancel_result.get('errors'):
result['errors'].extend(cancel_result['errors'])
# Delete all notification data for user
delete_result = await self.clients.notification_client.delete_user_notification_data(user_id)
if delete_result:
result['preferences_deleted'] = delete_result.get('preferences_deleted', 0)
result['notifications_deleted'] = delete_result.get('notifications_deleted', 0)
result['logs_deleted'] = delete_result.get('logs_deleted', 0)
if delete_result.get('errors'):
result['errors'].extend(delete_result['errors'])
logger.debug("Notification data deleted for user",
user_id=user_id,
notifications=result['notifications_deleted'])
except Exception as e:
result['errors'].append(f"Notification service communication error: {str(e)}")
return result
async def _delete_tenant_data(self, user_id: str, tenant_info: Dict[str, Any]) -> Dict[str, Any]:
"""Delete tenant memberships and handle owned tenants using tenant client"""
result = {
'memberships_deleted': 0,
'tenants_deleted': 0,
'tenants_transferred': 0,
'errors': []
}
try:
# Handle owned tenants - either delete or transfer ownership
for membership in tenant_info['memberships']:
if membership.get('role') == 'owner':
tenant_id = membership['tenant_id']
try:
# Check if tenant has other admin members who can take ownership
has_other_admins = await self.clients.tenant_client.check_tenant_has_other_admins(
tenant_id, user_id
)
if has_other_admins:
# Get tenant members to find first admin
members = await self.clients.tenant_client.get_tenant_members(tenant_id)
admin_members = [
m for m in members
if m.get('role') == 'admin' and m.get('user_id') != user_id
]
if admin_members:
# Transfer ownership to first admin
transfer_result = await self.clients.tenant_client.transfer_tenant_ownership(
tenant_id, user_id, admin_members[0]['user_id']
)
if transfer_result:
result['tenants_transferred'] += 1
logger.info("Transferred tenant ownership",
tenant_id=tenant_id,
new_owner=admin_members[0]['user_id'])
else:
result['errors'].append(f"Failed to transfer ownership of tenant {tenant_id}")
else:
result['errors'].append(f"No admin members found for tenant {tenant_id}")
else:
# No other admins, delete the tenant completely
delete_result = await self.clients.tenant_client.delete_tenant(tenant_id)
if delete_result:
result['tenants_deleted'] += 1
logger.info("Deleted tenant", tenant_id=tenant_id)
else:
result['errors'].append(f"Failed to delete tenant {tenant_id}")
except Exception as e:
error_msg = f"Error handling owned tenant {tenant_id}: {str(e)}"
result['errors'].append(error_msg)
logger.error(error_msg)
# Delete user's memberships
delete_result = await self.clients.tenant_client.delete_user_memberships(user_id)
if delete_result:
result['memberships_deleted'] = delete_result.get('memberships_deleted', 0)
if delete_result.get('errors'):
result['errors'].extend(delete_result['errors'])
else:
result['errors'].append("Failed to delete user memberships")
except Exception as e:
result['errors'].append(f"Tenant service communication error: {str(e)}")
return result
async def _delete_auth_data(self, user_id: str) -> Dict[str, Any]:
"""Delete user account, refresh tokens, and auth data from local database"""
result = {
'user_deleted': False,
'refresh_tokens_deleted': 0,
'sessions_invalidated': 0,
'errors': []
}
try:
from app.models.users import User
from app.models.tokens import RefreshToken
# Delete refresh tokens
token_delete_query = delete(RefreshToken).where(RefreshToken.user_id == uuid.UUID(user_id))
token_result = await self.db.execute(token_delete_query)
result['refresh_tokens_deleted'] = token_result.rowcount
# Delete user account
user_delete_query = delete(User).where(User.id == uuid.UUID(user_id))
user_result = await self.db.execute(user_delete_query)
if user_result.rowcount > 0:
result['user_deleted'] = True
await self.db.commit()
logger.info("User and tokens deleted from auth database",
user_id=user_id,
tokens_deleted=result['refresh_tokens_deleted'])
else:
result['errors'].append("User not found in auth database")
await self.db.rollback()
except Exception as e:
await self.db.rollback()
error_msg = f"Auth database error: {str(e)}"
result['errors'].append(error_msg)
logger.error(error_msg)
return result
async def _generate_deletion_summary(self, deletion_results: Dict[str, Any]) -> Dict[str, Any]:
"""Generate summary of deletion operation"""
summary = {
'total_tenants_affected': deletion_results['tenant_associations']['total_tenants'],
'total_models_deleted': deletion_results['services_processed']['training']['models_deleted'],
'total_forecasts_deleted': deletion_results['services_processed']['forecasting']['forecasts_deleted'],
'total_notifications_deleted': deletion_results['services_processed']['notification']['notifications_deleted'],
'tenants_transferred': deletion_results['services_processed']['tenant']['tenants_transferred'],
'tenants_deleted': deletion_results['services_processed']['tenant']['tenants_deleted'],
'user_deleted': deletion_results['services_processed']['auth']['user_deleted'],
'total_errors': 0
}
# Count total errors across all services
for service_result in deletion_results['services_processed'].values():
if isinstance(service_result, dict) and 'errors' in service_result:
summary['total_errors'] += len(service_result['errors'])
# Add success indicator
summary['deletion_successful'] = (
summary['user_deleted'] and
summary['total_errors'] == 0
)
return summary
async def _publish_user_deleted_event(self, user_id: str, deletion_results: Dict[str, Any]):
"""Publish user deletion event to message queue"""
if self.event_publisher:
try:
await self.event_publisher.publish_business_event(
event_type="auth.user.deleted",
tenant_id="system",
data={
"user_id": user_id,
"timestamp": datetime.utcnow().isoformat(),
"deletion_summary": deletion_results['summary'],
"services_affected": list(deletion_results['services_processed'].keys())
}
)
logger.info("Published user deletion event", user_id=user_id)
except Exception as e:
logger.error("Failed to publish user deletion event", error=str(e))
async def _publish_user_deletion_failed_event(self, user_id: str, error: str):
"""Publish user deletion failure event"""
if self.event_publisher:
try:
await self.event_publisher.publish_business_event(
event_type="auth.user.deletion_failed",
tenant_id="system",
data={
"user_id": user_id,
"error": error,
"timestamp": datetime.utcnow().isoformat()
}
)
logger.info("Published user deletion failure event", user_id=user_id)
except Exception as e:
logger.error("Failed to publish deletion failure event", error=str(e))
async def _notify_admins_of_deletion(self, user_info: Dict[str, Any], deletion_results: Dict[str, Any]):
"""Send notification to other admins about the user deletion"""
try:
# Get requesting user info for notification
requesting_user_id = deletion_results['requested_by']
requesting_user = await self._validate_admin_user(requesting_user_id)
if requesting_user:
await self.clients.notification_client.send_user_deletion_notification(
admin_email=requesting_user['email'],
deleted_user_email=user_info['email'],
deletion_summary=deletion_results['summary']
)
logger.info("Sent user deletion notification",
deleted_user=user_info['email'],
notified_admin=requesting_user['email'])
except Exception as e:
logger.error("Failed to send admin notification", error=str(e))
async def preview_user_deletion(self, user_id: str) -> Dict[str, Any]:
"""
Preview what data would be deleted for an admin user without actually deleting
"""
try:
# Get user info
user_info = await self._validate_admin_user(user_id)
if not user_info:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail=f"Admin user {user_id} not found"
)
# Get tenant associations
tenant_info = await self._get_user_tenant_info(user_id)
# Get counts from each service
training_models_count = 0
forecasts_count = 0
notifications_count = 0
for tenant_id in tenant_info['tenant_ids']:
try:
# Get training models count
models_count = await self.clients.training_client.get_tenant_models_count(tenant_id)
training_models_count += models_count
# Get forecasts count
tenant_forecasts = await self.clients.forecasting_client.get_tenant_forecasts_count(tenant_id)
forecasts_count += tenant_forecasts
except Exception as e:
logger.warning("Could not get counts for tenant", tenant_id=tenant_id, error=str(e))
try:
# Get user notifications count
notifications_count = await self.clients.notification_client.get_user_notification_count(user_id)
except Exception as e:
logger.warning("Could not get notification count", user_id=user_id, error=str(e))
# Build preview
preview = {
"user": user_info,
"tenant_associations": tenant_info,
"estimated_deletions": {
"training_models": training_models_count,
"forecasts": forecasts_count,
"notifications": notifications_count,
"tenant_memberships": tenant_info['total_tenants'],
"owned_tenants": tenant_info['owned_tenants']
},
"tenant_handling": await self._preview_tenant_handling(user_id, tenant_info),
"warning": "This operation is irreversible and will permanently delete all associated data"
}
return preview
except HTTPException:
raise
except Exception as e:
logger.error("Error generating deletion preview", user_id=user_id, error=str(e))
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to generate deletion preview"
)
async def _preview_tenant_handling(self, user_id: str, tenant_info: Dict[str, Any]) -> List[Dict[str, Any]]:
"""Preview how each owned tenant would be handled"""
tenant_handling = []
for membership in tenant_info['memberships']:
if membership.get('role') == 'owner':
tenant_id = membership['tenant_id']
try:
has_other_admins = await self.clients.tenant_client.check_tenant_has_other_admins(
tenant_id, user_id
)
if has_other_admins:
members = await self.clients.tenant_client.get_tenant_members(tenant_id)
admin_members = [
m for m in members
if m.get('role') == 'admin' and m.get('user_id') != user_id
]
tenant_handling.append({
"tenant_id": tenant_id,
"action": "transfer_ownership",
"details": f"Ownership will be transferred to admin: {admin_members[0]['user_id'] if admin_members else 'Unknown'}"
})
else:
tenant_handling.append({
"tenant_id": tenant_id,
"action": "delete_tenant",
"details": "Tenant will be deleted completely (no other admins found)"
})
except Exception as e:
tenant_handling.append({
"tenant_id": tenant_id,
"action": "error",
"details": f"Could not determine action: {str(e)}"
})
return tenant_handling
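The route handlers for this module are presumably defined further down in the same file and are not shown here. A hypothetical wiring sketch that reuses only the imports already present above; the paths and permission handling are assumptions, not the module's actual endpoints:
@router.get("/users/{user_id}/deletion-preview")
async def preview_admin_user_deletion(
    user_id: str,
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db),
):
    # Dry run: reports what would be removed without touching any service
    return await AdminUserDeleteService(db).preview_user_deletion(user_id)
@router.delete("/users/{user_id}/complete")
async def delete_admin_user_endpoint(
    user_id: str,
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db),
):
    # Cascades across training, forecasting, notification, tenant and auth data
    service = AdminUserDeleteService(db)
    return await service.delete_admin_user_complete(
        user_id=user_id,
        requesting_user_id=current_user["user_id"],
    )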

File diff suppressed because it is too large

View File

@@ -0,0 +1,403 @@
# ================================================================
# Auth Service Inter-Service Communication Clients
# ================================================================
"""
Inter-service communication clients for the Auth Service to communicate
with other microservices in the bakery forecasting platform.
These clients handle authenticated API calls to:
- Tenant Service
- Training Service
- Forecasting Service
- Notification Service
"""
import structlog
from typing import Dict, Any, Optional, List, Union
from shared.clients.base_service_client import BaseServiceClient
from shared.config.base import BaseServiceSettings
logger = structlog.get_logger()
# ================================================================
# TENANT SERVICE CLIENT
# ================================================================
class AuthTenantServiceClient(BaseServiceClient):
"""Client for Auth Service to communicate with Tenant Service"""
def __init__(self, config: BaseServiceSettings):
super().__init__("auth", config)
self.service_url = config.TENANT_SERVICE_URL
def get_service_base_path(self) -> str:
return "/api/v1"
# ================================================================
# USER TENANT OPERATIONS
# ================================================================
async def get_user_tenants(self, user_id: str) -> Optional[List[Dict[str, Any]]]:
"""Get all tenant memberships for a user"""
try:
result = await self.get(f"tenants/user/{user_id}")
return result.get("memberships", []) if result else []
except Exception as e:
logger.error(f"Failed to get user tenants: {str(e)}, user_id: {user_id}")
return []
async def get_user_owned_tenants(self, user_id: str) -> Optional[List[Dict[str, Any]]]:
"""Get tenants owned by a user"""
try:
memberships = await self.get_user_tenants(user_id)
if memberships:
return [m for m in memberships if m.get('role') == 'owner']
return []
except Exception as e:
logger.error(f"Failed to get owned tenants: {str(e)}, user_id: {user_id}")
return []
async def transfer_tenant_ownership(
self,
tenant_id: str,
current_owner_id: str,
new_owner_id: str
) -> Optional[Dict[str, Any]]:
"""Transfer tenant ownership from one user to another"""
try:
data = {
"current_owner_id": current_owner_id,
"new_owner_id": new_owner_id
}
return await self.post(f"tenants/{tenant_id}/transfer-ownership", data=data)
except Exception as e:
logger.error("Failed to transfer tenant ownership",
tenant_id=tenant_id,
error=str(e))
return None
async def delete_tenant(self, tenant_id: str) -> Optional[Dict[str, Any]]:
"""Delete a tenant completely"""
try:
return await self.delete(f"tenants/{tenant_id}")
except Exception as e:
logger.error(f"Failed to delete tenant: {str(e)}, tenant_id: {tenant_id}")
return None
async def delete_user_memberships(self, user_id: str) -> Optional[Dict[str, Any]]:
"""Delete all tenant memberships for a user"""
try:
return await self.delete(f"/tenants/user/{user_id}/memberships")
except Exception as e:
logger.error(f"Failed to delete user memberships: {str(e)}, user_id: {user_id}")
return None
async def get_tenant_members(self, tenant_id: str) -> Optional[List[Dict[str, Any]]]:
"""Get all members of a tenant"""
try:
result = await self.get(f"tenants/{tenant_id}/members")
return result.get("members", []) if result else []
except Exception as e:
logger.error(f"Failed to get tenant members: {str(e)}, tenant_id: {tenant_id}")
return []
async def check_tenant_has_other_admins(self, tenant_id: str, excluding_user_id: str) -> bool:
"""Check if tenant has other admin users besides the excluded one"""
try:
members = await self.get_tenant_members(tenant_id)
if members:
admin_members = [
m for m in members
if m.get('role') in ['admin', 'owner'] and m.get('user_id') != excluding_user_id
]
return len(admin_members) > 0
return False
except Exception as e:
logger.error("Failed to check tenant admins",
tenant_id=tenant_id,
error=str(e))
return False
# ================================================================
# TRAINING SERVICE CLIENT
# ================================================================
class AuthTrainingServiceClient(BaseServiceClient):
"""Client for Auth Service to communicate with Training Service"""
def __init__(self, config: BaseServiceSettings):
super().__init__("auth", config)
self.service_url = config.TRAINING_SERVICE_URL
def get_service_base_path(self) -> str:
return "/api/v1"
# ================================================================
# TRAINING JOB OPERATIONS
# ================================================================
async def cancel_tenant_training_jobs(self, tenant_id: str) -> Optional[Dict[str, Any]]:
"""Cancel all active training jobs for a tenant"""
try:
data = {"tenant_id": tenant_id}
return await self.post("/tenants/{tenant_id}/training/jobs/cancel", data=data)
except Exception as e:
logger.error("Failed to cancel tenant training jobs",
tenant_id=tenant_id,
error=str(e))
return {"jobs_cancelled": 0, "errors": [str(e)]}
async def get_tenant_active_jobs(self, tenant_id: str) -> Optional[List[Dict[str, Any]]]:
"""Get all active training jobs for a tenant"""
try:
params = {"status": "running,queued,pending", "tenant_id": tenant_id}
result = await self.get("/tenants/{tenant_id}/training/jobs/active", params=params)
return result.get("jobs", []) if result else []
except Exception as e:
logger.error("Failed to get tenant active jobs",
tenant_id=tenant_id,
error=str(e))
return []
# ================================================================
# MODEL OPERATIONS
# ================================================================
async def delete_tenant_models(self, tenant_id: str) -> Optional[Dict[str, Any]]:
"""Delete all trained models and artifacts for a tenant"""
try:
return await self.delete(f"models/tenant/{tenant_id}")
except Exception as e:
logger.error("Failed to delete tenant models",
tenant_id=tenant_id,
error=str(e))
return {"models_deleted": 0, "artifacts_deleted": 0, "errors": [str(e)]}
async def get_tenant_models_count(self, tenant_id: str) -> int:
"""Get count of trained models for a tenant"""
try:
result = await self.get(f"models/tenant/{tenant_id}/count")
return result.get("count", 0) if result else 0
except Exception as e:
logger.error("Failed to get tenant models count",
tenant_id=tenant_id,
error=str(e))
return 0
async def get_tenant_model_artifacts(self, tenant_id: str) -> Optional[List[Dict[str, Any]]]:
"""Get all model artifacts for a tenant"""
try:
result = await self.get(f"models/tenant/{tenant_id}/artifacts")
return result.get("artifacts", []) if result else []
except Exception as e:
logger.error("Failed to get tenant model artifacts",
tenant_id=tenant_id,
error=str(e))
return []
# ================================================================
# FORECASTING SERVICE CLIENT
# ================================================================
class AuthForecastingServiceClient(BaseServiceClient):
"""Client for Auth Service to communicate with Forecasting Service"""
def __init__(self, config: BaseServiceSettings):
super().__init__("auth", config)
self.service_url = config.FORECASTING_SERVICE_URL
def get_service_base_path(self) -> str:
return "/api/v1"
# ================================================================
# FORECAST OPERATIONS
# ================================================================
async def delete_tenant_forecasts(self, tenant_id: str) -> Optional[Dict[str, Any]]:
"""Delete all forecasts and predictions for a tenant"""
try:
return await self.delete(f"forecasts/tenant/{tenant_id}")
except Exception as e:
logger.error("Failed to delete tenant forecasts",
tenant_id=tenant_id,
error=str(e))
return {
"forecasts_deleted": 0,
"predictions_deleted": 0,
"cache_cleared": 0,
"errors": [str(e)]
}
async def get_tenant_forecasts_count(self, tenant_id: str) -> int:
"""Get count of forecasts for a tenant"""
try:
result = await self.get(f"forecasts/tenant/{tenant_id}/count")
return result.get("count", 0) if result else 0
except Exception as e:
logger.error("Failed to get tenant forecasts count",
tenant_id=tenant_id,
error=str(e))
return 0
async def clear_tenant_prediction_cache(self, tenant_id: str) -> Optional[Dict[str, Any]]:
"""Clear prediction cache for a tenant"""
try:
return await self.post(f"predictions/cache/clear/{tenant_id}")
except Exception as e:
logger.error("Failed to clear tenant prediction cache",
tenant_id=tenant_id,
error=str(e))
return {"cache_cleared": 0, "errors": [str(e)]}
async def cancel_tenant_prediction_batches(self, tenant_id: str) -> Optional[Dict[str, Any]]:
"""Cancel any active prediction batches for a tenant"""
try:
data = {"tenant_id": tenant_id}
return await self.post("predictions/batches/cancel", data=data)
except Exception as e:
logger.error("Failed to cancel tenant prediction batches",
tenant_id=tenant_id,
error=str(e))
return {"batches_cancelled": 0, "errors": [str(e)]}
# ================================================================
# NOTIFICATION SERVICE CLIENT
# ================================================================
class AuthNotificationServiceClient(BaseServiceClient):
"""Client for Auth Service to communicate with Notification Service"""
def __init__(self, config: BaseServiceSettings):
super().__init__("auth", config)
self.service_url = config.NOTIFICATION_SERVICE_URL
def get_service_base_path(self) -> str:
return "/api/v1"
# ================================================================
# USER NOTIFICATION OPERATIONS
# ================================================================
async def delete_user_notification_data(self, user_id: str) -> Optional[Dict[str, Any]]:
"""Delete all notification data for a user"""
try:
return await self.delete(f"/users/{user_id}/notification-data")
except Exception as e:
logger.error("Failed to delete user notification data",
user_id=user_id,
error=str(e))
return {
"preferences_deleted": 0,
"notifications_deleted": 0,
"logs_deleted": 0,
"errors": [str(e)]
}
async def cancel_pending_user_notifications(self, user_id: str) -> Optional[Dict[str, Any]]:
"""Cancel all pending notifications for a user"""
try:
return await self.post(f"users/{user_id}/notifications/cancel-pending")
except Exception as e:
logger.error("Failed to cancel user notifications",
user_id=user_id,
error=str(e))
return {"notifications_cancelled": 0, "errors": [str(e)]}
async def get_user_notification_count(self, user_id: str) -> int:
"""Get count of notifications for a user"""
try:
result = await self.get(f"users/{user_id}/notifications/count")
return result.get("count", 0) if result else 0
except Exception as e:
logger.error("Failed to get user notification count",
user_id=user_id,
error=str(e))
return 0
async def send_user_deletion_notification(
self,
admin_email: str,
deleted_user_email: str,
deletion_summary: Dict[str, Any]
) -> Optional[Dict[str, Any]]:
"""Send notification about user deletion to administrators"""
try:
data = {
"type": "email",
"recipient_email": admin_email,
"template_key": "user_deletion_notification",
"template_data": {
"deleted_user_email": deleted_user_email,
"deletion_summary": deletion_summary
},
"priority": "high"
}
return await self.post("notifications/send", data=data)
except Exception as e:
logger.error("Failed to send user deletion notification",
admin_email=admin_email,
error=str(e))
return None
# ================================================================
# CLIENT FACTORY
# ================================================================
class AuthServiceClientFactory:
"""Factory for creating inter-service clients for Auth Service"""
def __init__(self, config: BaseServiceSettings):
self.config = config
self._tenant_client = None
self._training_client = None
self._forecasting_client = None
self._notification_client = None
@property
def tenant_client(self) -> AuthTenantServiceClient:
"""Get or create tenant service client"""
if self._tenant_client is None:
self._tenant_client = AuthTenantServiceClient(self.config)
return self._tenant_client
@property
def training_client(self) -> AuthTrainingServiceClient:
"""Get or create training service client"""
if self._training_client is None:
self._training_client = AuthTrainingServiceClient(self.config)
return self._training_client
@property
def forecasting_client(self) -> AuthForecastingServiceClient:
"""Get or create forecasting service client"""
if self._forecasting_client is None:
self._forecasting_client = AuthForecastingServiceClient(self.config)
return self._forecasting_client
@property
def notification_client(self) -> AuthNotificationServiceClient:
"""Get or create notification service client"""
if self._notification_client is None:
self._notification_client = AuthNotificationServiceClient(self.config)
return self._notification_client
async def health_check_all_services(self) -> Dict[str, bool]:
"""Check health of all services"""
results = {}
clients = [
("tenant", self.tenant_client),
("training", self.training_client),
("forecasting", self.forecasting_client),
("notification", self.notification_client)
]
for service_name, client in clients:
try:
health = await client.get("health")
results[service_name] = health is not None and health.get("status") == "healthy"
except Exception as e:
logger.error(f"Health check failed for {service_name} service", error=str(e))
results[service_name] = False
return results
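A short usage sketch for the factory, assuming `settings` carries the four *_SERVICE_URL values that BaseServiceSettings is expected to provide:
from app.core.config import settings
clients = AuthServiceClientFactory(settings)
async def downstream_services_ready() -> bool:
    # Lazily builds each client, then pings every downstream health endpoint
    statuses = await clients.health_check_all_services()
    return all(statuses.values())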

View File

@@ -0,0 +1,200 @@
"""
User data export service for GDPR compliance
Implements Article 15 (Right to Access) and Article 20 (Right to Data Portability)
"""
from typing import Dict, Any, List
from uuid import UUID
from datetime import datetime, timezone
import structlog
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select
from app.models.users import User
from app.models.tokens import RefreshToken, LoginAttempt
from app.models.consent import UserConsent, ConsentHistory
from app.models.onboarding import UserOnboardingProgress
from app.models import AuditLog
logger = structlog.get_logger()
class DataExportService:
"""Service to export all user data in machine-readable format"""
def __init__(self, db: AsyncSession):
self.db = db
async def export_user_data(self, user_id: UUID) -> Dict[str, Any]:
"""
Export all user data from auth service
Returns data in structured JSON format
"""
try:
export_data = {
"export_metadata": {
"user_id": str(user_id),
"export_date": datetime.now(timezone.utc).isoformat(),
"data_controller": "Panadería IA",
"format_version": "1.0",
"gdpr_article": "Article 15 (Right to Access) & Article 20 (Data Portability)"
},
"personal_data": await self._export_personal_data(user_id),
"account_data": await self._export_account_data(user_id),
"consent_data": await self._export_consent_data(user_id),
"security_data": await self._export_security_data(user_id),
"onboarding_data": await self._export_onboarding_data(user_id),
"audit_logs": await self._export_audit_logs(user_id)
}
logger.info("data_export_completed", user_id=str(user_id))
return export_data
except Exception as e:
logger.error("data_export_failed", user_id=str(user_id), error=str(e))
raise
async def _export_personal_data(self, user_id: UUID) -> Dict[str, Any]:
"""Export personal identifiable information"""
query = select(User).where(User.id == user_id)
result = await self.db.execute(query)
user = result.scalar_one_or_none()
if not user:
return {}
return {
"user_id": str(user.id),
"email": user.email,
"full_name": user.full_name,
"phone": user.phone,
"language": user.language,
"timezone": user.timezone,
"is_active": user.is_active,
"is_verified": user.is_verified,
"role": user.role,
"created_at": user.created_at.isoformat() if user.created_at else None,
"updated_at": user.updated_at.isoformat() if user.updated_at else None,
"last_login": user.last_login.isoformat() if user.last_login else None
}
async def _export_account_data(self, user_id: UUID) -> Dict[str, Any]:
"""Export account-related data"""
query = select(RefreshToken).where(RefreshToken.user_id == user_id)
result = await self.db.execute(query)
tokens = result.scalars().all()
active_sessions = []
for token in tokens:
if token.expires_at > datetime.now(timezone.utc) and not token.is_revoked:
active_sessions.append({
"token_id": str(token.id),
"created_at": token.created_at.isoformat() if token.created_at else None,
"expires_at": token.expires_at.isoformat() if token.expires_at else None,
"is_revoked": token.is_revoked
})
return {
"active_sessions_count": len(active_sessions),
"active_sessions": active_sessions,
"total_tokens_issued": len(tokens)
}
async def _export_consent_data(self, user_id: UUID) -> Dict[str, Any]:
"""Export consent history"""
consent_query = select(UserConsent).where(UserConsent.user_id == user_id)
consent_result = await self.db.execute(consent_query)
consents = consent_result.scalars().all()
history_query = select(ConsentHistory).where(ConsentHistory.user_id == user_id)
history_result = await self.db.execute(history_query)
history = history_result.scalars().all()
return {
"current_consent": consents[0].to_dict() if consents else None,
"consent_history": [h.to_dict() for h in history],
"total_consent_changes": len(history)
}
async def _export_security_data(self, user_id: UUID) -> Dict[str, Any]:
"""Export security-related data"""
# First get user email
user_query = select(User).where(User.id == user_id)
user_result = await self.db.execute(user_query)
user = user_result.scalar_one_or_none()
if not user:
return {
"recent_login_attempts": [],
"total_attempts_exported": 0,
"note": "User not found"
}
# LoginAttempt uses email, not user_id
query = select(LoginAttempt).where(
LoginAttempt.email == user.email
).order_by(LoginAttempt.created_at.desc()).limit(50)
result = await self.db.execute(query)
attempts = result.scalars().all()
login_attempts = []
for attempt in attempts:
login_attempts.append({
"attempted_at": attempt.created_at.isoformat() if attempt.created_at else None,
"success": attempt.success,
"ip_address": attempt.ip_address,
"user_agent": attempt.user_agent,
"failure_reason": attempt.failure_reason
})
return {
"recent_login_attempts": login_attempts,
"total_attempts_exported": len(login_attempts),
"note": "Only last 50 login attempts included for data minimization"
}
async def _export_onboarding_data(self, user_id: UUID) -> Dict[str, Any]:
"""Export onboarding progress"""
query = select(UserOnboardingProgress).where(UserOnboardingProgress.user_id == user_id)
result = await self.db.execute(query)
progress = result.scalars().all()
return {
"onboarding_steps": [
{
"step_id": str(p.id),
"step_name": p.step_name,
"completed": p.completed,
"completed_at": p.completed_at.isoformat() if p.completed_at else None
}
for p in progress
]
}
async def _export_audit_logs(self, user_id: UUID) -> Dict[str, Any]:
"""Export audit logs related to user"""
query = select(AuditLog).where(
AuditLog.user_id == user_id
).order_by(AuditLog.created_at.desc()).limit(100)
result = await self.db.execute(query)
logs = result.scalars().all()
return {
"audit_trail": [
{
"log_id": str(log.id),
"action": log.action,
"resource_type": log.resource_type,
"resource_id": log.resource_id,
"severity": log.severity,
"description": log.description,
"ip_address": log.ip_address,
"created_at": log.created_at.isoformat() if log.created_at else None
}
for log in logs
],
"total_logs_exported": len(logs),
"note": "Only last 100 audit logs included for data minimization"
}
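A possible self-service endpoint on top of this service, mirroring the dependency style used elsewhere in the auth service; the route path here is an assumption for illustration:
from uuid import UUID
from fastapi import APIRouter, Depends
from sqlalchemy.ext.asyncio import AsyncSession
from shared.auth.decorators import get_current_user_dep
from app.core.database import get_db
router = APIRouter()
@router.get("/api/v1/auth/me/data-export")
async def export_my_data(
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db),
):
    # Returns the full GDPR export (Articles 15/20) as a JSON document
    service = DataExportService(db)
    return await service.export_user_data(UUID(current_user["user_id"]))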

View File

@@ -0,0 +1,607 @@
"""
Deletion Orchestrator Service
Coordinates tenant deletion across all microservices with saga pattern support
"""
from typing import Dict, List, Any, Optional
from dataclasses import dataclass, field
from datetime import datetime, timezone
from enum import Enum
import structlog
import httpx
import asyncio
from uuid import uuid4, UUID
from sqlalchemy.ext.asyncio import AsyncSession
from app.models.deletion_job import DeletionJob as DeletionJobModel
from app.repositories.deletion_job_repository import DeletionJobRepository
from shared.auth.jwt_handler import JWTHandler
logger = structlog.get_logger()
class DeletionStatus(Enum):
"""Status of deletion job"""
PENDING = "pending"
IN_PROGRESS = "in_progress"
COMPLETED = "completed"
FAILED = "failed"
ROLLED_BACK = "rolled_back"
class ServiceDeletionStatus(Enum):
"""Status of individual service deletion"""
PENDING = "pending"
IN_PROGRESS = "in_progress"
SUCCESS = "success"
FAILED = "failed"
ROLLED_BACK = "rolled_back"
@dataclass
class ServiceDeletionResult:
"""Result from a single service deletion"""
service_name: str
status: ServiceDeletionStatus
deleted_counts: Dict[str, int] = field(default_factory=dict)
errors: List[str] = field(default_factory=list)
duration_seconds: float = 0.0
timestamp: str = field(default_factory=lambda: datetime.now(timezone.utc).isoformat())
@property
def total_deleted(self) -> int:
return sum(self.deleted_counts.values())
@property
def success(self) -> bool:
return self.status == ServiceDeletionStatus.SUCCESS and len(self.errors) == 0
@dataclass
class DeletionJob:
"""Tracks a complete tenant deletion job"""
job_id: str
tenant_id: str
tenant_name: Optional[str] = None
initiated_by: Optional[str] = None
status: DeletionStatus = DeletionStatus.PENDING
service_results: Dict[str, ServiceDeletionResult] = field(default_factory=dict)
started_at: Optional[str] = None
completed_at: Optional[str] = None
error_log: List[str] = field(default_factory=list)
@property
def total_items_deleted(self) -> int:
return sum(result.total_deleted for result in self.service_results.values())
@property
def services_completed(self) -> int:
return sum(1 for r in self.service_results.values()
if r.status == ServiceDeletionStatus.SUCCESS)
@property
def services_failed(self) -> int:
return sum(1 for r in self.service_results.values()
if r.status == ServiceDeletionStatus.FAILED)
def to_dict(self) -> Dict[str, Any]:
"""Convert to dictionary for API responses"""
return {
"job_id": self.job_id,
"tenant_id": self.tenant_id,
"tenant_name": self.tenant_name,
"initiated_by": self.initiated_by,
"status": self.status.value,
"total_items_deleted": self.total_items_deleted,
"services_completed": self.services_completed,
"services_failed": self.services_failed,
"service_results": {
name: {
"status": result.status.value,
"deleted_counts": result.deleted_counts,
"total_deleted": result.total_deleted,
"errors": result.errors,
"duration_seconds": result.duration_seconds
}
for name, result in self.service_results.items()
},
"started_at": self.started_at,
"completed_at": self.completed_at,
"error_log": self.error_log
}
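A small self-contained sketch of how these dataclasses compose; the counts are illustrative:
job = DeletionJob(job_id="demo-job", tenant_id="demo-tenant")
job.service_results["orders"] = ServiceDeletionResult(
    service_name="orders",
    status=ServiceDeletionStatus.SUCCESS,
    deleted_counts={"orders": 42, "order_items": 187},
    duration_seconds=1.3,
)
assert job.total_items_deleted == 229
assert job.services_completed == 1
print(job.to_dict()["service_results"]["orders"]["total_deleted"])  # 229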
class DeletionOrchestrator:
"""
Orchestrates tenant deletion across all microservices
Implements saga pattern for distributed transactions
"""
# Service registry with deletion endpoints
# All services implement DELETE /tenant/{tenant_id} and GET /tenant/{tenant_id}/deletion-preview
# STATUS: 12/12 services implemented (100% COMPLETE)
SERVICE_DELETION_ENDPOINTS = {
# Core business services (6/6 complete)
"orders": "http://orders-service:8000/api/v1/orders/tenant/{tenant_id}",
"inventory": "http://inventory-service:8000/api/v1/inventory/tenant/{tenant_id}",
"recipes": "http://recipes-service:8000/api/v1/recipes/tenant/{tenant_id}",
"production": "http://production-service:8000/api/v1/production/tenant/{tenant_id}",
"sales": "http://sales-service:8000/api/v1/sales/tenant/{tenant_id}",
"suppliers": "http://suppliers-service:8000/api/v1/suppliers/tenant/{tenant_id}",
# Integration services (2/2 complete)
"pos": "http://pos-service:8000/api/v1/pos/tenant/{tenant_id}",
"external": "http://external-service:8000/api/v1/external/tenant/{tenant_id}",
# AI/ML services (2/2 complete)
"forecasting": "http://forecasting-service:8000/api/v1/forecasting/tenant/{tenant_id}",
"training": "http://training-service:8000/api/v1/training/tenant/{tenant_id}",
# Alert and notification services (2/2 complete)
"alert_processor": "http://alert-processor-service:8000/api/v1/alerts/tenant/{tenant_id}",
"notification": "http://notification-service:8000/api/v1/notifications/tenant/{tenant_id}",
}
def __init__(self, auth_token: Optional[str] = None, db: Optional[AsyncSession] = None):
"""
Initialize orchestrator
Args:
auth_token: JWT token for service-to-service authentication (deprecated - will be auto-generated)
db: Database session for persistence (optional for backward compatibility)
"""
self.auth_token = auth_token # Deprecated: kept for backward compatibility
self.db = db
self.jobs: Dict[str, DeletionJob] = {} # In-memory cache for active jobs
# Initialize JWT handler for creating service tokens
from app.core.config import settings
self.jwt_handler = JWTHandler(settings.JWT_SECRET_KEY, settings.JWT_ALGORITHM)
async def _save_job_to_db(self, job: DeletionJob) -> None:
"""Save or update job to database"""
if not self.db:
return
try:
repository = DeletionJobRepository(self.db)
# Check if job exists
existing = await repository.get_by_job_id(job.job_id)
if existing:
# Update existing job
existing.status = job.status.value
existing.service_results = {
name: {
"status": result.status.value,
"deleted_counts": result.deleted_counts,
"total_deleted": result.total_deleted,
"errors": result.errors,
"duration_seconds": result.duration_seconds
}
for name, result in job.service_results.items()
}
existing.total_items_deleted = job.total_items_deleted
existing.services_completed = job.services_completed
existing.services_failed = job.services_failed
existing.error_log = job.error_log
existing.completed_at = datetime.fromisoformat(job.completed_at) if job.completed_at else None
await repository.update(existing)
else:
# Create new job
db_job = DeletionJobModel(
job_id=job.job_id,
tenant_id=UUID(job.tenant_id),
tenant_name=job.tenant_name,
initiated_by=UUID(job.initiated_by) if job.initiated_by else None,
status=job.status.value,
service_results={},
total_items_deleted=0,
services_completed=0,
services_failed=0,
error_log=job.error_log,
started_at=datetime.fromisoformat(job.started_at) if job.started_at else None,
completed_at=None
)
await repository.create(db_job)
await self.db.commit()
except Exception as e:
logger.error("Failed to save job to database", error=str(e), job_id=job.job_id)
# Don't fail the job if database save fails
pass
async def _load_job_from_db(self, job_id: str) -> Optional[DeletionJob]:
"""Load job from database"""
if not self.db:
return None
try:
repository = DeletionJobRepository(self.db)
db_job = await repository.get_by_job_id(job_id)
if not db_job:
return None
# Convert database model to dataclass
job = DeletionJob(
job_id=db_job.job_id,
tenant_id=str(db_job.tenant_id),
tenant_name=db_job.tenant_name,
initiated_by=str(db_job.initiated_by) if db_job.initiated_by else None,
status=DeletionStatus(db_job.status),
started_at=db_job.started_at.isoformat() if db_job.started_at else None,
completed_at=db_job.completed_at.isoformat() if db_job.completed_at else None,
error_log=db_job.error_log or []
)
# Reconstruct service results
if db_job.service_results:
for service_name, result_data in db_job.service_results.items():
job.service_results[service_name] = ServiceDeletionResult(
service_name=service_name,
status=ServiceDeletionStatus(result_data["status"]),
deleted_counts=result_data.get("deleted_counts", {}),
errors=result_data.get("errors", []),
duration_seconds=result_data.get("duration_seconds", 0.0)
)
return job
except Exception as e:
logger.error("Failed to load job from database", error=str(e), job_id=job_id)
return None
async def orchestrate_tenant_deletion(
self,
tenant_id: str,
tenant_name: Optional[str] = None,
initiated_by: Optional[str] = None
) -> DeletionJob:
"""
Orchestrate complete tenant deletion across all services
Args:
tenant_id: Tenant to delete
tenant_name: Name of tenant (for logging)
initiated_by: User ID who initiated deletion
Returns:
DeletionJob with complete results
"""
# Create deletion job
job = DeletionJob(
job_id=str(uuid4()),
tenant_id=tenant_id,
tenant_name=tenant_name,
initiated_by=initiated_by,
status=DeletionStatus.IN_PROGRESS,
started_at=datetime.now(timezone.utc).isoformat()
)
self.jobs[job.job_id] = job
# Save initial job to database
await self._save_job_to_db(job)
logger.info("Starting tenant deletion orchestration",
job_id=job.job_id,
tenant_id=tenant_id,
tenant_name=tenant_name,
service_count=len(self.SERVICE_DELETION_ENDPOINTS))
try:
# Delete data from all services in parallel
service_results = await self._delete_from_all_services(tenant_id)
# Store results in job
for service_name, result in service_results.items():
job.service_results[service_name] = result
# Check if all services succeeded
all_succeeded = all(r.success for r in service_results.values())
if all_succeeded:
job.status = DeletionStatus.COMPLETED
logger.info("Tenant deletion orchestration completed successfully",
job_id=job.job_id,
tenant_id=tenant_id,
total_items_deleted=job.total_items_deleted,
services_completed=job.services_completed)
else:
job.status = DeletionStatus.FAILED
failed_services = [name for name, r in service_results.items() if not r.success]
job.error_log.append(f"Failed services: {', '.join(failed_services)}")
logger.error("Tenant deletion orchestration failed",
job_id=job.job_id,
tenant_id=tenant_id,
failed_services=failed_services,
services_completed=job.services_completed,
services_failed=job.services_failed)
job.completed_at = datetime.now(timezone.utc).isoformat()
# Save final state to database
await self._save_job_to_db(job)
except Exception as e:
job.status = DeletionStatus.FAILED
job.error_log.append(f"Fatal orchestration error: {str(e)}")
job.completed_at = datetime.now(timezone.utc).isoformat()
logger.error("Fatal error during tenant deletion orchestration",
job_id=job.job_id,
tenant_id=tenant_id,
error=str(e))
# Save error state to database
await self._save_job_to_db(job)
return job
async def _delete_from_all_services(
self,
tenant_id: str
) -> Dict[str, ServiceDeletionResult]:
"""
Delete tenant data from all services in parallel
Args:
tenant_id: Tenant to delete
Returns:
Dict mapping service name to deletion result
"""
# Create tasks for parallel execution
tasks = []
service_names = []
for service_name, endpoint_template in self.SERVICE_DELETION_ENDPOINTS.items():
endpoint = endpoint_template.format(tenant_id=tenant_id)
task = self._delete_from_service(service_name, endpoint, tenant_id)
tasks.append(task)
service_names.append(service_name)
# Execute all deletions in parallel
results = await asyncio.gather(*tasks, return_exceptions=True)
# Build result dictionary
service_results = {}
for service_name, result in zip(service_names, results):
if isinstance(result, Exception):
# Task raised an exception
service_results[service_name] = ServiceDeletionResult(
service_name=service_name,
status=ServiceDeletionStatus.FAILED,
errors=[f"Exception: {str(result)}"]
)
else:
service_results[service_name] = result
return service_results
async def _delete_from_service(
self,
service_name: str,
endpoint: str,
tenant_id: str
) -> ServiceDeletionResult:
"""
Delete tenant data from a single service
Args:
service_name: Name of the service
endpoint: Full URL endpoint for deletion
tenant_id: Tenant to delete
Returns:
ServiceDeletionResult with deletion details
"""
start_time = datetime.now(timezone.utc)
logger.info("Calling service deletion endpoint",
service=service_name,
endpoint=endpoint,
tenant_id=tenant_id)
try:
# Always create a service token with tenant context for secure service-to-service communication
service_token = self.jwt_handler.create_service_token(
service_name="auth",
tenant_id=tenant_id
)
headers = {
"Authorization": f"Bearer {service_token}",
"X-Service": "auth-service",
"Content-Type": "application/json"
}
async with httpx.AsyncClient(timeout=60.0) as client:
response = await client.delete(endpoint, headers=headers)
duration = (datetime.now(timezone.utc) - start_time).total_seconds()
if response.status_code == 200:
data = response.json()
summary = data.get("summary", {})
result = ServiceDeletionResult(
service_name=service_name,
status=ServiceDeletionStatus.SUCCESS,
deleted_counts=summary.get("deleted_counts", {}),
errors=summary.get("errors", []),
duration_seconds=duration
)
logger.info("Service deletion succeeded",
service=service_name,
deleted_counts=result.deleted_counts,
total_deleted=result.total_deleted,
duration=duration)
return result
elif response.status_code == 404:
# Service/endpoint doesn't exist yet - not an error
logger.warning("Service deletion endpoint not found (not yet implemented)",
service=service_name,
endpoint=endpoint)
return ServiceDeletionResult(
service_name=service_name,
status=ServiceDeletionStatus.SUCCESS, # Treat as success
errors=[f"Endpoint not implemented yet: {endpoint}"],
duration_seconds=duration
)
else:
# Deletion failed
error_msg = f"HTTP {response.status_code}: {response.text}"
logger.error("Service deletion failed",
service=service_name,
status_code=response.status_code,
error=error_msg)
return ServiceDeletionResult(
service_name=service_name,
status=ServiceDeletionStatus.FAILED,
errors=[error_msg],
duration_seconds=duration
)
except httpx.TimeoutException:
duration = (datetime.now(timezone.utc) - start_time).total_seconds()
error_msg = f"Request timeout after {duration}s"
logger.error("Service deletion timeout",
service=service_name,
endpoint=endpoint,
duration=duration)
return ServiceDeletionResult(
service_name=service_name,
status=ServiceDeletionStatus.FAILED,
errors=[error_msg],
duration_seconds=duration
)
except Exception as e:
duration = (datetime.now(timezone.utc) - start_time).total_seconds()
error_msg = f"Exception: {str(e)}"
logger.error("Service deletion exception",
service=service_name,
endpoint=endpoint,
error=str(e))
return ServiceDeletionResult(
service_name=service_name,
status=ServiceDeletionStatus.FAILED,
errors=[error_msg],
duration_seconds=duration
)
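For reference, the success-path response body this method expects from each service's DELETE endpoint, inferred from the parsing above; the keys inside deleted_counts are up to each service:
    {
        "summary": {
            "deleted_counts": {"orders": 42, "order_items": 187},
            "errors": []
        }
    }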
async def get_job_status(self, job_id: str) -> Optional[Dict[str, Any]]:
"""
Get status of a deletion job
Args:
job_id: Job ID to query
Returns:
Job status dict or None if not found
"""
# Try in-memory cache first
job = self.jobs.get(job_id)
if job:
return job.to_dict()
# Try loading from database
job = await self._load_job_from_db(job_id)
if job:
self.jobs[job_id] = job # Cache it
return job.to_dict()
return None
async def list_jobs(
self,
tenant_id: Optional[str] = None,
status: Optional[DeletionStatus] = None,
limit: int = 100
) -> List[Dict[str, Any]]:
"""
List deletion jobs with optional filters
Args:
tenant_id: Filter by tenant ID
status: Filter by status
limit: Maximum number of jobs to return
Returns:
List of job dicts
"""
# If database is available, load from database
if self.db:
try:
repository = DeletionJobRepository(self.db)
if tenant_id:
db_jobs = await repository.list_by_tenant(
UUID(tenant_id),
status=status.value if status else None,
limit=limit
)
else:
db_jobs = await repository.list_all(
status=status.value if status else None,
limit=limit
)
# Convert to job dicts
jobs = []
for db_job in db_jobs:
job_dict = {
"job_id": db_job.job_id,
"tenant_id": str(db_job.tenant_id),
"tenant_name": db_job.tenant_name,
"initiated_by": str(db_job.initiated_by) if db_job.initiated_by else None,
"status": db_job.status,
"total_items_deleted": db_job.total_items_deleted,
"services_completed": db_job.services_completed,
"services_failed": db_job.services_failed,
"service_results": db_job.service_results or {},
"started_at": db_job.started_at.isoformat() if db_job.started_at else None,
"completed_at": db_job.completed_at.isoformat() if db_job.completed_at else None,
"error_log": db_job.error_log or []
}
jobs.append(job_dict)
return jobs
except Exception as e:
logger.error("Failed to list jobs from database", error=str(e))
# Fall back to in-memory cache
jobs = list(self.jobs.values())
# Apply filters
if tenant_id:
jobs = [j for j in jobs if j.tenant_id == tenant_id]
if status:
jobs = [j for j in jobs if j.status == status]
# Sort by started_at descending
jobs.sort(key=lambda j: j.started_at or "", reverse=True)
# Apply limit
jobs = jobs[:limit]
return [job.to_dict() for job in jobs]
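A usage sketch, assuming the caller already holds an AsyncSession and runs the deletion from a background task; a later request can poll the persisted job:
async def delete_tenant_in_background(db: AsyncSession, tenant_id: str, admin_user_id: str) -> str:
    orchestrator = DeletionOrchestrator(db=db)
    job = await orchestrator.orchestrate_tenant_deletion(
        tenant_id=tenant_id,
        initiated_by=admin_user_id,
    )
    # The job was saved via _save_job_to_db, so its status survives process restarts
    status_dict = await orchestrator.get_job_status(job.job_id)
    return status_dict["status"] if status_dict else job.status.value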

View File

@@ -0,0 +1,525 @@
"""
Enhanced User Service
Updated to use repository pattern with dependency injection and improved error handling
"""
from datetime import datetime, timezone
from typing import Dict, Any, List, Optional
from sqlalchemy.ext.asyncio import AsyncSession
from fastapi import HTTPException, status
import structlog
from app.repositories import UserRepository, TokenRepository
from app.schemas.auth import UserResponse
from app.schemas.users import UserUpdate
from app.models.users import User
from app.models.tokens import RefreshToken
from app.core.security import SecurityManager
from shared.database.unit_of_work import UnitOfWork
from shared.database.transactions import transactional
from shared.database.exceptions import DatabaseError, ValidationError
logger = structlog.get_logger()
class EnhancedUserService:
"""Enhanced user management service using repository pattern"""
def __init__(self, database_manager):
"""Initialize service with database manager"""
self.database_manager = database_manager
async def get_user_by_id(self, user_id: str, session: Optional[AsyncSession] = None) -> Optional[UserResponse]:
"""Get user by ID using repository pattern"""
try:
if session:
# Use provided session (for direct session injection)
user_repo = UserRepository(User, session)
user = await user_repo.get_by_id(user_id)
else:
# Use database manager to get session
async with self.database_manager.get_session() as db_session:
user_repo = UserRepository(User, db_session)
user = await user_repo.get_by_id(user_id)
if not user:
return None
return UserResponse(
id=str(user.id),
email=user.email,
full_name=user.full_name,
is_active=user.is_active,
is_verified=user.is_verified,
created_at=user.created_at,
role=user.role,
phone=getattr(user, 'phone', None),
language=getattr(user, 'language', None),
timezone=getattr(user, 'timezone', None)
)
except Exception as e:
logger.error("Failed to get user by ID using repository pattern",
user_id=user_id,
error=str(e))
raise DatabaseError(f"Failed to get user: {str(e)}")
async def get_user_by_email(self, email: str) -> Optional[UserResponse]:
"""Get user by email using repository pattern"""
try:
async with self.database_manager.get_session() as session:
user_repo = UserRepository(User, session)
user = await user_repo.get_by_email(email)
if not user:
return None
return UserResponse(
id=str(user.id),
email=user.email,
full_name=user.full_name,
is_active=user.is_active,
is_verified=user.is_verified,
created_at=user.created_at,
role=user.role,
phone=getattr(user, 'phone', None),
language=getattr(user, 'language', None),
timezone=getattr(user, 'timezone', None)
)
except Exception as e:
logger.error("Failed to get user by email using repository pattern",
email=email,
error=str(e))
raise DatabaseError(f"Failed to get user: {str(e)}")
async def get_users_list(
self,
skip: int = 0,
limit: int = 100,
active_only: bool = True,
        role: Optional[str] = None
) -> List[UserResponse]:
"""Get paginated list of users using repository pattern"""
try:
async with self.database_manager.get_session() as session:
user_repo = UserRepository(User, session)
filters = {}
if active_only:
filters["is_active"] = True
if role:
filters["role"] = role
users = await user_repo.get_multi(
filters=filters,
skip=skip,
limit=limit,
order_by="created_at",
order_desc=True
)
return [
UserResponse(
id=str(user.id),
email=user.email,
full_name=user.full_name,
is_active=user.is_active,
is_verified=user.is_verified,
created_at=user.created_at,
role=user.role,
phone=getattr(user, 'phone', None),
language=getattr(user, 'language', None),
timezone=getattr(user, 'timezone', None)
)
for user in users
]
except Exception as e:
logger.error("Failed to get users list using repository pattern", error=str(e))
return []
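    # Example (illustrative): fetching the second page of active managers. The
    # filter keys mirror the repository's get_multi() call above; pagination is
    # plain offset/limit and `user_service` is a placeholder for an
    # EnhancedUserService instance.
    #
    #     managers = await user_service.get_users_list(
    #         skip=100, limit=100, active_only=True, role="manager"
    #     )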
@transactional
async def update_user(
self,
user_id: str,
user_data: UserUpdate,
session=None
) -> Optional[UserResponse]:
"""Update user information using repository pattern"""
try:
async with self.database_manager.get_session() as db_session:
user_repo = UserRepository(User, db_session)
# Validate user exists
existing_user = await user_repo.get_by_id(user_id)
if not existing_user:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail="User not found"
)
# Prepare update data
update_data = {}
if user_data.full_name is not None:
update_data["full_name"] = user_data.full_name
if user_data.phone is not None:
update_data["phone"] = user_data.phone
if user_data.language is not None:
update_data["language"] = user_data.language
if user_data.timezone is not None:
update_data["timezone"] = user_data.timezone
if not update_data:
# No updates to apply
return UserResponse(
id=str(existing_user.id),
email=existing_user.email,
full_name=existing_user.full_name,
is_active=existing_user.is_active,
is_verified=existing_user.is_verified,
created_at=existing_user.created_at,
role=existing_user.role
)
# Update user using repository
updated_user = await user_repo.update(user_id, update_data)
if not updated_user:
raise DatabaseError("Failed to update user")
logger.info("User updated successfully using repository pattern",
user_id=user_id,
updated_fields=list(update_data.keys()))
return UserResponse(
id=str(updated_user.id),
email=updated_user.email,
full_name=updated_user.full_name,
is_active=updated_user.is_active,
is_verified=updated_user.is_verified,
created_at=updated_user.created_at,
role=updated_user.role,
phone=getattr(updated_user, 'phone', None),
language=getattr(updated_user, 'language', None),
timezone=getattr(updated_user, 'timezone', None)
)
except HTTPException:
raise
except Exception as e:
logger.error("Failed to update user using repository pattern",
user_id=user_id,
error=str(e))
raise DatabaseError(f"Failed to update user: {str(e)}")
@transactional
async def change_password(
self,
user_id: str,
current_password: str,
new_password: str,
session=None
) -> bool:
"""Change user password using repository pattern"""
try:
async with self.database_manager.get_session() as db_session:
async with UnitOfWork(db_session) as uow:
# Register repositories
user_repo = uow.register_repository("users", UserRepository)
token_repo = uow.register_repository("tokens", TokenRepository)
# Get user and verify current password
user = await user_repo.get_by_id(user_id)
if not user:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail="User not found"
)
if not SecurityManager.verify_password(current_password, user.hashed_password):
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail="Current password is incorrect"
)
# Hash new password and update
new_hashed_password = SecurityManager.hash_password(new_password)
await user_repo.update(user_id, {"hashed_password": new_hashed_password})
# Revoke all existing tokens for security
await token_repo.revoke_user_tokens(user_id)
# Commit transaction
await uow.commit()
logger.info("Password changed successfully using repository pattern",
user_id=user_id)
return True
except HTTPException:
raise
except Exception as e:
logger.error("Failed to change password using repository pattern",
user_id=user_id,
error=str(e))
raise DatabaseError(f"Failed to change password: {str(e)}")
@transactional
async def deactivate_user(self, user_id: str, admin_user_id: str, session=None) -> bool:
"""Deactivate user account using repository pattern"""
try:
async with self.database_manager.get_session() as db_session:
async with UnitOfWork(db_session) as uow:
# Register repositories
user_repo = uow.register_repository("users", UserRepository)
token_repo = uow.register_repository("tokens", TokenRepository)
# Verify user exists
user = await user_repo.get_by_id(user_id)
if not user:
return False
# Update user status (soft delete)
updated_user = await user_repo.update(user_id, {"is_active": False})
if not updated_user:
return False
# Revoke all tokens
await token_repo.revoke_user_tokens(user_id)
# Commit transaction
await uow.commit()
logger.info("User deactivated successfully using repository pattern",
user_id=user_id,
admin_user_id=admin_user_id)
return True
except Exception as e:
logger.error("Failed to deactivate user using repository pattern",
user_id=user_id,
error=str(e))
return False
@transactional
async def activate_user(self, user_id: str, admin_user_id: str, session=None) -> bool:
"""Activate user account using repository pattern"""
try:
async with self.database_manager.get_session() as db_session:
user_repo = UserRepository(User, db_session)
# Update user status
updated_user = await user_repo.update(user_id, {"is_active": True})
if not updated_user:
return False
logger.info("User activated successfully using repository pattern",
user_id=user_id,
admin_user_id=admin_user_id)
return True
except Exception as e:
logger.error("Failed to activate user using repository pattern",
user_id=user_id,
error=str(e))
return False
async def verify_user_email(self, user_id: str, verification_token: str) -> bool:
"""Verify user email using repository pattern"""
try:
async with self.database_manager.get_session() as session:
user_repo = UserRepository(User, session)
# In a real implementation, you'd verify the verification_token
# For now, just mark user as verified
updated_user = await user_repo.update(user_id, {"is_verified": True})
if updated_user:
logger.info("User email verified using repository pattern",
user_id=user_id)
return True
return False
except Exception as e:
logger.error("Failed to verify email using repository pattern",
user_id=user_id,
error=str(e))
return False
async def get_user_statistics(self) -> Dict[str, Any]:
"""Get user statistics using repository pattern"""
try:
async with self.database_manager.get_session() as session:
user_repo = UserRepository(User, session)
# Get basic user statistics
statistics = await user_repo.get_user_statistics()
return statistics
except Exception as e:
logger.error("Failed to get user statistics using repository pattern", error=str(e))
return {
"total_users": 0,
"active_users": 0,
"verified_users": 0,
"users_by_role": {},
"recent_registrations_7d": 0
}
async def search_users(
self,
search_term: str,
        role: Optional[str] = None,
active_only: bool = True,
skip: int = 0,
limit: int = 50
) -> List[UserResponse]:
"""Search users by email or name using repository pattern"""
try:
async with self.database_manager.get_session() as session:
user_repo = UserRepository(User, session)
users = await user_repo.search_users(
search_term, role, active_only, skip, limit
)
return [
UserResponse(
id=str(user.id),
email=user.email,
full_name=user.full_name,
is_active=user.is_active,
is_verified=user.is_verified,
created_at=user.created_at,
role=user.role,
phone=getattr(user, 'phone', None),
language=getattr(user, 'language', None),
timezone=getattr(user, 'timezone', None)
)
for user in users
]
except Exception as e:
logger.error("Failed to search users using repository pattern",
search_term=search_term,
error=str(e))
return []
async def update_user_role(
self,
user_id: str,
new_role: str,
admin_user_id: str
) -> Optional[UserResponse]:
"""Update user role using repository pattern"""
try:
async with self.database_manager.get_session() as session:
user_repo = UserRepository(User, session)
# Validate role
valid_roles = ["user", "admin", "manager", "super_admin"]
if new_role not in valid_roles:
raise ValidationError(f"Invalid role. Must be one of: {valid_roles}")
# Update user role
updated_user = await user_repo.update(user_id, {"role": new_role})
if not updated_user:
return None
logger.info("User role updated using repository pattern",
user_id=user_id,
new_role=new_role,
admin_user_id=admin_user_id)
return UserResponse(
id=str(updated_user.id),
email=updated_user.email,
full_name=updated_user.full_name,
is_active=updated_user.is_active,
is_verified=updated_user.is_verified,
created_at=updated_user.created_at,
role=updated_user.role,
phone=getattr(updated_user, 'phone', None),
language=getattr(updated_user, 'language', None),
timezone=getattr(updated_user, 'timezone', None)
)
except ValidationError:
raise
except Exception as e:
logger.error("Failed to update user role using repository pattern",
user_id=user_id,
new_role=new_role,
error=str(e))
raise DatabaseError(f"Failed to update role: {str(e)}")
async def update_user_field(
self,
user_id: str,
field_name: str,
field_value: Any
) -> bool:
"""Update a single field on a user record"""
try:
async with self.database_manager.get_session() as session:
user_repo = UserRepository(User, session)
# Update the specific field
updated_user = await user_repo.update(user_id, {field_name: field_value})
if not updated_user:
logger.error("User not found for field update",
user_id=user_id,
field_name=field_name)
return False
await session.commit()
logger.info("User field updated",
user_id=user_id,
field_name=field_name)
return True
except Exception as e:
logger.error("Failed to update user field",
user_id=user_id,
field_name=field_name,
error=str(e))
return False
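    # Example (illustrative): this generic helper suits one-off updates that do
    # not warrant a dedicated method, e.g. flipping a boolean column. The field
    # name must be a real column on the User model; `user_service` is a
    # placeholder instance.
    #
    #     await user_service.update_user_field(user_id, "is_verified", True)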
async def get_user_activity(self, user_id: str) -> Dict[str, Any]:
"""Get user activity information using repository pattern"""
try:
async with self.database_manager.get_session() as session:
user_repo = UserRepository(User, session)
token_repo = TokenRepository(RefreshToken, session)
# Get user
user = await user_repo.get_by_id(user_id)
if not user:
return {"error": "User not found"}
# Get token activity
active_tokens = await token_repo.get_active_tokens_for_user(user_id)
return {
"user_id": user_id,
"last_login": user.last_login.isoformat() if user.last_login else None,
"account_created": user.created_at.isoformat(),
"is_active": user.is_active,
"is_verified": user.is_verified,
"active_sessions": len(active_tokens),
"last_activity": max([token.created_at for token in active_tokens]).isoformat() if active_tokens else None
}
except Exception as e:
logger.error("Failed to get user activity using repository pattern",
user_id=user_id,
error=str(e))
return {"error": str(e)}

View File

@@ -0,0 +1,126 @@
"""Fetches subscription data for JWT enrichment at login time"""
from typing import Dict, Any, Optional, List
import logging
from fastapi import HTTPException, status
from shared.clients.tenant_client import TenantServiceClient
from shared.config.base import BaseServiceSettings
logger = logging.getLogger(__name__)
class SubscriptionFetcher:
def __init__(self, config: BaseServiceSettings):
"""
Initialize SubscriptionFetcher with service configuration
Args:
config: BaseServiceSettings containing service configuration
"""
self.tenant_client = TenantServiceClient(config)
logger.info("SubscriptionFetcher initialized with TenantServiceClient")
async def get_user_subscription_context(
self,
user_id: str
) -> Dict[str, Any]:
"""
Fetch user's tenant memberships and subscription data using shared tenant client.
Called ONCE at login, not per-request.
This method uses the shared TenantServiceClient instead of direct HTTP calls,
providing better error handling, circuit breaking, and consistency.
Returns:
{
"tenant_id": "primary-tenant-uuid",
"tenant_role": "owner",
"subscription": {
"tier": "professional",
"status": "active",
"valid_until": "2025-02-15T00:00:00Z"
},
"tenant_access": [
{"id": "uuid", "role": "admin", "tier": "starter"}
]
}
"""
try:
logger.debug("Fetching subscription data for user: %s", user_id)
# Get user's tenant memberships using shared tenant client
memberships = await self.tenant_client.get_user_memberships(user_id)
if not memberships:
logger.info(f"User {user_id} has no tenant memberships - returning default subscription context")
return {
"tenant_id": None,
"tenant_role": None,
"subscription": {
"tier": "starter",
"status": "active",
"valid_until": None
},
"tenant_access": []
}
# Get primary tenant (first one, or the one with highest role)
primary_membership = memberships[0]
for membership in memberships:
if membership.get("role") == "owner":
primary_membership = membership
break
primary_tenant_id = primary_membership["tenant_id"]
primary_role = primary_membership["role"]
# Get subscription for primary tenant using shared tenant client
subscription_data = await self.tenant_client.get_subscription_details(primary_tenant_id)
if not subscription_data:
logger.warning(f"No subscription data found for primary tenant {primary_tenant_id}")
# Return with basic info but no subscription
return {
"tenant_id": primary_tenant_id,
"tenant_role": primary_role,
"subscription": None,
"tenant_access": memberships
}
# Build tenant access list with subscription info
tenant_access = []
for membership in memberships:
tenant_id = membership["tenant_id"]
role = membership["role"]
# Get subscription for each tenant using shared tenant client
tenant_sub = await self.tenant_client.get_subscription_details(tenant_id)
tier = "starter" # default
if tenant_sub:
tier = tenant_sub.get("plan", "starter")
tenant_access.append({
"id": tenant_id,
"role": role,
"tier": tier
})
return {
"tenant_id": primary_tenant_id,
"tenant_role": primary_role,
"subscription": {
"tier": subscription_data.get("plan", "starter"),
"status": subscription_data.get("status", "active"),
"valid_until": subscription_data.get("valid_until", None)
},
"tenant_access": tenant_access
}
except Exception as e:
logger.error(f"Error fetching subscription data: {str(e)}", exc_info=True)
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=f"Error fetching subscription data: {str(e)}"
)
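# Illustrative login-time usage, added for documentation only: fetch the
# subscription context once and fold it into the JWT claims so downstream
# services never call the tenant service per request. The claim names and the
# surrounding token-creation step are assumptions, not part of this module.
async def _example_enrich_claims(config: BaseServiceSettings, user_id: str) -> Dict[str, Any]:
    fetcher = SubscriptionFetcher(config)
    context = await fetcher.get_user_subscription_context(user_id)
    return {
        "sub": user_id,
        "tenant_id": context["tenant_id"],
        "tenant_role": context["tenant_role"],
        "subscription": context["subscription"],
        "tenant_access": context["tenant_access"],
    }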