Improve the frontend 5

Urtzi Alfaro
2025-11-02 20:24:44 +01:00
parent 0220da1725
commit 5adb0e39c0
90 changed files with 10658 additions and 2548 deletions

services/external/app/api/audit.py

@@ -0,0 +1,237 @@
# services/external/app/api/audit.py
"""
Audit Logs API - Retrieve audit trail for external service
"""
from fastapi import APIRouter, Depends, HTTPException, Query, Path, status
from typing import Optional, Dict, Any
from uuid import UUID
from datetime import datetime
import structlog
from sqlalchemy import select, func, and_
from sqlalchemy.ext.asyncio import AsyncSession
from app.models import AuditLog
from shared.auth.decorators import get_current_user_dep
from shared.auth.access_control import require_user_role
from shared.routing import RouteBuilder
from shared.models.audit_log_schemas import (
AuditLogResponse,
AuditLogListResponse,
AuditLogStatsResponse
)
from app.core.database import database_manager
route_builder = RouteBuilder('external')
router = APIRouter(tags=["audit-logs"])
logger = structlog.get_logger()
async def get_db():
"""Database session dependency"""
async with database_manager.get_session() as session:
yield session
@router.get(
route_builder.build_base_route("audit-logs"),
response_model=AuditLogListResponse
)
@require_user_role(['admin', 'owner'])
async def get_audit_logs(
tenant_id: UUID = Path(..., description="Tenant ID"),
start_date: Optional[datetime] = Query(None, description="Filter logs from this date"),
end_date: Optional[datetime] = Query(None, description="Filter logs until this date"),
user_id: Optional[UUID] = Query(None, description="Filter by user ID"),
action: Optional[str] = Query(None, description="Filter by action type"),
resource_type: Optional[str] = Query(None, description="Filter by resource type"),
severity: Optional[str] = Query(None, description="Filter by severity level"),
search: Optional[str] = Query(None, description="Search in description field"),
limit: int = Query(100, ge=1, le=1000, description="Number of records to return"),
offset: int = Query(0, ge=0, description="Number of records to skip"),
current_user: Dict[str, Any] = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""
Get audit logs for external service.
Requires admin or owner role.
"""
try:
logger.info(
"Retrieving audit logs",
tenant_id=tenant_id,
user_id=current_user.get("user_id"),
filters={
"start_date": start_date,
"end_date": end_date,
"action": action,
"resource_type": resource_type,
"severity": severity
}
)
# Build query filters
filters = [AuditLog.tenant_id == tenant_id]
if start_date:
filters.append(AuditLog.created_at >= start_date)
if end_date:
filters.append(AuditLog.created_at <= end_date)
if user_id:
filters.append(AuditLog.user_id == user_id)
if action:
filters.append(AuditLog.action == action)
if resource_type:
filters.append(AuditLog.resource_type == resource_type)
if severity:
filters.append(AuditLog.severity == severity)
if search:
filters.append(AuditLog.description.ilike(f"%{search}%"))
# Count total matching records
count_query = select(func.count()).select_from(AuditLog).where(and_(*filters))
total_result = await db.execute(count_query)
total = total_result.scalar() or 0
# Fetch paginated results
query = (
select(AuditLog)
.where(and_(*filters))
.order_by(AuditLog.created_at.desc())
.limit(limit)
.offset(offset)
)
result = await db.execute(query)
audit_logs = result.scalars().all()
# Convert to response models
items = [AuditLogResponse.from_orm(log) for log in audit_logs]
logger.info(
"Successfully retrieved audit logs",
tenant_id=tenant_id,
total=total,
returned=len(items)
)
return AuditLogListResponse(
items=items,
total=total,
limit=limit,
offset=offset,
has_more=(offset + len(items)) < total
)
except Exception as e:
logger.error(
"Failed to retrieve audit logs",
error=str(e),
tenant_id=tenant_id
)
raise HTTPException(
status_code=500,
detail=f"Failed to retrieve audit logs: {str(e)}"
)
@router.get(
route_builder.build_base_route("audit-logs/stats"),
response_model=AuditLogStatsResponse
)
@require_user_role(['admin', 'owner'])
async def get_audit_log_stats(
tenant_id: UUID = Path(..., description="Tenant ID"),
start_date: Optional[datetime] = Query(None, description="Filter logs from this date"),
end_date: Optional[datetime] = Query(None, description="Filter logs until this date"),
current_user: Dict[str, Any] = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""
Get audit log statistics for external service.
Requires admin or owner role.
"""
try:
logger.info(
"Retrieving audit log statistics",
tenant_id=tenant_id,
user_id=current_user.get("user_id")
)
# Build base filters
filters = [AuditLog.tenant_id == tenant_id]
if start_date:
filters.append(AuditLog.created_at >= start_date)
if end_date:
filters.append(AuditLog.created_at <= end_date)
# Total events
count_query = select(func.count()).select_from(AuditLog).where(and_(*filters))
total_result = await db.execute(count_query)
total_events = total_result.scalar() or 0
# Events by action
action_query = (
select(AuditLog.action, func.count().label('count'))
.where(and_(*filters))
.group_by(AuditLog.action)
)
action_result = await db.execute(action_query)
events_by_action = {row.action: row.count for row in action_result}
# Events by severity
severity_query = (
select(AuditLog.severity, func.count().label('count'))
.where(and_(*filters))
.group_by(AuditLog.severity)
)
severity_result = await db.execute(severity_query)
events_by_severity = {row.severity: row.count for row in severity_result}
# Events by resource type
resource_query = (
select(AuditLog.resource_type, func.count().label('count'))
.where(and_(*filters))
.group_by(AuditLog.resource_type)
)
resource_result = await db.execute(resource_query)
events_by_resource_type = {row.resource_type: row.count for row in resource_result}
# Date range
date_range_query = (
select(
func.min(AuditLog.created_at).label('min_date'),
func.max(AuditLog.created_at).label('max_date')
)
.where(and_(*filters))
)
date_result = await db.execute(date_range_query)
date_row = date_result.one()
logger.info(
"Successfully retrieved audit log statistics",
tenant_id=tenant_id,
total_events=total_events
)
return AuditLogStatsResponse(
total_events=total_events,
events_by_action=events_by_action,
events_by_severity=events_by_severity,
events_by_resource_type=events_by_resource_type,
date_range={
"min": date_row.min_date,
"max": date_row.max_date
}
)
except Exception as e:
logger.error(
"Failed to retrieve audit log statistics",
error=str(e),
tenant_id=tenant_id
)
raise HTTPException(
status_code=500,
detail=f"Failed to retrieve audit log statistics: {str(e)}"
)
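
Both audit endpoints are limit/offset paginated and return a has_more flag, so a client can keep requesting pages until has_more turns false. A minimal client-side sketch, assuming a hypothetical base URL and route shape (the real path is produced by RouteBuilder('external')) and a placeholder bearer token:

import httpx
from typing import Dict, List

BASE_URL = "http://localhost:8000"                           # assumed service address
TENANT_ID = "fbffcf18-d02a-4104-b6e3-0b32006e3e47"           # example tenant id from the schemas
AUDIT_PATH = f"/api/v1/tenants/{TENANT_ID}/audit-logs"       # hypothetical route shape

def fetch_all_audit_logs(token: str, page_size: int = 100) -> List[Dict]:
    """Follow limit/offset pagination until has_more is False."""
    items: List[Dict] = []
    offset = 0
    headers = {"Authorization": f"Bearer {token}"}
    with httpx.Client(base_url=BASE_URL, headers=headers, timeout=10.0) as client:
        while True:
            resp = client.get(AUDIT_PATH, params={"limit": page_size, "offset": offset})
            resp.raise_for_status()
            page = resp.json()
            items.extend(page["items"])
            if not page["has_more"]:
                return items
            offset += page_size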


@@ -0,0 +1,387 @@
# services/external/app/api/calendar_operations.py
"""
Calendar Operations API - School calendars and tenant location context endpoints
"""
from fastapi import APIRouter, Depends, HTTPException, Query, Path, Body
from typing import List, Optional
from uuid import UUID
import structlog
from app.schemas.calendar import (
SchoolCalendarResponse,
SchoolCalendarListResponse,
TenantLocationContextResponse,
TenantLocationContextCreateRequest,
CalendarCheckResponse
)
from app.registry.calendar_registry import CalendarRegistry, SchoolType
from app.repositories.calendar_repository import CalendarRepository
from app.cache.redis_wrapper import ExternalDataCache
from shared.routing.route_builder import RouteBuilder
from shared.auth.decorators import get_current_user_dep
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.database import get_db
from datetime import datetime, date
route_builder = RouteBuilder('external')
router = APIRouter(tags=["calendar-operations"])
logger = structlog.get_logger()
# Initialize cache
cache = ExternalDataCache()
# ===== School Calendar Endpoints =====
@router.get(
route_builder.build_operations_route("cities/{city_id}/school-calendars"),
response_model=SchoolCalendarListResponse
)
async def list_school_calendars_for_city(
city_id: str = Path(..., description="City ID (e.g., 'madrid')"),
school_type: Optional[str] = Query(None, description="Filter by school type"),
academic_year: Optional[str] = Query(None, description="Filter by academic year"),
db: AsyncSession = Depends(get_db)
):
"""List all available school calendars for a city"""
try:
repo = CalendarRepository(db)
calendars = await repo.get_calendars_by_city(city_id, enabled_only=True)
# Apply filters if provided
if school_type:
calendars = [c for c in calendars if c.school_type == school_type]
if academic_year:
calendars = [c for c in calendars if c.academic_year == academic_year]
calendar_responses = [
SchoolCalendarResponse(
calendar_id=str(c.id),
calendar_name=c.calendar_name,
city_id=c.city_id,
school_type=c.school_type,
academic_year=c.academic_year,
holiday_periods=c.holiday_periods,
school_hours=c.school_hours,
source=c.source,
enabled=c.enabled
)
for c in calendars
]
return SchoolCalendarListResponse(
city_id=city_id,
calendars=calendar_responses,
total=len(calendar_responses)
)
except Exception as e:
logger.error(
"Error listing school calendars",
city_id=city_id,
error=str(e)
)
raise HTTPException(
status_code=500,
detail=f"Error retrieving school calendars: {str(e)}"
)
@router.get(
route_builder.build_operations_route("school-calendars/{calendar_id}"),
response_model=SchoolCalendarResponse
)
async def get_school_calendar(
calendar_id: UUID = Path(..., description="School calendar ID"),
db: AsyncSession = Depends(get_db)
):
"""Get detailed information about a specific school calendar (cached)"""
try:
calendar_id_str = str(calendar_id)
# Check cache first
cached = await cache.get_cached_calendar(calendar_id_str)
if cached:
logger.debug("Returning cached calendar", calendar_id=calendar_id_str)
return SchoolCalendarResponse(**cached)
# Cache miss - fetch from database
repo = CalendarRepository(db)
calendar = await repo.get_calendar_by_id(calendar_id)
if not calendar:
raise HTTPException(status_code=404, detail="School calendar not found")
response_data = {
"calendar_id": str(calendar.id),
"calendar_name": calendar.calendar_name,
"city_id": calendar.city_id,
"school_type": calendar.school_type,
"academic_year": calendar.academic_year,
"holiday_periods": calendar.holiday_periods,
"school_hours": calendar.school_hours,
"source": calendar.source,
"enabled": calendar.enabled
}
# Cache the result
await cache.set_cached_calendar(calendar_id_str, response_data)
return SchoolCalendarResponse(**response_data)
except HTTPException:
raise
except Exception as e:
logger.error(
"Error retrieving school calendar",
calendar_id=str(calendar_id),
error=str(e)
)
raise HTTPException(
status_code=500,
detail=f"Error retrieving school calendar: {str(e)}"
)
@router.get(
route_builder.build_operations_route("school-calendars/{calendar_id}/is-holiday"),
response_model=CalendarCheckResponse
)
async def check_is_school_holiday(
calendar_id: UUID = Path(..., description="School calendar ID"),
check_date: str = Query(..., description="Date to check (ISO format: YYYY-MM-DD)"),
db: AsyncSession = Depends(get_db)
):
"""Check if a specific date is a school holiday"""
try:
repo = CalendarRepository(db)
calendar = await repo.get_calendar_by_id(calendar_id)
if not calendar:
raise HTTPException(status_code=404, detail="School calendar not found")
# Parse the date
try:
date_obj = datetime.strptime(check_date, "%Y-%m-%d").date()
except ValueError:
raise HTTPException(
status_code=400,
detail="Invalid date format. Use YYYY-MM-DD"
)
# Check if date falls within any holiday period
is_holiday = False
holiday_name = None
for period in calendar.holiday_periods:
start = datetime.strptime(period["start_date"], "%Y-%m-%d").date()
end = datetime.strptime(period["end_date"], "%Y-%m-%d").date()
if start <= date_obj <= end:
is_holiday = True
holiday_name = period["name"]
break
return CalendarCheckResponse(
date=check_date,
is_holiday=is_holiday,
holiday_name=holiday_name,
calendar_id=str(calendar_id),
calendar_name=calendar.calendar_name
)
except HTTPException:
raise
except Exception as e:
logger.error(
"Error checking holiday status",
calendar_id=str(calendar_id),
date=check_date,
error=str(e)
)
raise HTTPException(
status_code=500,
detail=f"Error checking holiday status: {str(e)}"
)
# ===== Tenant Location Context Endpoints =====
@router.get(
route_builder.build_base_route("location-context"),
response_model=TenantLocationContextResponse
)
async def get_tenant_location_context(
tenant_id: UUID = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""Get location context for a tenant including school calendar assignment (cached)"""
try:
tenant_id_str = str(tenant_id)
# Check cache first
cached = await cache.get_cached_tenant_context(tenant_id_str)
if cached:
logger.debug("Returning cached tenant context", tenant_id=tenant_id_str)
return TenantLocationContextResponse(**cached)
# Cache miss - fetch from database
repo = CalendarRepository(db)
context = await repo.get_tenant_with_calendar(tenant_id)
if not context:
raise HTTPException(
status_code=404,
detail="Location context not found for this tenant"
)
# Cache the result
await cache.set_cached_tenant_context(tenant_id_str, context)
return TenantLocationContextResponse(**context)
except HTTPException:
raise
except Exception as e:
logger.error(
"Error retrieving tenant location context",
tenant_id=str(tenant_id),
error=str(e)
)
raise HTTPException(
status_code=500,
detail=f"Error retrieving location context: {str(e)}"
)
@router.post(
route_builder.build_base_route("location-context"),
response_model=TenantLocationContextResponse
)
async def create_or_update_tenant_location_context(
request: TenantLocationContextCreateRequest,
tenant_id: UUID = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""Create or update tenant location context"""
try:
repo = CalendarRepository(db)
# Validate calendar_id if provided
if request.school_calendar_id:
calendar = await repo.get_calendar_by_id(request.school_calendar_id)
if not calendar:
raise HTTPException(
status_code=400,
detail="Invalid school_calendar_id"
)
# Create or update context
context_obj = await repo.create_or_update_tenant_location_context(
tenant_id=tenant_id,
city_id=request.city_id,
school_calendar_id=request.school_calendar_id,
neighborhood=request.neighborhood,
local_events=request.local_events,
notes=request.notes
)
# Invalidate cache since context was updated
await cache.invalidate_tenant_context(str(tenant_id))
# Get full context with calendar details
context = await repo.get_tenant_with_calendar(tenant_id)
# Cache the new context
await cache.set_cached_tenant_context(str(tenant_id), context)
return TenantLocationContextResponse(**context)
except HTTPException:
raise
except Exception as e:
logger.error(
"Error creating/updating tenant location context",
tenant_id=str(tenant_id),
error=str(e)
)
raise HTTPException(
status_code=500,
detail=f"Error creating/updating location context: {str(e)}"
)
@router.delete(
route_builder.build_base_route("location-context"),
status_code=204
)
async def delete_tenant_location_context(
tenant_id: UUID = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""Delete tenant location context"""
try:
repo = CalendarRepository(db)
deleted = await repo.delete_tenant_location_context(tenant_id)
if not deleted:
raise HTTPException(
status_code=404,
detail="Location context not found"
)
return None
except HTTPException:
raise
except Exception as e:
logger.error(
"Error deleting tenant location context",
tenant_id=str(tenant_id),
error=str(e)
)
raise HTTPException(
status_code=500,
detail=f"Error deleting location context: {str(e)}"
)
# ===== Helper Endpoints =====
@router.get(
route_builder.build_operations_route("calendars/registry"),
response_model=List[SchoolCalendarResponse]
)
async def list_registry_calendars():
"""List all calendars from the CalendarRegistry (static configuration)"""
calendars = CalendarRegistry.get_enabled_calendars()
return [
SchoolCalendarResponse(
calendar_id=cal.calendar_id,
calendar_name=cal.calendar_name,
city_id=cal.city_id,
school_type=cal.school_type.value,
academic_year=cal.academic_year,
holiday_periods=[
{
"name": hp.name,
"start_date": hp.start_date,
"end_date": hp.end_date,
"description": hp.description
}
for hp in cal.holiday_periods
],
school_hours={
"morning_start": cal.school_hours.morning_start,
"morning_end": cal.school_hours.morning_end,
"has_afternoon_session": cal.school_hours.has_afternoon_session,
"afternoon_start": cal.school_hours.afternoon_start,
"afternoon_end": cal.school_hours.afternoon_end
},
source=cal.source,
enabled=cal.enabled
)
for cal in calendars
]
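
The is-holiday endpoint above boils down to a simple containment check over the stored holiday_periods; the same logic can be exercised standalone. A minimal sketch using the same period shape:

from datetime import datetime
from typing import Dict, List, Optional

def find_holiday(check_date: str, holiday_periods: List[Dict]) -> Optional[str]:
    """Return the name of the holiday covering check_date (YYYY-MM-DD), or None."""
    day = datetime.strptime(check_date, "%Y-%m-%d").date()
    for period in holiday_periods:
        start = datetime.strptime(period["start_date"], "%Y-%m-%d").date()
        end = datetime.strptime(period["end_date"], "%Y-%m-%d").date()
        if start <= day <= end:
            return period["name"]
    return None

periods = [{"name": "Christmas Holiday", "start_date": "2024-12-21", "end_date": "2025-01-07"}]
assert find_holiday("2024-12-25", periods) == "Christmas Holiday"
assert find_holiday("2025-02-01", periods) is None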


@@ -184,3 +184,115 @@ class ExternalDataCache:
except Exception as e:
logger.error("Error invalidating cache", error=str(e))
# ===== Calendar Caching Methods =====
def _calendar_cache_key(self, calendar_id: str) -> str:
"""Generate cache key for school calendar"""
return f"calendar:{calendar_id}"
def _tenant_context_cache_key(self, tenant_id: str) -> str:
"""Generate cache key for tenant location context"""
return f"tenant_context:{tenant_id}"
async def get_cached_calendar(
self,
calendar_id: str
) -> Optional[Dict[str, Any]]:
"""Get cached school calendar by ID"""
try:
key = self._calendar_cache_key(calendar_id)
client = await self._get_client()
cached = await client.get(key)
if cached:
logger.debug("Calendar cache hit", calendar_id=calendar_id)
return json.loads(cached)
logger.debug("Calendar cache miss", calendar_id=calendar_id)
return None
except Exception as e:
logger.error("Error reading calendar cache", error=str(e))
return None
async def set_cached_calendar(
self,
calendar_id: str,
calendar_data: Dict[str, Any]
):
"""Cache school calendar data (7 days TTL)"""
try:
key = self._calendar_cache_key(calendar_id)
client = await self._get_client()
# Calendars change rarely, use 7-day TTL
ttl = 86400 * 7
await client.setex(
key,
ttl,
json.dumps(calendar_data)
)
logger.debug("Calendar cached", calendar_id=calendar_id)
except Exception as e:
logger.error("Error caching calendar", error=str(e))
async def get_cached_tenant_context(
self,
tenant_id: str
) -> Optional[Dict[str, Any]]:
"""Get cached tenant location context"""
try:
key = self._tenant_context_cache_key(tenant_id)
client = await self._get_client()
cached = await client.get(key)
if cached:
logger.debug("Tenant context cache hit", tenant_id=tenant_id)
return json.loads(cached)
logger.debug("Tenant context cache miss", tenant_id=tenant_id)
return None
except Exception as e:
logger.error("Error reading tenant context cache", error=str(e))
return None
async def set_cached_tenant_context(
self,
tenant_id: str,
context_data: Dict[str, Any]
):
"""Cache tenant location context (24 hours TTL)"""
try:
key = self._tenant_context_cache_key(tenant_id)
client = await self._get_client()
# Tenant context changes less frequently, 24-hour TTL
ttl = 86400
await client.setex(
key,
ttl,
json.dumps(context_data)
)
logger.debug("Tenant context cached", tenant_id=tenant_id)
except Exception as e:
logger.error("Error caching tenant context", error=str(e))
async def invalidate_tenant_context(self, tenant_id: str):
"""Invalidate tenant context cache (called when context is updated)"""
try:
key = self._tenant_context_cache_key(tenant_id)
client = await self._get_client()
await client.delete(key)
logger.info("Tenant context cache invalidated", tenant_id=tenant_id)
except Exception as e:
logger.error("Error invalidating tenant context cache", error=str(e))
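
The key formats and TTLs above define the cache contract: calendar:{calendar_id} kept for 7 days, tenant_context:{tenant_id} for 24 hours, with an explicit delete on update. A minimal sketch of that contract using redis.asyncio directly (the Redis URL is an assumption for local testing):

import asyncio
import json
import redis.asyncio as redis

async def demo() -> None:
    client = redis.from_url("redis://localhost:6379/0")                      # assumed local Redis
    key = "calendar:madrid_primary_2024_2025"                                # _calendar_cache_key format
    await client.setex(key, 86400 * 7, json.dumps({"city_id": "madrid"}))    # 7-day TTL
    cached = await client.get(key)
    print(json.loads(cached))                                                # {'city_id': 'madrid'}
    await client.delete(key)                                                 # invalidation on update
    await client.close()

if __name__ == "__main__":
    asyncio.run(demo())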


@@ -9,8 +9,10 @@ import structlog
import asyncio
from app.registry.city_registry import CityRegistry
from app.registry.calendar_registry import CalendarRegistry
from .adapters import get_adapter
from app.repositories.city_data_repository import CityDataRepository
from app.repositories.calendar_repository import CalendarRepository
from app.core.database import database_manager
logger = structlog.get_logger()
@@ -266,3 +268,99 @@ class DataIngestionManager:
error=str(e)
)
return False
async def seed_school_calendars(self) -> bool:
"""
Seed school calendars from CalendarRegistry into database
Called during initialization - idempotent
"""
try:
logger.info("Starting school calendar seeding...")
# Get all calendars from registry
calendars = CalendarRegistry.get_all_calendars()
logger.info(f"Found {len(calendars)} calendars in registry")
async with self.database_manager.get_session() as session:
repo = CalendarRepository(session)
seeded_count = 0
skipped_count = 0
for cal_def in calendars:
logger.info(
"Processing calendar",
calendar_id=cal_def.calendar_id,
city=cal_def.city_id,
type=cal_def.school_type.value,
year=cal_def.academic_year
)
# Check if calendar already exists (idempotency)
existing = await repo.get_calendar_by_city_type_year(
city_id=cal_def.city_id,
school_type=cal_def.school_type.value,
academic_year=cal_def.academic_year
)
if existing:
logger.info(
"Calendar already exists, skipping",
calendar_id=cal_def.calendar_id
)
skipped_count += 1
continue
# Convert holiday periods to dict format
holiday_periods = [
{
"name": hp.name,
"start_date": hp.start_date,
"end_date": hp.end_date,
"description": hp.description
}
for hp in cal_def.holiday_periods
]
# Convert school hours to dict format
school_hours = {
"morning_start": cal_def.school_hours.morning_start,
"morning_end": cal_def.school_hours.morning_end,
"has_afternoon_session": cal_def.school_hours.has_afternoon_session,
"afternoon_start": cal_def.school_hours.afternoon_start,
"afternoon_end": cal_def.school_hours.afternoon_end
}
# Create calendar in database
created_calendar = await repo.create_school_calendar(
city_id=cal_def.city_id,
calendar_name=cal_def.calendar_name,
school_type=cal_def.school_type.value,
academic_year=cal_def.academic_year,
holiday_periods=holiday_periods,
school_hours=school_hours,
source=cal_def.source,
enabled=cal_def.enabled
)
logger.info(
"Calendar seeded successfully",
calendar_id=str(created_calendar.id),
city=cal_def.city_id,
type=cal_def.school_type.value,
year=cal_def.academic_year
)
seeded_count += 1
logger.info(
"School calendar seeding completed",
seeded=seeded_count,
skipped=skipped_count,
total=len(calendars)
)
return True
except Exception as e:
logger.error("Error seeding school calendars", error=str(e))
return False


@@ -16,18 +16,30 @@ logger = structlog.get_logger()
async def main(months: int = 24):
"""Initialize historical data for all enabled cities and seed calendars"""
logger.info("Starting data initialization job", months=months)
try:
manager = DataIngestionManager()
# Initialize weather and traffic data
weather_traffic_success = await manager.initialize_all_cities(months=months)
# Seed school calendars
logger.info("Proceeding to seed school calendars...")
calendar_success = await manager.seed_school_calendars()
# Both must succeed
overall_success = weather_traffic_success and calendar_success
if overall_success:
logger.info("✅ Data initialization completed successfully (weather, traffic, calendars)")
sys.exit(0)
else:
if not weather_traffic_success:
logger.error("❌ Weather/traffic initialization failed")
if not calendar_success:
logger.error("❌ Calendar seeding failed")
sys.exit(1)
except Exception as e:


@@ -10,7 +10,7 @@ from app.core.database import database_manager
from app.services.messaging import setup_messaging, cleanup_messaging
from shared.service_base import StandardFastAPIService
# Include routers
from app.api import weather_data, traffic_data, city_operations, calendar_operations, audit
class ExternalService(StandardFastAPIService):
@@ -177,6 +177,9 @@ app = service.create_app()
service.setup_standard_endpoints()
# Include routers
# IMPORTANT: Register audit router FIRST to avoid route matching conflicts
service.add_router(audit.router)
service.add_router(weather_data.router)
service.add_router(traffic_data.router)
service.add_router(city_operations.router) # New v2.0 city-based optimized endpoints
service.add_router(calendar_operations.router) # School calendars and hyperlocal data


@@ -25,6 +25,7 @@ from .weather import (
from .city_weather import CityWeatherData
from .city_traffic import CityTrafficData
from .calendar import SchoolCalendar, TenantLocationContext
# List all models for easier access
__all__ = [
@@ -38,5 +39,8 @@ __all__ = [
# City-based models (new)
"CityWeatherData",
"CityTrafficData",
# Calendar models (hyperlocal)
"SchoolCalendar",
"TenantLocationContext",
"AuditLog",
]


@@ -0,0 +1,86 @@
# services/external/app/models/calendar.py
"""
School Calendar and Tenant Location Context Models
Hyperlocal data for demand forecasting
"""
from sqlalchemy import Column, String, DateTime, Index, Boolean
from sqlalchemy.dialects.postgresql import UUID, JSONB
from datetime import datetime
import uuid
from app.core.database import Base
class SchoolCalendar(Base):
"""City-based school calendar data for forecasting"""
__tablename__ = "school_calendars"
id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
city_id = Column(String(50), nullable=False, index=True)
calendar_name = Column(String(100), nullable=False)
school_type = Column(String(20), nullable=False) # primary, secondary, university
academic_year = Column(String(10), nullable=False) # e.g., "2024-2025"
# Holiday periods as array of date ranges (keys must match what the API and registry use)
# Example: [
# {"name": "Christmas", "start_date": "2024-12-20", "end_date": "2025-01-08"},
# {"name": "Easter", "start_date": "2025-04-10", "end_date": "2025-04-21"},
# {"name": "Summer", "start_date": "2025-06-23", "end_date": "2025-09-09"}
# ]
holiday_periods = Column(JSONB, nullable=False, default=list)
# School hours configuration
# Example: {
# "morning_start": "09:00",
# "morning_end": "14:00",
# "afternoon_start": "15:00", # if applicable
# "afternoon_end": "17:00",
# "has_afternoon_session": false
# }
school_hours = Column(JSONB, nullable=False, default=dict)
# Metadata
source = Column(String(100), nullable=True) # e.g., "madrid_education_dept"
enabled = Column(Boolean, default=True, nullable=False)
created_at = Column(DateTime(timezone=True), default=datetime.utcnow)
updated_at = Column(DateTime(timezone=True), default=datetime.utcnow, onupdate=datetime.utcnow)
__table_args__ = (
Index('idx_school_calendar_city_year', 'city_id', 'academic_year'),
Index('idx_school_calendar_city_type', 'city_id', 'school_type'),
)
class TenantLocationContext(Base):
"""Tenant-specific location context for hyperlocal forecasting"""
__tablename__ = "tenant_location_contexts"
tenant_id = Column(UUID(as_uuid=True), primary_key=True)
city_id = Column(String(50), nullable=False, index=True)
# School calendar assignment
school_calendar_id = Column(UUID(as_uuid=True), nullable=True, index=True)
# Hyperlocal context
neighborhood = Column(String(100), nullable=True)
# Custom local events specific to this tenant's location
# Example: [
# {"name": "Neighborhood Festival", "date": "2025-06-15", "impact": "high"},
# {"name": "Local Market Day", "date": "2025-05-20", "impact": "medium"}
# ]
local_events = Column(JSONB, nullable=True, default=list)
# Additional metadata
notes = Column(String(500), nullable=True)
created_at = Column(DateTime(timezone=True), default=datetime.utcnow)
updated_at = Column(DateTime(timezone=True), default=datetime.utcnow, onupdate=datetime.utcnow)
__table_args__ = (
Index('idx_tenant_location_calendar', 'school_calendar_id'),
)
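
For reference, the JSONB columns are expected to hold the shapes documented in the comments above; a minimal sketch of well-formed payloads that can be assigned to holiday_periods and school_hours when creating a row:

holiday_periods = [
    {
        "name": "Christmas",
        "start_date": "2024-12-20",
        "end_date": "2025-01-08",
        "description": "Christmas and New Year break",
    },
]

school_hours = {
    "morning_start": "09:00",
    "morning_end": "14:00",
    "has_afternoon_session": False,
    "afternoon_start": None,
    "afternoon_end": None,
}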


@@ -0,0 +1,377 @@
# services/external/app/registry/calendar_registry.py
"""
Calendar Registry - Pre-configured school calendars and local events
"""
from dataclasses import dataclass
from typing import List, Optional, Dict, Any
from datetime import date
from enum import Enum
class SchoolType(str, Enum):
PRIMARY = "primary"
SECONDARY = "secondary"
UNIVERSITY = "university"
@dataclass
class HolidayPeriod:
"""School holiday period definition"""
name: str
start_date: str # ISO format: "2024-12-20"
end_date: str # ISO format: "2025-01-08"
description: Optional[str] = None
@dataclass
class SchoolHours:
"""School operating hours configuration"""
morning_start: str # "09:00"
morning_end: str # "14:00"
has_afternoon_session: bool # True/False
afternoon_start: Optional[str] = None # "15:00" if has_afternoon_session
afternoon_end: Optional[str] = None # "17:00" if has_afternoon_session
@dataclass
class CalendarDefinition:
"""School calendar configuration for a specific city and school type"""
calendar_id: str
calendar_name: str
city_id: str
school_type: SchoolType
academic_year: str # "2024-2025"
holiday_periods: List[HolidayPeriod]
school_hours: SchoolHours
source: str
enabled: bool = True
class CalendarRegistry:
"""Central registry of school calendars for forecasting"""
# Madrid Primary School Calendar 2024-2025 (Official Comunidad de Madrid - ORDEN 1177/2024)
MADRID_PRIMARY_2024_2025 = CalendarDefinition(
calendar_id="madrid_primary_2024_2025",
calendar_name="Madrid Primary School Calendar 2024-2025",
city_id="madrid",
school_type=SchoolType.PRIMARY,
academic_year="2024-2025",
holiday_periods=[
HolidayPeriod(
name="Christmas Holiday",
start_date="2024-12-21",
end_date="2025-01-07",
description="Official Christmas break - Comunidad de Madrid (Dec 21 - Jan 7)"
),
HolidayPeriod(
name="Easter Holiday (Semana Santa)",
start_date="2025-04-11",
end_date="2025-04-21",
description="Official Easter break - Comunidad de Madrid (Apr 11-21)"
),
HolidayPeriod(
name="Summer Holiday",
start_date="2025-06-21",
end_date="2025-09-08",
description="Summer vacation (Last day Jun 20, classes resume Sep 9)"
),
HolidayPeriod(
name="All Saints Long Weekend",
start_date="2024-10-31",
end_date="2024-11-03",
description="October 31 - November 3 non-working days"
),
HolidayPeriod(
name="February Long Weekend",
start_date="2025-02-28",
end_date="2025-03-03",
description="February 28 - March 3 non-working days"
),
],
school_hours=SchoolHours(
morning_start="09:00",
morning_end="14:00",
has_afternoon_session=False
),
source="comunidad_madrid_orden_1177_2024",
enabled=True
)
# Madrid Secondary School Calendar 2024-2025 (Official Comunidad de Madrid - ORDEN 1177/2024)
MADRID_SECONDARY_2024_2025 = CalendarDefinition(
calendar_id="madrid_secondary_2024_2025",
calendar_name="Madrid Secondary School Calendar 2024-2025",
city_id="madrid",
school_type=SchoolType.SECONDARY,
academic_year="2024-2025",
holiday_periods=[
HolidayPeriod(
name="Christmas Holiday",
start_date="2024-12-21",
end_date="2025-01-07",
description="Official Christmas break - Comunidad de Madrid (Dec 21 - Jan 7)"
),
HolidayPeriod(
name="Easter Holiday (Semana Santa)",
start_date="2025-04-11",
end_date="2025-04-21",
description="Official Easter break - Comunidad de Madrid (Apr 11-21)"
),
HolidayPeriod(
name="Summer Holiday",
start_date="2025-06-21",
end_date="2025-09-09",
description="Summer vacation (Last day Jun 20, classes resume Sep 10)"
),
HolidayPeriod(
name="All Saints Long Weekend",
start_date="2024-10-31",
end_date="2024-11-03",
description="October 31 - November 3 non-working days"
),
HolidayPeriod(
name="February Long Weekend",
start_date="2025-02-28",
end_date="2025-03-03",
description="February 28 - March 3 non-working days"
),
],
school_hours=SchoolHours(
morning_start="09:00",
morning_end="14:00",
has_afternoon_session=False
),
source="comunidad_madrid_orden_1177_2024",
enabled=True
)
# Madrid Primary School Calendar 2025-2026 (Official Comunidad de Madrid - ORDEN 1476/2025)
MADRID_PRIMARY_2025_2026 = CalendarDefinition(
calendar_id="madrid_primary_2025_2026",
calendar_name="Madrid Primary School Calendar 2025-2026",
city_id="madrid",
school_type=SchoolType.PRIMARY,
academic_year="2025-2026",
holiday_periods=[
HolidayPeriod(
name="Christmas Holiday",
start_date="2025-12-20",
end_date="2026-01-07",
description="Official Christmas break - Comunidad de Madrid (Dec 20 - Jan 7)"
),
HolidayPeriod(
name="Easter Holiday (Semana Santa)",
start_date="2026-03-27",
end_date="2026-04-06",
description="Official Easter break - Comunidad de Madrid (Mar 27 - Apr 6)"
),
HolidayPeriod(
name="Summer Holiday",
start_date="2026-06-21",
end_date="2026-09-08",
description="Summer vacation (classes resume Sep 9)"
),
HolidayPeriod(
name="October Long Weekend",
start_date="2025-10-13",
end_date="2025-10-13",
description="October 13 non-working day (after Día de la Hispanidad)"
),
HolidayPeriod(
name="All Saints Long Weekend",
start_date="2025-11-03",
end_date="2025-11-03",
description="November 3 non-working day (after All Saints)"
),
],
school_hours=SchoolHours(
morning_start="09:00",
morning_end="14:00",
has_afternoon_session=False
),
source="comunidad_madrid_orden_1476_2025",
enabled=True
)
# Madrid Secondary School Calendar 2025-2026 (Official Comunidad de Madrid - ORDEN 1476/2025)
MADRID_SECONDARY_2025_2026 = CalendarDefinition(
calendar_id="madrid_secondary_2025_2026",
calendar_name="Madrid Secondary School Calendar 2025-2026",
city_id="madrid",
school_type=SchoolType.SECONDARY,
academic_year="2025-2026",
holiday_periods=[
HolidayPeriod(
name="Christmas Holiday",
start_date="2025-12-20",
end_date="2026-01-07",
description="Official Christmas break - Comunidad de Madrid (Dec 20 - Jan 7)"
),
HolidayPeriod(
name="Easter Holiday (Semana Santa)",
start_date="2026-03-27",
end_date="2026-04-06",
description="Official Easter break - Comunidad de Madrid (Mar 27 - Apr 6)"
),
HolidayPeriod(
name="Summer Holiday",
start_date="2026-06-21",
end_date="2026-09-09",
description="Summer vacation (classes resume Sep 10)"
),
HolidayPeriod(
name="October Long Weekend",
start_date="2025-10-13",
end_date="2025-10-13",
description="October 13 non-working day (after Día de la Hispanidad)"
),
HolidayPeriod(
name="All Saints Long Weekend",
start_date="2025-11-03",
end_date="2025-11-03",
description="November 3 non-working day (after All Saints)"
),
],
school_hours=SchoolHours(
morning_start="09:00",
morning_end="14:00",
has_afternoon_session=False
),
source="comunidad_madrid_orden_1476_2025",
enabled=True
)
# Registry of all calendars
CALENDARS: List[CalendarDefinition] = [
MADRID_PRIMARY_2024_2025,
MADRID_SECONDARY_2024_2025,
MADRID_PRIMARY_2025_2026,
MADRID_SECONDARY_2025_2026,
]
@classmethod
def get_all_calendars(cls) -> List[CalendarDefinition]:
"""Get all calendars"""
return cls.CALENDARS
@classmethod
def get_enabled_calendars(cls) -> List[CalendarDefinition]:
"""Get all enabled calendars"""
return [cal for cal in cls.CALENDARS if cal.enabled]
@classmethod
def get_calendar(cls, calendar_id: str) -> Optional[CalendarDefinition]:
"""Get calendar by ID"""
for cal in cls.CALENDARS:
if cal.calendar_id == calendar_id:
return cal
return None
@classmethod
def get_calendars_for_city(cls, city_id: str) -> List[CalendarDefinition]:
"""Get all enabled calendars for a specific city"""
return [
cal for cal in cls.CALENDARS
if cal.city_id == city_id and cal.enabled
]
@classmethod
def get_calendar_for_city_and_type(
cls,
city_id: str,
school_type: SchoolType,
academic_year: Optional[str] = None
) -> Optional[CalendarDefinition]:
"""Get specific calendar for city, type, and optionally year"""
for cal in cls.CALENDARS:
if (cal.city_id == city_id and
cal.school_type == school_type and
cal.enabled and
(academic_year is None or cal.academic_year == academic_year)):
return cal
return None
@classmethod
def to_dict(cls, calendar: CalendarDefinition) -> Dict[str, Any]:
"""Convert calendar definition to dictionary for JSON serialization"""
return {
"calendar_id": calendar.calendar_id,
"calendar_name": calendar.calendar_name,
"city_id": calendar.city_id,
"school_type": calendar.school_type.value,
"academic_year": calendar.academic_year,
"holiday_periods": [
{
"name": hp.name,
"start_date": hp.start_date,
"end_date": hp.end_date,
"description": hp.description
}
for hp in calendar.holiday_periods
],
"school_hours": {
"morning_start": calendar.school_hours.morning_start,
"morning_end": calendar.school_hours.morning_end,
"has_afternoon_session": calendar.school_hours.has_afternoon_session,
"afternoon_start": calendar.school_hours.afternoon_start,
"afternoon_end": calendar.school_hours.afternoon_end,
},
"source": calendar.source,
"enabled": calendar.enabled
}
# Local Events Registry for Madrid
@dataclass
class LocalEventDefinition:
"""Local event that impacts demand"""
event_id: str
name: str
city_id: str
date: str # ISO format or "annual-MM-DD" for recurring
impact_level: str # "low", "medium", "high"
description: Optional[str] = None
recurring: bool = False # True for annual events
class LocalEventsRegistry:
"""Registry of local events and festivals"""
MADRID_EVENTS = [
LocalEventDefinition(
event_id="madrid_san_isidro",
name="San Isidro Festival",
city_id="madrid",
date="annual-05-15",
impact_level="high",
description="Madrid's patron saint festival - major citywide celebration",
recurring=True
),
LocalEventDefinition(
event_id="madrid_dos_de_mayo",
name="Dos de Mayo",
city_id="madrid",
date="annual-05-02",
impact_level="medium",
description="Madrid regional holiday",
recurring=True
),
LocalEventDefinition(
event_id="madrid_almudena",
name="Virgen de la Almudena",
city_id="madrid",
date="annual-11-09",
impact_level="medium",
description="Madrid patron saint day",
recurring=True
),
]
@classmethod
def get_events_for_city(cls, city_id: str) -> List[LocalEventDefinition]:
"""Get all local events for a city"""
if city_id == "madrid":
return cls.MADRID_EVENTS
return []
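
A minimal usage sketch of the registry lookups defined above (import path taken from the file header comment):

from app.registry.calendar_registry import (
    CalendarRegistry,
    LocalEventsRegistry,
    SchoolType,
)

# Resolve the Madrid primary-school calendar for a given academic year
cal = CalendarRegistry.get_calendar_for_city_and_type(
    city_id="madrid",
    school_type=SchoolType.PRIMARY,
    academic_year="2024-2025",
)
if cal is not None:
    print(cal.calendar_id)                        # madrid_primary_2024_2025
    print(len(cal.holiday_periods))               # 5 configured holiday periods
    print(CalendarRegistry.to_dict(cal)["school_hours"]["morning_start"])   # 09:00

# Local events that may affect demand in the same city
for event in LocalEventsRegistry.get_events_for_city("madrid"):
    print(event.event_id, event.date, event.impact_level)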


@@ -0,0 +1,329 @@
# services/external/app/repositories/calendar_repository.py
"""
Calendar Repository - Manages school calendars and tenant location contexts
"""
from typing import List, Dict, Any, Optional
from datetime import datetime
from sqlalchemy import select, and_, or_
from sqlalchemy.ext.asyncio import AsyncSession
import structlog
import uuid
from app.models.calendar import SchoolCalendar, TenantLocationContext
logger = structlog.get_logger()
class CalendarRepository:
"""Repository for school calendar and tenant location data"""
def __init__(self, session: AsyncSession):
self.session = session
# ===== School Calendar Operations =====
async def create_school_calendar(
self,
city_id: str,
calendar_name: str,
school_type: str,
academic_year: str,
holiday_periods: List[Dict[str, Any]],
school_hours: Dict[str, Any],
source: Optional[str] = None,
enabled: bool = True
) -> SchoolCalendar:
"""Create a new school calendar"""
try:
calendar = SchoolCalendar(
id=uuid.uuid4(),
city_id=city_id,
calendar_name=calendar_name,
school_type=school_type,
academic_year=academic_year,
holiday_periods=holiday_periods,
school_hours=school_hours,
source=source,
enabled=enabled
)
self.session.add(calendar)
await self.session.commit()
await self.session.refresh(calendar)
logger.info(
"School calendar created",
calendar_id=str(calendar.id),
city_id=city_id,
school_type=school_type
)
return calendar
except Exception as e:
await self.session.rollback()
logger.error(
"Error creating school calendar",
city_id=city_id,
error=str(e)
)
raise
async def get_calendar_by_id(
self,
calendar_id: uuid.UUID
) -> Optional[SchoolCalendar]:
"""Get school calendar by ID"""
stmt = select(SchoolCalendar).where(SchoolCalendar.id == calendar_id)
result = await self.session.execute(stmt)
return result.scalar_one_or_none()
async def get_calendars_by_city(
self,
city_id: str,
enabled_only: bool = True
) -> List[SchoolCalendar]:
"""Get all school calendars for a city"""
stmt = select(SchoolCalendar).where(SchoolCalendar.city_id == city_id)
if enabled_only:
stmt = stmt.where(SchoolCalendar.enabled == True)
stmt = stmt.order_by(SchoolCalendar.academic_year.desc(), SchoolCalendar.school_type)
result = await self.session.execute(stmt)
return list(result.scalars().all())
async def get_calendar_by_city_type_year(
self,
city_id: str,
school_type: str,
academic_year: str
) -> Optional[SchoolCalendar]:
"""Get specific calendar by city, type, and year"""
stmt = select(SchoolCalendar).where(
and_(
SchoolCalendar.city_id == city_id,
SchoolCalendar.school_type == school_type,
SchoolCalendar.academic_year == academic_year,
SchoolCalendar.enabled == True
)
)
result = await self.session.execute(stmt)
return result.scalar_one_or_none()
async def update_calendar(
self,
calendar_id: uuid.UUID,
**kwargs
) -> Optional[SchoolCalendar]:
"""Update school calendar"""
try:
calendar = await self.get_calendar_by_id(calendar_id)
if not calendar:
return None
for key, value in kwargs.items():
if hasattr(calendar, key):
setattr(calendar, key, value)
calendar.updated_at = datetime.utcnow()
await self.session.commit()
await self.session.refresh(calendar)
logger.info(
"School calendar updated",
calendar_id=str(calendar_id),
fields=list(kwargs.keys())
)
return calendar
except Exception as e:
await self.session.rollback()
logger.error(
"Error updating school calendar",
calendar_id=str(calendar_id),
error=str(e)
)
raise
async def delete_calendar(self, calendar_id: uuid.UUID) -> bool:
"""Delete school calendar"""
try:
calendar = await self.get_calendar_by_id(calendar_id)
if not calendar:
return False
await self.session.delete(calendar)
await self.session.commit()
logger.info("School calendar deleted", calendar_id=str(calendar_id))
return True
except Exception as e:
await self.session.rollback()
logger.error(
"Error deleting school calendar",
calendar_id=str(calendar_id),
error=str(e)
)
raise
# ===== Tenant Location Context Operations =====
async def create_or_update_tenant_location_context(
self,
tenant_id: uuid.UUID,
city_id: str,
school_calendar_id: Optional[uuid.UUID] = None,
neighborhood: Optional[str] = None,
local_events: Optional[List[Dict[str, Any]]] = None,
notes: Optional[str] = None
) -> TenantLocationContext:
"""Create or update tenant location context"""
try:
# Check if context exists
existing = await self.get_tenant_location_context(tenant_id)
if existing:
# Update existing
existing.city_id = city_id
if school_calendar_id is not None:
existing.school_calendar_id = school_calendar_id
if neighborhood is not None:
existing.neighborhood = neighborhood
if local_events is not None:
existing.local_events = local_events
if notes is not None:
existing.notes = notes
existing.updated_at = datetime.utcnow()
await self.session.commit()
await self.session.refresh(existing)
logger.info(
"Tenant location context updated",
tenant_id=str(tenant_id)
)
return existing
else:
# Create new
context = TenantLocationContext(
tenant_id=tenant_id,
city_id=city_id,
school_calendar_id=school_calendar_id,
neighborhood=neighborhood,
local_events=local_events or [],
notes=notes
)
self.session.add(context)
await self.session.commit()
await self.session.refresh(context)
logger.info(
"Tenant location context created",
tenant_id=str(tenant_id),
city_id=city_id
)
return context
except Exception as e:
await self.session.rollback()
logger.error(
"Error creating/updating tenant location context",
tenant_id=str(tenant_id),
error=str(e)
)
raise
async def get_tenant_location_context(
self,
tenant_id: uuid.UUID
) -> Optional[TenantLocationContext]:
"""Get tenant location context"""
stmt = select(TenantLocationContext).where(
TenantLocationContext.tenant_id == tenant_id
)
result = await self.session.execute(stmt)
return result.scalar_one_or_none()
async def get_tenant_with_calendar(
self,
tenant_id: uuid.UUID
) -> Optional[Dict[str, Any]]:
"""Get tenant location context with full calendar details"""
context = await self.get_tenant_location_context(tenant_id)
if not context:
return None
result = {
"tenant_id": str(context.tenant_id),
"city_id": context.city_id,
"neighborhood": context.neighborhood,
"local_events": context.local_events,
"notes": context.notes,
"calendar": None
}
if context.school_calendar_id:
calendar = await self.get_calendar_by_id(context.school_calendar_id)
if calendar:
result["calendar"] = {
"calendar_id": str(calendar.id),
"calendar_name": calendar.calendar_name,
"school_type": calendar.school_type,
"academic_year": calendar.academic_year,
"holiday_periods": calendar.holiday_periods,
"school_hours": calendar.school_hours,
"source": calendar.source
}
return result
async def delete_tenant_location_context(
self,
tenant_id: uuid.UUID
) -> bool:
"""Delete tenant location context"""
try:
context = await self.get_tenant_location_context(tenant_id)
if not context:
return False
await self.session.delete(context)
await self.session.commit()
logger.info(
"Tenant location context deleted",
tenant_id=str(tenant_id)
)
return True
except Exception as e:
await self.session.rollback()
logger.error(
"Error deleting tenant location context",
tenant_id=str(tenant_id),
error=str(e)
)
raise
# ===== Helper Methods =====
async def get_all_tenants_for_calendar(
self,
calendar_id: uuid.UUID
) -> List[TenantLocationContext]:
"""Get all tenants using a specific calendar"""
stmt = select(TenantLocationContext).where(
TenantLocationContext.school_calendar_id == calendar_id
)
result = await self.session.execute(stmt)
return list(result.scalars().all())
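
A hedged sketch of typical repository usage: look up a calendar and assign it to a tenant's location context inside one session from database_manager (assumes the external service's database is configured and migrated):

from uuid import UUID

from app.core.database import database_manager
from app.repositories.calendar_repository import CalendarRepository

async def assign_calendar(tenant_id: UUID, calendar_id: UUID) -> None:
    """Attach an existing school calendar to a tenant's location context."""
    async with database_manager.get_session() as session:
        repo = CalendarRepository(session)
        calendar = await repo.get_calendar_by_id(calendar_id)
        if calendar is None:
            raise ValueError(f"Unknown calendar: {calendar_id}")
        await repo.create_or_update_tenant_location_context(
            tenant_id=tenant_id,
            city_id=calendar.city_id,
            school_calendar_id=calendar.id,
        )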


@@ -0,0 +1,134 @@
# services/external/app/schemas/calendar.py
"""
Calendar Schemas - Request/Response types for school calendars and location context
"""
from pydantic import BaseModel, Field
from typing import Optional, List, Dict, Any
from uuid import UUID
class SchoolCalendarResponse(BaseModel):
"""School calendar information"""
calendar_id: str
calendar_name: str
city_id: str
school_type: str
academic_year: str
holiday_periods: List[Dict[str, Any]]
school_hours: Dict[str, Any]
source: Optional[str] = None
enabled: bool = True
class Config:
json_schema_extra = {
"example": {
"calendar_id": "madrid_primary_2024_2025",
"calendar_name": "Madrid Primary School Calendar 2024-2025",
"city_id": "madrid",
"school_type": "primary",
"academic_year": "2024-2025",
"holiday_periods": [
{
"name": "Christmas Holiday",
"start_date": "2024-12-23",
"end_date": "2025-01-07",
"description": "Christmas and New Year break"
}
],
"school_hours": {
"morning_start": "09:00",
"morning_end": "14:00",
"has_afternoon_session": False
},
"source": "madrid_education_dept_2024",
"enabled": True
}
}
class SchoolCalendarListResponse(BaseModel):
"""List of school calendars for a city"""
city_id: str
calendars: List[SchoolCalendarResponse]
total: int
class CalendarCheckResponse(BaseModel):
"""Response for holiday check"""
date: str = Field(..., description="Date checked (ISO format)")
is_holiday: bool = Field(..., description="Whether the date is a school holiday")
holiday_name: Optional[str] = Field(None, description="Name of the holiday if applicable")
calendar_id: str
calendar_name: str
class TenantLocationContextResponse(BaseModel):
"""Tenant location context with calendar details"""
tenant_id: str
city_id: str
neighborhood: Optional[str] = None
local_events: Optional[List[Dict[str, Any]]] = None
notes: Optional[str] = None
calendar: Optional[Dict[str, Any]] = Field(
None,
description="Full calendar details if assigned"
)
class Config:
json_schema_extra = {
"example": {
"tenant_id": "fbffcf18-d02a-4104-b6e3-0b32006e3e47",
"city_id": "madrid",
"neighborhood": "Chamberí",
"local_events": [
{
"name": "Neighborhood Festival",
"date": "2025-06-15",
"impact": "high"
}
],
"notes": "Bakery near primary school",
"calendar": {
"calendar_id": "uuid",
"calendar_name": "Madrid Primary School Calendar 2024-2025",
"school_type": "primary",
"academic_year": "2024-2025",
"holiday_periods": [],
"school_hours": {},
"source": "madrid_education_dept_2024"
}
}
}
class TenantLocationContextCreateRequest(BaseModel):
"""Request to create/update tenant location context"""
city_id: str = Field(..., description="City ID (e.g., 'madrid')")
school_calendar_id: Optional[UUID] = Field(
None,
description="School calendar ID to assign"
)
neighborhood: Optional[str] = Field(None, description="Neighborhood name")
local_events: Optional[List[Dict[str, Any]]] = Field(
None,
description="Local events specific to this location"
)
notes: Optional[str] = Field(None, description="Additional notes")
class Config:
json_schema_extra = {
"example": {
"city_id": "madrid",
"school_calendar_id": "123e4567-e89b-12d3-a456-426614174000",
"neighborhood": "Chamberí",
"local_events": [
{
"name": "Local Market Day",
"date": "2025-05-20",
"impact": "medium"
}
],
"notes": "Bakery located near primary school entrance"
}
}
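
A minimal sketch validating a request body against the schema above (pydantic v2 API, which the json_schema_extra config key suggests is in use):

from app.schemas.calendar import TenantLocationContextCreateRequest

payload = {
    "city_id": "madrid",
    "school_calendar_id": "123e4567-e89b-12d3-a456-426614174000",
    "neighborhood": "Chamberí",
    "local_events": [
        {"name": "Local Market Day", "date": "2025-05-20", "impact": "medium"}
    ],
    "notes": "Bakery located near primary school entrance",
}

req = TenantLocationContextCreateRequest.model_validate(payload)
print(type(req.school_calendar_id))   # <class 'uuid.UUID'> - the string is coerced to a UUID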


@@ -0,0 +1,69 @@
"""add_school_calendars_and_location_context
Revision ID: 693e0d98eaf9
Revises: b97bab14ac47
Create Date: 2025-11-02 08:56:45.463138+01:00
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision: str = '693e0d98eaf9'
down_revision: Union[str, None] = 'b97bab14ac47'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
# Create school_calendars table
op.create_table(
'school_calendars',
sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('city_id', sa.String(length=50), nullable=False),
sa.Column('calendar_name', sa.String(length=100), nullable=False),
sa.Column('school_type', sa.String(length=20), nullable=False),
sa.Column('academic_year', sa.String(length=10), nullable=False),
sa.Column('holiday_periods', postgresql.JSONB(astext_type=sa.Text()), nullable=False),
sa.Column('school_hours', postgresql.JSONB(astext_type=sa.Text()), nullable=False),
sa.Column('source', sa.String(length=100), nullable=True),
sa.Column('enabled', sa.Boolean(), nullable=False),
sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index('idx_school_calendar_city_year', 'school_calendars', ['city_id', 'academic_year'], unique=False)
op.create_index('idx_school_calendar_city_type', 'school_calendars', ['city_id', 'school_type'], unique=False)
op.create_index(op.f('ix_school_calendars_city_id'), 'school_calendars', ['city_id'], unique=False)
# Create tenant_location_contexts table
op.create_table(
'tenant_location_contexts',
sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('city_id', sa.String(length=50), nullable=False),
sa.Column('school_calendar_id', postgresql.UUID(as_uuid=True), nullable=True),
sa.Column('neighborhood', sa.String(length=100), nullable=True),
sa.Column('local_events', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('notes', sa.String(length=500), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
sa.PrimaryKeyConstraint('tenant_id')
)
op.create_index('idx_tenant_location_calendar', 'tenant_location_contexts', ['school_calendar_id'], unique=False)
op.create_index(op.f('ix_tenant_location_contexts_city_id'), 'tenant_location_contexts', ['city_id'], unique=False)
def downgrade() -> None:
# Drop tenant_location_contexts table
op.drop_index(op.f('ix_tenant_location_contexts_city_id'), table_name='tenant_location_contexts')
op.drop_index('idx_tenant_location_calendar', table_name='tenant_location_contexts')
op.drop_table('tenant_location_contexts')
# Drop school_calendars table
op.drop_index(op.f('ix_school_calendars_city_id'), table_name='school_calendars')
op.drop_index('idx_school_calendar_city_type', table_name='school_calendars')
op.drop_index('idx_school_calendar_city_year', table_name='school_calendars')
op.drop_table('school_calendars')


@@ -0,0 +1,119 @@
#!/usr/bin/env python3
"""
Seed School Calendars Script
Loads school calendars from CalendarRegistry into the database
"""
import asyncio
import sys
import os
# Add parent directory to path to allow imports
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from app.core.database import database_manager
from app.repositories.calendar_repository import CalendarRepository
from app.registry.calendar_registry import CalendarRegistry
import structlog
logger = structlog.get_logger()
async def seed_calendars():
"""Seed school calendars from registry into database"""
logger.info("Starting school calendar seeding...")
# Get all calendars from registry
calendars = CalendarRegistry.get_all_calendars()
logger.info(f"Found {len(calendars)} calendars in registry")
# Initialize database
await database_manager.initialize()
try:
async with database_manager.get_session() as session:
repo = CalendarRepository(session)
seeded_count = 0
skipped_count = 0
for cal_def in calendars:
logger.info(
"Processing calendar",
calendar_id=cal_def.calendar_id,
city=cal_def.city_id,
type=cal_def.school_type.value
)
# Check if calendar already exists
existing = await repo.get_calendar_by_city_type_year(
city_id=cal_def.city_id,
school_type=cal_def.school_type.value,
academic_year=cal_def.academic_year
)
if existing:
logger.info(
"Calendar already exists, skipping",
calendar_id=cal_def.calendar_id
)
skipped_count += 1
continue
# Convert holiday periods to dict format
holiday_periods = [
{
"name": hp.name,
"start_date": hp.start_date,
"end_date": hp.end_date,
"description": hp.description
}
for hp in cal_def.holiday_periods
]
# Convert school hours to dict format
school_hours = {
"morning_start": cal_def.school_hours.morning_start,
"morning_end": cal_def.school_hours.morning_end,
"has_afternoon_session": cal_def.school_hours.has_afternoon_session,
"afternoon_start": cal_def.school_hours.afternoon_start,
"afternoon_end": cal_def.school_hours.afternoon_end
}
# Create calendar in database
created_calendar = await repo.create_school_calendar(
city_id=cal_def.city_id,
calendar_name=cal_def.calendar_name,
school_type=cal_def.school_type.value,
academic_year=cal_def.academic_year,
holiday_periods=holiday_periods,
school_hours=school_hours,
source=cal_def.source,
enabled=cal_def.enabled
)
logger.info(
"Calendar seeded successfully",
calendar_id=str(created_calendar.id),
city=cal_def.city_id,
type=cal_def.school_type.value
)
seeded_count += 1
logger.info(
"Calendar seeding completed",
seeded=seeded_count,
skipped=skipped_count,
total=len(calendars)
)
except Exception as e:
logger.error("Error seeding calendars", error=str(e))
raise
finally:
await database_manager.close()
if __name__ == "__main__":
asyncio.run(seed_calendars())


@@ -0,0 +1,237 @@
# services/forecasting/app/api/audit.py
"""
Audit Logs API - Retrieve audit trail for forecasting service
"""
from fastapi import APIRouter, Depends, HTTPException, Query, Path, status
from typing import Optional, Dict, Any
from uuid import UUID
from datetime import datetime
import structlog
from sqlalchemy import select, func, and_
from sqlalchemy.ext.asyncio import AsyncSession
from app.models import AuditLog
from shared.auth.decorators import get_current_user_dep
from shared.auth.access_control import require_user_role
from shared.routing import RouteBuilder
from shared.models.audit_log_schemas import (
AuditLogResponse,
AuditLogListResponse,
AuditLogStatsResponse
)
from app.core.database import database_manager
route_builder = RouteBuilder('forecasting')
router = APIRouter(tags=["audit-logs"])
logger = structlog.get_logger()
async def get_db():
"""Database session dependency"""
async with database_manager.get_session() as session:
yield session
@router.get(
route_builder.build_base_route("audit-logs"),
response_model=AuditLogListResponse
)
@require_user_role(['admin', 'owner'])
async def get_audit_logs(
tenant_id: UUID = Path(..., description="Tenant ID"),
start_date: Optional[datetime] = Query(None, description="Filter logs from this date"),
end_date: Optional[datetime] = Query(None, description="Filter logs until this date"),
user_id: Optional[UUID] = Query(None, description="Filter by user ID"),
action: Optional[str] = Query(None, description="Filter by action type"),
resource_type: Optional[str] = Query(None, description="Filter by resource type"),
severity: Optional[str] = Query(None, description="Filter by severity level"),
search: Optional[str] = Query(None, description="Search in description field"),
limit: int = Query(100, ge=1, le=1000, description="Number of records to return"),
offset: int = Query(0, ge=0, description="Number of records to skip"),
current_user: Dict[str, Any] = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""
Get audit logs for forecasting service.
Requires admin or owner role.
"""
try:
logger.info(
"Retrieving audit logs",
tenant_id=tenant_id,
user_id=current_user.get("user_id"),
filters={
"start_date": start_date,
"end_date": end_date,
"action": action,
"resource_type": resource_type,
"severity": severity
}
)
# Build query filters
filters = [AuditLog.tenant_id == tenant_id]
if start_date:
filters.append(AuditLog.created_at >= start_date)
if end_date:
filters.append(AuditLog.created_at <= end_date)
if user_id:
filters.append(AuditLog.user_id == user_id)
if action:
filters.append(AuditLog.action == action)
if resource_type:
filters.append(AuditLog.resource_type == resource_type)
if severity:
filters.append(AuditLog.severity == severity)
if search:
filters.append(AuditLog.description.ilike(f"%{search}%"))
# Count total matching records
count_query = select(func.count()).select_from(AuditLog).where(and_(*filters))
total_result = await db.execute(count_query)
total = total_result.scalar() or 0
# Fetch paginated results
query = (
select(AuditLog)
.where(and_(*filters))
.order_by(AuditLog.created_at.desc())
.limit(limit)
.offset(offset)
)
result = await db.execute(query)
audit_logs = result.scalars().all()
# Convert to response models
items = [AuditLogResponse.from_orm(log) for log in audit_logs]
logger.info(
"Successfully retrieved audit logs",
tenant_id=tenant_id,
total=total,
returned=len(items)
)
return AuditLogListResponse(
items=items,
total=total,
limit=limit,
offset=offset,
has_more=(offset + len(items)) < total
)
except Exception as e:
logger.error(
"Failed to retrieve audit logs",
error=str(e),
tenant_id=tenant_id
)
raise HTTPException(
status_code=500,
detail=f"Failed to retrieve audit logs: {str(e)}"
)
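# Illustrative pagination semantics (hypothetical numbers, not part of the stored data):
# with total=250 matching rows, limit=100 and offset=200 the query above returns the last
# 50 rows and has_more = (200 + 50) < 250 -> False; with offset=100 it returns 100 rows
# and has_more = (100 + 100) < 250 -> True.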
@router.get(
route_builder.build_base_route("audit-logs/stats"),
response_model=AuditLogStatsResponse
)
@require_user_role(['admin', 'owner'])
async def get_audit_log_stats(
tenant_id: UUID = Path(..., description="Tenant ID"),
start_date: Optional[datetime] = Query(None, description="Filter logs from this date"),
end_date: Optional[datetime] = Query(None, description="Filter logs until this date"),
current_user: Dict[str, Any] = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""
Get audit log statistics for forecasting service.
Requires admin or owner role.
"""
try:
logger.info(
"Retrieving audit log statistics",
tenant_id=tenant_id,
user_id=current_user.get("user_id")
)
# Build base filters
filters = [AuditLog.tenant_id == tenant_id]
if start_date:
filters.append(AuditLog.created_at >= start_date)
if end_date:
filters.append(AuditLog.created_at <= end_date)
# Total events
count_query = select(func.count()).select_from(AuditLog).where(and_(*filters))
total_result = await db.execute(count_query)
total_events = total_result.scalar() or 0
# Events by action
action_query = (
select(AuditLog.action, func.count().label('count'))
.where(and_(*filters))
.group_by(AuditLog.action)
)
action_result = await db.execute(action_query)
events_by_action = {row.action: row.count for row in action_result}
# Events by severity
severity_query = (
select(AuditLog.severity, func.count().label('count'))
.where(and_(*filters))
.group_by(AuditLog.severity)
)
severity_result = await db.execute(severity_query)
events_by_severity = {row.severity: row.count for row in severity_result}
# Events by resource type
resource_query = (
select(AuditLog.resource_type, func.count().label('count'))
.where(and_(*filters))
.group_by(AuditLog.resource_type)
)
resource_result = await db.execute(resource_query)
events_by_resource_type = {row.resource_type: row.count for row in resource_result}
# Date range
date_range_query = (
select(
func.min(AuditLog.created_at).label('min_date'),
func.max(AuditLog.created_at).label('max_date')
)
.where(and_(*filters))
)
date_result = await db.execute(date_range_query)
date_row = date_result.one()
logger.info(
"Successfully retrieved audit log statistics",
tenant_id=tenant_id,
total_events=total_events
)
return AuditLogStatsResponse(
total_events=total_events,
events_by_action=events_by_action,
events_by_severity=events_by_severity,
events_by_resource_type=events_by_resource_type,
date_range={
"min": date_row.min_date,
"max": date_row.max_date
}
)
except Exception as e:
logger.error(
"Failed to retrieve audit log statistics",
error=str(e),
tenant_id=tenant_id
)
raise HTTPException(
status_code=500,
detail=f"Failed to retrieve audit log statistics: {str(e)}"
)
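# Illustrative AuditLogStatsResponse payload (field names from the return above, counts hypothetical):
# {
#   "total_events": 42,
#   "events_by_action": {"create": 20, "update": 15, "delete": 7},
#   "events_by_severity": {"info": 30, "warning": 10, "critical": 2},
#   "events_by_resource_type": {"forecast": 25, "scenario": 17},
#   "date_range": {"min": "2025-01-02T08:15:00", "max": "2025-03-31T18:30:00"}
# }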

View File

@@ -15,7 +15,7 @@ from app.services.forecasting_alert_service import ForecastingAlertService
from shared.service_base import StandardFastAPIService
# Import API routers
-from app.api import forecasts, forecasting_operations, analytics, scenario_operations, internal_demo
+from app.api import forecasts, forecasting_operations, analytics, scenario_operations, internal_demo, audit
class ForecastingService(StandardFastAPIService):
@@ -163,6 +163,8 @@ service.setup_standard_endpoints()
service.setup_custom_endpoints()
# Include API routers
# IMPORTANT: Register audit router FIRST to avoid route matching conflicts
service.add_router(audit.router)
service.add_router(forecasts.router)
service.add_router(forecasting_operations.router)
service.add_router(analytics.router)

View File

@@ -0,0 +1,235 @@
"""
Calendar-based Feature Engineering for Forecasting Service
Generates calendar features for future date predictions
"""
import pandas as pd
import structlog
from typing import Dict, List, Any, Optional
from datetime import datetime, date, time, timedelta
from app.services.data_client import data_client
logger = structlog.get_logger()
class ForecastCalendarFeatures:
"""
Generates calendar-based features for future predictions
Optimized for forecasting service (future dates only)
"""
def __init__(self):
self.calendar_cache = {} # Cache calendar data per tenant
async def get_calendar_for_tenant(
self,
tenant_id: str
) -> Optional[Dict[str, Any]]:
"""Get cached calendar for tenant"""
if tenant_id in self.calendar_cache:
return self.calendar_cache[tenant_id]
calendar = await data_client.fetch_tenant_calendar(tenant_id)
if calendar:
self.calendar_cache[tenant_id] = calendar
return calendar
def _is_date_in_holiday_period(
self,
check_date: date,
holiday_periods: List[Dict[str, Any]]
) -> tuple[bool, Optional[str]]:
"""Check if date is within any holiday period"""
for period in holiday_periods:
start = datetime.strptime(period["start_date"], "%Y-%m-%d").date()
end = datetime.strptime(period["end_date"], "%Y-%m-%d").date()
if start <= check_date <= end:
return True, period["name"]
return False, None
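# Illustrative holiday_periods entry (hypothetical dates; the keys match those read above):
#   {"name": "Christmas break", "start_date": "2025-12-22", "end_date": "2026-01-06"}
# so _is_date_in_holiday_period(date(2025, 12, 24), [entry]) would return (True, "Christmas break").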
def _is_school_hours_active(
self,
check_datetime: datetime,
school_hours: Dict[str, Any]
) -> bool:
"""Check if datetime falls during school operating hours"""
# Only weekdays
if check_datetime.weekday() >= 5:
return False
check_time = check_datetime.time()
# Morning session
morning_start = datetime.strptime(
school_hours["morning_start"], "%H:%M"
).time()
morning_end = datetime.strptime(
school_hours["morning_end"], "%H:%M"
).time()
if morning_start <= check_time <= morning_end:
return True
# Afternoon session if exists
if school_hours.get("has_afternoon_session", False):
afternoon_start = datetime.strptime(
school_hours["afternoon_start"], "%H:%M"
).time()
afternoon_end = datetime.strptime(
school_hours["afternoon_end"], "%H:%M"
).time()
if afternoon_start <= check_time <= afternoon_end:
return True
return False
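# Illustrative school_hours mapping (hypothetical times, "%H:%M" strings as parsed above):
#   {"morning_start": "09:00", "morning_end": "14:00", "has_afternoon_session": True,
#    "afternoon_start": "15:00", "afternoon_end": "17:00"}
# A weekday timestamp at 10:30 falls inside the morning session and returns True;
# any Saturday or Sunday timestamp returns False regardless of the time.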
def _calculate_school_proximity_intensity(
self,
check_datetime: datetime,
school_hours: Dict[str, Any]
) -> float:
"""
Calculate school proximity impact intensity
Returns 0.0-1.0 based on drop-off/pick-up times
"""
# Only weekdays
if check_datetime.weekday() >= 5:
return 0.0
check_time = check_datetime.time()
morning_start = datetime.strptime(
school_hours["morning_start"], "%H:%M"
).time()
morning_end = datetime.strptime(
school_hours["morning_end"], "%H:%M"
).time()
# Morning drop-off peak (30 min before to 15 min after start)
drop_off_start = (
datetime.combine(date.today(), morning_start) - timedelta(minutes=30)
).time()
drop_off_end = (
datetime.combine(date.today(), morning_start) + timedelta(minutes=15)
).time()
if drop_off_start <= check_time <= drop_off_end:
return 1.0 # Peak
# Morning pick-up peak (15 min before to 30 min after end)
pickup_start = (
datetime.combine(date.today(), morning_end) - timedelta(minutes=15)
).time()
pickup_end = (
datetime.combine(date.today(), morning_end) + timedelta(minutes=30)
).time()
if pickup_start <= check_time <= pickup_end:
return 1.0 # Peak
# During school hours (moderate)
if morning_start <= check_time <= morning_end:
return 0.3
return 0.0
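# Worked example (assuming morning_start="09:00" and morning_end="14:00"):
# 08:30-09:15 (drop-off window) and 13:45-14:30 (pick-up window) return 1.0,
# other times between 09:00 and 14:00 return 0.3, evenings and weekends return 0.0.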
async def add_calendar_features(
self,
df: pd.DataFrame,
tenant_id: str,
date_column: str = "ds"
) -> pd.DataFrame:
"""
Add calendar features to forecast dataframe
Args:
df: Forecast dataframe with future dates
tenant_id: Tenant ID to fetch calendar
date_column: Name of date column (default 'ds' for Prophet)
Returns:
DataFrame with calendar features added
"""
try:
logger.info(
"Adding calendar features to forecast",
tenant_id=tenant_id,
rows=len(df)
)
# Get calendar
calendar = await self.get_calendar_for_tenant(tenant_id)
if not calendar:
logger.info(
"No calendar available, using zero features",
tenant_id=tenant_id
)
df["is_school_holiday"] = 0
df["school_hours_active"] = 0
df["school_proximity_intensity"] = 0.0
return df
holiday_periods = calendar.get("holiday_periods", [])
school_hours = calendar.get("school_hours", {})
# Initialize feature lists
school_holidays = []
hours_active = []
proximity_intensity = []
# Process each row
for idx, row in df.iterrows():
row_date = pd.to_datetime(row[date_column])
# Check holiday
is_holiday, _ = self._is_date_in_holiday_period(
row_date.date(),
holiday_periods
)
school_holidays.append(1 if is_holiday else 0)
# Check school hours and proximity (if datetime has time component)
if hasattr(row_date, 'hour'):
hours_active.append(
1 if self._is_school_hours_active(row_date, school_hours) else 0
)
proximity_intensity.append(
self._calculate_school_proximity_intensity(row_date, school_hours)
)
else:
hours_active.append(0)
proximity_intensity.append(0.0)
# Add features
df["is_school_holiday"] = school_holidays
df["school_hours_active"] = hours_active
df["school_proximity_intensity"] = proximity_intensity
logger.info(
"Calendar features added to forecast",
tenant_id=tenant_id,
holidays_in_forecast=sum(school_holidays)
)
return df
except Exception as e:
logger.error(
"Error adding calendar features to forecast",
tenant_id=tenant_id,
error=str(e)
)
# Return with zero features on error
df["is_school_holiday"] = 0
df["school_hours_active"] = 0
df["school_proximity_intensity"] = 0.0
return df
# Global instance
forecast_calendar_features = ForecastCalendarFeatures()
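# Sketch of an assumed call site (tenant id and dataframe are illustrative):
#   future_df = pd.DataFrame({"ds": pd.date_range("2025-06-02 06:00", periods=48, freq="h")})
#   future_df = await forecast_calendar_features.add_calendar_features(future_df, tenant_id="tenant-123")
# which appends the is_school_holiday, school_hours_active and school_proximity_intensity columns.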

View File

@@ -61,5 +61,72 @@ class DataClient:
logger.error(f"Error fetching weather data: {e}", tenant_id=tenant_id)
return []
async def fetch_tenant_calendar(
self,
tenant_id: str
) -> Optional[Dict[str, Any]]:
"""
Fetch tenant's assigned school calendar
Returns None if no calendar assigned
"""
try:
location_context = await self.external_client.get_tenant_location_context(
tenant_id=tenant_id
)
if location_context and location_context.get("calendar"):
logger.info(
"Fetched calendar for tenant",
tenant_id=tenant_id,
calendar_name=location_context["calendar"].get("calendar_name")
)
return location_context["calendar"]
else:
logger.info("No calendar assigned to tenant", tenant_id=tenant_id)
return None
except Exception as e:
logger.error(f"Error fetching calendar: {e}", tenant_id=tenant_id)
return None
async def check_school_holiday(
self,
calendar_id: str,
check_date: str,
tenant_id: str
) -> bool:
"""
Check if a date is a school holiday
Args:
calendar_id: School calendar UUID
check_date: Date in ISO format (YYYY-MM-DD)
tenant_id: Tenant ID for auth
Returns:
True if school holiday, False otherwise
"""
try:
result = await self.external_client.check_is_school_holiday(
calendar_id=calendar_id,
check_date=check_date,
tenant_id=tenant_id
)
if result:
is_holiday = result.get("is_holiday", False)
if is_holiday:
logger.debug(
"School holiday detected",
date=check_date,
holiday_name=result.get("holiday_name")
)
return is_holiday
return False
except Exception as e:
logger.error(f"Error checking school holiday: {e}", date=check_date)
return False
# Global instance - same interface as before, but with a much simpler implementation
data_client = DataClient()
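# Sketch of assumed usage (tenant and calendar ids are illustrative placeholders):
#   calendar = await data_client.fetch_tenant_calendar(tenant_id="tenant-123")
#   on_holiday = await data_client.check_school_holiday(
#       calendar_id="3f9c2c1e-0000-0000-0000-000000000000", check_date="2025-12-24", tenant_id="tenant-123"
#   )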

View File

@@ -0,0 +1,237 @@
# services/inventory/app/api/audit.py
"""
Audit Logs API - Retrieve audit trail for inventory service
"""
from fastapi import APIRouter, Depends, HTTPException, Query, Path, status
from typing import Optional, Dict, Any
from uuid import UUID
from datetime import datetime
import structlog
from sqlalchemy import select, func, and_
from sqlalchemy.ext.asyncio import AsyncSession
from app.models import AuditLog
from shared.auth.decorators import get_current_user_dep
from shared.auth.access_control import require_user_role
from shared.routing import RouteBuilder
from shared.models.audit_log_schemas import (
AuditLogResponse,
AuditLogListResponse,
AuditLogStatsResponse
)
from app.core.database import database_manager
route_builder = RouteBuilder('inventory')
router = APIRouter(tags=["audit-logs"])
logger = structlog.get_logger()
async def get_db():
"""Database session dependency"""
async with database_manager.get_session() as session:
yield session
@router.get(
route_builder.build_base_route("audit-logs"),
response_model=AuditLogListResponse
)
@require_user_role(['admin', 'owner'])
async def get_audit_logs(
tenant_id: UUID = Path(..., description="Tenant ID"),
start_date: Optional[datetime] = Query(None, description="Filter logs from this date"),
end_date: Optional[datetime] = Query(None, description="Filter logs until this date"),
user_id: Optional[UUID] = Query(None, description="Filter by user ID"),
action: Optional[str] = Query(None, description="Filter by action type"),
resource_type: Optional[str] = Query(None, description="Filter by resource type"),
severity: Optional[str] = Query(None, description="Filter by severity level"),
search: Optional[str] = Query(None, description="Search in description field"),
limit: int = Query(100, ge=1, le=1000, description="Number of records to return"),
offset: int = Query(0, ge=0, description="Number of records to skip"),
current_user: Dict[str, Any] = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""
Get audit logs for inventory service.
Requires admin or owner role.
"""
try:
logger.info(
"Retrieving audit logs",
tenant_id=tenant_id,
user_id=current_user.get("user_id"),
filters={
"start_date": start_date,
"end_date": end_date,
"action": action,
"resource_type": resource_type,
"severity": severity
}
)
# Build query filters
filters = [AuditLog.tenant_id == tenant_id]
if start_date:
filters.append(AuditLog.created_at >= start_date)
if end_date:
filters.append(AuditLog.created_at <= end_date)
if user_id:
filters.append(AuditLog.user_id == user_id)
if action:
filters.append(AuditLog.action == action)
if resource_type:
filters.append(AuditLog.resource_type == resource_type)
if severity:
filters.append(AuditLog.severity == severity)
if search:
filters.append(AuditLog.description.ilike(f"%{search}%"))
# Count total matching records
count_query = select(func.count()).select_from(AuditLog).where(and_(*filters))
total_result = await db.execute(count_query)
total = total_result.scalar() or 0
# Fetch paginated results
query = (
select(AuditLog)
.where(and_(*filters))
.order_by(AuditLog.created_at.desc())
.limit(limit)
.offset(offset)
)
result = await db.execute(query)
audit_logs = result.scalars().all()
# Convert to response models
items = [AuditLogResponse.from_orm(log) for log in audit_logs]
logger.info(
"Successfully retrieved audit logs",
tenant_id=tenant_id,
total=total,
returned=len(items)
)
return AuditLogListResponse(
items=items,
total=total,
limit=limit,
offset=offset,
has_more=(offset + len(items)) < total
)
except Exception as e:
logger.error(
"Failed to retrieve audit logs",
error=str(e),
tenant_id=tenant_id
)
raise HTTPException(
status_code=500,
detail=f"Failed to retrieve audit logs: {str(e)}"
)
@router.get(
route_builder.build_base_route("audit-logs/stats"),
response_model=AuditLogStatsResponse
)
@require_user_role(['admin', 'owner'])
async def get_audit_log_stats(
tenant_id: UUID = Path(..., description="Tenant ID"),
start_date: Optional[datetime] = Query(None, description="Filter logs from this date"),
end_date: Optional[datetime] = Query(None, description="Filter logs until this date"),
current_user: Dict[str, Any] = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""
Get audit log statistics for inventory service.
Requires admin or owner role.
"""
try:
logger.info(
"Retrieving audit log statistics",
tenant_id=tenant_id,
user_id=current_user.get("user_id")
)
# Build base filters
filters = [AuditLog.tenant_id == tenant_id]
if start_date:
filters.append(AuditLog.created_at >= start_date)
if end_date:
filters.append(AuditLog.created_at <= end_date)
# Total events
count_query = select(func.count()).select_from(AuditLog).where(and_(*filters))
total_result = await db.execute(count_query)
total_events = total_result.scalar() or 0
# Events by action
action_query = (
select(AuditLog.action, func.count().label('count'))
.where(and_(*filters))
.group_by(AuditLog.action)
)
action_result = await db.execute(action_query)
events_by_action = {row.action: row.count for row in action_result}
# Events by severity
severity_query = (
select(AuditLog.severity, func.count().label('count'))
.where(and_(*filters))
.group_by(AuditLog.severity)
)
severity_result = await db.execute(severity_query)
events_by_severity = {row.severity: row.count for row in severity_result}
# Events by resource type
resource_query = (
select(AuditLog.resource_type, func.count().label('count'))
.where(and_(*filters))
.group_by(AuditLog.resource_type)
)
resource_result = await db.execute(resource_query)
events_by_resource_type = {row.resource_type: row.count for row in resource_result}
# Date range
date_range_query = (
select(
func.min(AuditLog.created_at).label('min_date'),
func.max(AuditLog.created_at).label('max_date')
)
.where(and_(*filters))
)
date_result = await db.execute(date_range_query)
date_row = date_result.one()
logger.info(
"Successfully retrieved audit log statistics",
tenant_id=tenant_id,
total_events=total_events
)
return AuditLogStatsResponse(
total_events=total_events,
events_by_action=events_by_action,
events_by_severity=events_by_severity,
events_by_resource_type=events_by_resource_type,
date_range={
"min": date_row.min_date,
"max": date_row.max_date
}
)
except Exception as e:
logger.error(
"Failed to retrieve audit log statistics",
error=str(e),
tenant_id=tenant_id
)
raise HTTPException(
status_code=500,
detail=f"Failed to retrieve audit log statistics: {str(e)}"
)

View File

@@ -25,7 +25,8 @@ from app.api import (
dashboard,
analytics,
sustainability,
-internal_demo
+internal_demo,
+audit
)
@@ -122,6 +123,8 @@ app = service.create_app()
service.setup_standard_endpoints()
# Include new standardized routers
# IMPORTANT: Register audit router FIRST to avoid route matching conflicts
service.add_router(audit.router)
service.add_router(ingredients.router)
service.add_router(stock_entries.router)
service.add_router(transformations.router)

View File

@@ -16,7 +16,7 @@ from shared.routing.route_builder import RouteBuilder
router = APIRouter()
logger = structlog.get_logger()
-route_builder = RouteBuilder('notification')
+route_builder = RouteBuilder('notifications')
@router.get(

View File

@@ -0,0 +1,237 @@
# services/notification/app/api/audit.py
"""
Audit Logs API - Retrieve audit trail for notification service
"""
from fastapi import APIRouter, Depends, HTTPException, Query, Path, status
from typing import Optional, Dict, Any
from uuid import UUID
from datetime import datetime
import structlog
from sqlalchemy import select, func, and_
from sqlalchemy.ext.asyncio import AsyncSession
from app.models import AuditLog
from shared.auth.decorators import get_current_user_dep
from shared.auth.access_control import require_user_role
from shared.routing import RouteBuilder
from shared.models.audit_log_schemas import (
AuditLogResponse,
AuditLogListResponse,
AuditLogStatsResponse
)
from app.core.database import database_manager
route_builder = RouteBuilder('notifications')
router = APIRouter(tags=["audit-logs"])
logger = structlog.get_logger()
async def get_db():
"""Database session dependency"""
async with database_manager.get_session() as session:
yield session
@router.get(
route_builder.build_base_route("audit-logs"),
response_model=AuditLogListResponse
)
@require_user_role(['admin', 'owner'])
async def get_audit_logs(
tenant_id: UUID = Path(..., description="Tenant ID"),
start_date: Optional[datetime] = Query(None, description="Filter logs from this date"),
end_date: Optional[datetime] = Query(None, description="Filter logs until this date"),
user_id: Optional[UUID] = Query(None, description="Filter by user ID"),
action: Optional[str] = Query(None, description="Filter by action type"),
resource_type: Optional[str] = Query(None, description="Filter by resource type"),
severity: Optional[str] = Query(None, description="Filter by severity level"),
search: Optional[str] = Query(None, description="Search in description field"),
limit: int = Query(100, ge=1, le=1000, description="Number of records to return"),
offset: int = Query(0, ge=0, description="Number of records to skip"),
current_user: Dict[str, Any] = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""
Get audit logs for notification service.
Requires admin or owner role.
"""
try:
logger.info(
"Retrieving audit logs",
tenant_id=tenant_id,
user_id=current_user.get("user_id"),
filters={
"start_date": start_date,
"end_date": end_date,
"action": action,
"resource_type": resource_type,
"severity": severity
}
)
# Build query filters
filters = [AuditLog.tenant_id == tenant_id]
if start_date:
filters.append(AuditLog.created_at >= start_date)
if end_date:
filters.append(AuditLog.created_at <= end_date)
if user_id:
filters.append(AuditLog.user_id == user_id)
if action:
filters.append(AuditLog.action == action)
if resource_type:
filters.append(AuditLog.resource_type == resource_type)
if severity:
filters.append(AuditLog.severity == severity)
if search:
filters.append(AuditLog.description.ilike(f"%{search}%"))
# Count total matching records
count_query = select(func.count()).select_from(AuditLog).where(and_(*filters))
total_result = await db.execute(count_query)
total = total_result.scalar() or 0
# Fetch paginated results
query = (
select(AuditLog)
.where(and_(*filters))
.order_by(AuditLog.created_at.desc())
.limit(limit)
.offset(offset)
)
result = await db.execute(query)
audit_logs = result.scalars().all()
# Convert to response models
items = [AuditLogResponse.from_orm(log) for log in audit_logs]
logger.info(
"Successfully retrieved audit logs",
tenant_id=tenant_id,
total=total,
returned=len(items)
)
return AuditLogListResponse(
items=items,
total=total,
limit=limit,
offset=offset,
has_more=(offset + len(items)) < total
)
except Exception as e:
logger.error(
"Failed to retrieve audit logs",
error=str(e),
tenant_id=tenant_id
)
raise HTTPException(
status_code=500,
detail=f"Failed to retrieve audit logs: {str(e)}"
)
@router.get(
route_builder.build_base_route("audit-logs/stats"),
response_model=AuditLogStatsResponse
)
@require_user_role(['admin', 'owner'])
async def get_audit_log_stats(
tenant_id: UUID = Path(..., description="Tenant ID"),
start_date: Optional[datetime] = Query(None, description="Filter logs from this date"),
end_date: Optional[datetime] = Query(None, description="Filter logs until this date"),
current_user: Dict[str, Any] = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""
Get audit log statistics for notification service.
Requires admin or owner role.
"""
try:
logger.info(
"Retrieving audit log statistics",
tenant_id=tenant_id,
user_id=current_user.get("user_id")
)
# Build base filters
filters = [AuditLog.tenant_id == tenant_id]
if start_date:
filters.append(AuditLog.created_at >= start_date)
if end_date:
filters.append(AuditLog.created_at <= end_date)
# Total events
count_query = select(func.count()).select_from(AuditLog).where(and_(*filters))
total_result = await db.execute(count_query)
total_events = total_result.scalar() or 0
# Events by action
action_query = (
select(AuditLog.action, func.count().label('count'))
.where(and_(*filters))
.group_by(AuditLog.action)
)
action_result = await db.execute(action_query)
events_by_action = {row.action: row.count for row in action_result}
# Events by severity
severity_query = (
select(AuditLog.severity, func.count().label('count'))
.where(and_(*filters))
.group_by(AuditLog.severity)
)
severity_result = await db.execute(severity_query)
events_by_severity = {row.severity: row.count for row in severity_result}
# Events by resource type
resource_query = (
select(AuditLog.resource_type, func.count().label('count'))
.where(and_(*filters))
.group_by(AuditLog.resource_type)
)
resource_result = await db.execute(resource_query)
events_by_resource_type = {row.resource_type: row.count for row in resource_result}
# Date range
date_range_query = (
select(
func.min(AuditLog.created_at).label('min_date'),
func.max(AuditLog.created_at).label('max_date')
)
.where(and_(*filters))
)
date_result = await db.execute(date_range_query)
date_row = date_result.one()
logger.info(
"Successfully retrieved audit log statistics",
tenant_id=tenant_id,
total_events=total_events
)
return AuditLogStatsResponse(
total_events=total_events,
events_by_action=events_by_action,
events_by_severity=events_by_severity,
events_by_resource_type=events_by_resource_type,
date_range={
"min": date_row.min_date,
"max": date_row.max_date
}
)
except Exception as e:
logger.error(
"Failed to retrieve audit log statistics",
error=str(e),
tenant_id=tenant_id
)
raise HTTPException(
status_code=500,
detail=f"Failed to retrieve audit log statistics: {str(e)}"
)

View File

@@ -30,7 +30,7 @@ from shared.security import create_audit_logger, AuditSeverity, AuditAction
logger = structlog.get_logger()
audit_logger = create_audit_logger("notification-service", AuditLog)
router = APIRouter()
-route_builder = RouteBuilder("notification")
+route_builder = RouteBuilder('notifications')
# Dependency injection for enhanced notification service
def get_enhanced_notification_service():

View File

@@ -21,7 +21,7 @@ from shared.monitoring.metrics import track_endpoint_metrics
logger = structlog.get_logger()
router = APIRouter()
-route_builder = RouteBuilder("notification")
+route_builder = RouteBuilder('notifications')
# Dependency injection for enhanced notification service
def get_enhanced_notification_service():

View File

@@ -13,6 +13,7 @@ from app.core.database import database_manager
from app.api.notifications import router as notification_router
from app.api.notification_operations import router as notification_operations_router
from app.api.analytics import router as analytics_router
from app.api.audit import router as audit_router
from app.services.messaging import setup_messaging, cleanup_messaging
from app.services.sse_service import SSEService
from app.services.notification_orchestrator import NotificationOrchestrator
@@ -252,9 +253,12 @@ service.setup_standard_endpoints()
service.setup_custom_endpoints()
# Include routers
-service.add_router(notification_router, tags=["notifications"])
+# IMPORTANT: Register audit router FIRST to avoid route matching conflicts
+# where {notification_id} would match literal paths like "audit-logs"
+service.add_router(audit_router, tags=["audit-logs"])
service.add_router(notification_operations_router, tags=["notification-operations"])
service.add_router(analytics_router, tags=["notifications-analytics"])
+service.add_router(notification_router, tags=["notifications"])
if __name__ == "__main__":
import uvicorn

View File

@@ -0,0 +1,237 @@
# services/orders/app/api/audit.py
"""
Audit Logs API - Retrieve audit trail for orders service
"""
from fastapi import APIRouter, Depends, HTTPException, Query, Path, status
from typing import Optional, Dict, Any
from uuid import UUID
from datetime import datetime
import structlog
from sqlalchemy import select, func, and_
from sqlalchemy.ext.asyncio import AsyncSession
from app.models import AuditLog
from shared.auth.decorators import get_current_user_dep
from shared.auth.access_control import require_user_role
from shared.routing import RouteBuilder
from shared.models.audit_log_schemas import (
AuditLogResponse,
AuditLogListResponse,
AuditLogStatsResponse
)
from app.core.database import database_manager
route_builder = RouteBuilder('orders')
router = APIRouter(tags=["audit-logs"])
logger = structlog.get_logger()
async def get_db():
"""Database session dependency"""
async with database_manager.get_session() as session:
yield session
@router.get(
route_builder.build_base_route("audit-logs"),
response_model=AuditLogListResponse
)
@require_user_role(['admin', 'owner'])
async def get_audit_logs(
tenant_id: UUID = Path(..., description="Tenant ID"),
start_date: Optional[datetime] = Query(None, description="Filter logs from this date"),
end_date: Optional[datetime] = Query(None, description="Filter logs until this date"),
user_id: Optional[UUID] = Query(None, description="Filter by user ID"),
action: Optional[str] = Query(None, description="Filter by action type"),
resource_type: Optional[str] = Query(None, description="Filter by resource type"),
severity: Optional[str] = Query(None, description="Filter by severity level"),
search: Optional[str] = Query(None, description="Search in description field"),
limit: int = Query(100, ge=1, le=1000, description="Number of records to return"),
offset: int = Query(0, ge=0, description="Number of records to skip"),
current_user: Dict[str, Any] = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""
Get audit logs for orders service.
Requires admin or owner role.
"""
try:
logger.info(
"Retrieving audit logs",
tenant_id=tenant_id,
user_id=current_user.get("user_id"),
filters={
"start_date": start_date,
"end_date": end_date,
"action": action,
"resource_type": resource_type,
"severity": severity
}
)
# Build query filters
filters = [AuditLog.tenant_id == tenant_id]
if start_date:
filters.append(AuditLog.created_at >= start_date)
if end_date:
filters.append(AuditLog.created_at <= end_date)
if user_id:
filters.append(AuditLog.user_id == user_id)
if action:
filters.append(AuditLog.action == action)
if resource_type:
filters.append(AuditLog.resource_type == resource_type)
if severity:
filters.append(AuditLog.severity == severity)
if search:
filters.append(AuditLog.description.ilike(f"%{search}%"))
# Count total matching records
count_query = select(func.count()).select_from(AuditLog).where(and_(*filters))
total_result = await db.execute(count_query)
total = total_result.scalar() or 0
# Fetch paginated results
query = (
select(AuditLog)
.where(and_(*filters))
.order_by(AuditLog.created_at.desc())
.limit(limit)
.offset(offset)
)
result = await db.execute(query)
audit_logs = result.scalars().all()
# Convert to response models
items = [AuditLogResponse.from_orm(log) for log in audit_logs]
logger.info(
"Successfully retrieved audit logs",
tenant_id=tenant_id,
total=total,
returned=len(items)
)
return AuditLogListResponse(
items=items,
total=total,
limit=limit,
offset=offset,
has_more=(offset + len(items)) < total
)
except Exception as e:
logger.error(
"Failed to retrieve audit logs",
error=str(e),
tenant_id=tenant_id
)
raise HTTPException(
status_code=500,
detail=f"Failed to retrieve audit logs: {str(e)}"
)
@router.get(
route_builder.build_base_route("audit-logs/stats"),
response_model=AuditLogStatsResponse
)
@require_user_role(['admin', 'owner'])
async def get_audit_log_stats(
tenant_id: UUID = Path(..., description="Tenant ID"),
start_date: Optional[datetime] = Query(None, description="Filter logs from this date"),
end_date: Optional[datetime] = Query(None, description="Filter logs until this date"),
current_user: Dict[str, Any] = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""
Get audit log statistics for orders service.
Requires admin or owner role.
"""
try:
logger.info(
"Retrieving audit log statistics",
tenant_id=tenant_id,
user_id=current_user.get("user_id")
)
# Build base filters
filters = [AuditLog.tenant_id == tenant_id]
if start_date:
filters.append(AuditLog.created_at >= start_date)
if end_date:
filters.append(AuditLog.created_at <= end_date)
# Total events
count_query = select(func.count()).select_from(AuditLog).where(and_(*filters))
total_result = await db.execute(count_query)
total_events = total_result.scalar() or 0
# Events by action
action_query = (
select(AuditLog.action, func.count().label('count'))
.where(and_(*filters))
.group_by(AuditLog.action)
)
action_result = await db.execute(action_query)
events_by_action = {row.action: row.count for row in action_result}
# Events by severity
severity_query = (
select(AuditLog.severity, func.count().label('count'))
.where(and_(*filters))
.group_by(AuditLog.severity)
)
severity_result = await db.execute(severity_query)
events_by_severity = {row.severity: row.count for row in severity_result}
# Events by resource type
resource_query = (
select(AuditLog.resource_type, func.count().label('count'))
.where(and_(*filters))
.group_by(AuditLog.resource_type)
)
resource_result = await db.execute(resource_query)
events_by_resource_type = {row.resource_type: row.count for row in resource_result}
# Date range
date_range_query = (
select(
func.min(AuditLog.created_at).label('min_date'),
func.max(AuditLog.created_at).label('max_date')
)
.where(and_(*filters))
)
date_result = await db.execute(date_range_query)
date_row = date_result.one()
logger.info(
"Successfully retrieved audit log statistics",
tenant_id=tenant_id,
total_events=total_events
)
return AuditLogStatsResponse(
total_events=total_events,
events_by_action=events_by_action,
events_by_severity=events_by_severity,
events_by_resource_type=events_by_resource_type,
date_range={
"min": date_row.min_date,
"max": date_row.max_date
}
)
except Exception as e:
logger.error(
"Failed to retrieve audit log statistics",
error=str(e),
tenant_id=tenant_id
)
raise HTTPException(
status_code=500,
detail=f"Failed to retrieve audit log statistics: {str(e)}"
)

View File

@@ -13,7 +13,7 @@ from app.core.database import database_manager
from app.api.orders import router as orders_router
from app.api.customers import router as customers_router
from app.api.order_operations import router as order_operations_router
-from app.api import internal_demo
+from app.api import internal_demo, audit
from shared.service_base import StandardFastAPIService
@@ -89,6 +89,12 @@ app = service.create_app()
service.setup_standard_endpoints()
# Include routers - organized by ATOMIC and BUSINESS operations
# IMPORTANT: Register specific routes (audit, customers) BEFORE parameterized routes (orders)
# to avoid route matching conflicts where {order_id} would match literal paths like "audit-logs"
# AUDIT: Audit log retrieval endpoints - Must be registered FIRST
service.add_router(audit.router)
# ATOMIC: Direct CRUD operations
# NOTE: Register customers_router BEFORE orders_router to ensure /customers
# matches before the parameterized /{order_id} route

View File

@@ -0,0 +1,237 @@
# services/pos/app/api/audit.py
"""
Audit Logs API - Retrieve audit trail for pos service
"""
from fastapi import APIRouter, Depends, HTTPException, Query, Path, status
from typing import Optional, Dict, Any
from uuid import UUID
from datetime import datetime
import structlog
from sqlalchemy import select, func, and_
from sqlalchemy.ext.asyncio import AsyncSession
from app.models import AuditLog
from shared.auth.decorators import get_current_user_dep
from shared.auth.access_control import require_user_role
from shared.routing import RouteBuilder
from shared.models.audit_log_schemas import (
AuditLogResponse,
AuditLogListResponse,
AuditLogStatsResponse
)
from app.core.database import database_manager
route_builder = RouteBuilder('pos')
router = APIRouter(tags=["audit-logs"])
logger = structlog.get_logger()
async def get_db():
"""Database session dependency"""
async with database_manager.get_session() as session:
yield session
@router.get(
route_builder.build_base_route("audit-logs"),
response_model=AuditLogListResponse
)
@require_user_role(['admin', 'owner'])
async def get_audit_logs(
tenant_id: UUID = Path(..., description="Tenant ID"),
start_date: Optional[datetime] = Query(None, description="Filter logs from this date"),
end_date: Optional[datetime] = Query(None, description="Filter logs until this date"),
user_id: Optional[UUID] = Query(None, description="Filter by user ID"),
action: Optional[str] = Query(None, description="Filter by action type"),
resource_type: Optional[str] = Query(None, description="Filter by resource type"),
severity: Optional[str] = Query(None, description="Filter by severity level"),
search: Optional[str] = Query(None, description="Search in description field"),
limit: int = Query(100, ge=1, le=1000, description="Number of records to return"),
offset: int = Query(0, ge=0, description="Number of records to skip"),
current_user: Dict[str, Any] = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""
Get audit logs for pos service.
Requires admin or owner role.
"""
try:
logger.info(
"Retrieving audit logs",
tenant_id=tenant_id,
user_id=current_user.get("user_id"),
filters={
"start_date": start_date,
"end_date": end_date,
"action": action,
"resource_type": resource_type,
"severity": severity
}
)
# Build query filters
filters = [AuditLog.tenant_id == tenant_id]
if start_date:
filters.append(AuditLog.created_at >= start_date)
if end_date:
filters.append(AuditLog.created_at <= end_date)
if user_id:
filters.append(AuditLog.user_id == user_id)
if action:
filters.append(AuditLog.action == action)
if resource_type:
filters.append(AuditLog.resource_type == resource_type)
if severity:
filters.append(AuditLog.severity == severity)
if search:
filters.append(AuditLog.description.ilike(f"%{search}%"))
# Count total matching records
count_query = select(func.count()).select_from(AuditLog).where(and_(*filters))
total_result = await db.execute(count_query)
total = total_result.scalar() or 0
# Fetch paginated results
query = (
select(AuditLog)
.where(and_(*filters))
.order_by(AuditLog.created_at.desc())
.limit(limit)
.offset(offset)
)
result = await db.execute(query)
audit_logs = result.scalars().all()
# Convert to response models
items = [AuditLogResponse.from_orm(log) for log in audit_logs]
logger.info(
"Successfully retrieved audit logs",
tenant_id=tenant_id,
total=total,
returned=len(items)
)
return AuditLogListResponse(
items=items,
total=total,
limit=limit,
offset=offset,
has_more=(offset + len(items)) < total
)
except Exception as e:
logger.error(
"Failed to retrieve audit logs",
error=str(e),
tenant_id=tenant_id
)
raise HTTPException(
status_code=500,
detail=f"Failed to retrieve audit logs: {str(e)}"
)
@router.get(
route_builder.build_base_route("audit-logs/stats"),
response_model=AuditLogStatsResponse
)
@require_user_role(['admin', 'owner'])
async def get_audit_log_stats(
tenant_id: UUID = Path(..., description="Tenant ID"),
start_date: Optional[datetime] = Query(None, description="Filter logs from this date"),
end_date: Optional[datetime] = Query(None, description="Filter logs until this date"),
current_user: Dict[str, Any] = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""
Get audit log statistics for pos service.
Requires admin or owner role.
"""
try:
logger.info(
"Retrieving audit log statistics",
tenant_id=tenant_id,
user_id=current_user.get("user_id")
)
# Build base filters
filters = [AuditLog.tenant_id == tenant_id]
if start_date:
filters.append(AuditLog.created_at >= start_date)
if end_date:
filters.append(AuditLog.created_at <= end_date)
# Total events
count_query = select(func.count()).select_from(AuditLog).where(and_(*filters))
total_result = await db.execute(count_query)
total_events = total_result.scalar() or 0
# Events by action
action_query = (
select(AuditLog.action, func.count().label('count'))
.where(and_(*filters))
.group_by(AuditLog.action)
)
action_result = await db.execute(action_query)
events_by_action = {row.action: row.count for row in action_result}
# Events by severity
severity_query = (
select(AuditLog.severity, func.count().label('count'))
.where(and_(*filters))
.group_by(AuditLog.severity)
)
severity_result = await db.execute(severity_query)
events_by_severity = {row.severity: row.count for row in severity_result}
# Events by resource type
resource_query = (
select(AuditLog.resource_type, func.count().label('count'))
.where(and_(*filters))
.group_by(AuditLog.resource_type)
)
resource_result = await db.execute(resource_query)
events_by_resource_type = {row.resource_type: row.count for row in resource_result}
# Date range
date_range_query = (
select(
func.min(AuditLog.created_at).label('min_date'),
func.max(AuditLog.created_at).label('max_date')
)
.where(and_(*filters))
)
date_result = await db.execute(date_range_query)
date_row = date_result.one()
logger.info(
"Successfully retrieved audit log statistics",
tenant_id=tenant_id,
total_events=total_events
)
return AuditLogStatsResponse(
total_events=total_events,
events_by_action=events_by_action,
events_by_severity=events_by_severity,
events_by_resource_type=events_by_resource_type,
date_range={
"min": date_row.min_date,
"max": date_row.max_date
}
)
except Exception as e:
logger.error(
"Failed to retrieve audit log statistics",
error=str(e),
tenant_id=tenant_id
)
raise HTTPException(
status_code=500,
detail=f"Failed to retrieve audit log statistics: {str(e)}"
)

View File

@@ -12,6 +12,7 @@ from app.api.transactions import router as transactions_router
from app.api.pos_operations import router as pos_operations_router
from app.api.analytics import router as analytics_router
from app.api.internal_demo import router as internal_demo_router
from app.api.audit import router as audit_router
from app.core.database import database_manager
from shared.service_base import StandardFastAPIService
@@ -170,6 +171,8 @@ service.setup_custom_middleware()
service.setup_custom_endpoints()
# Include routers
# IMPORTANT: Register audit router FIRST to avoid route matching conflicts
service.add_router(audit_router, tags=["audit-logs"])
service.add_router(configurations_router, tags=["pos-configurations"])
service.add_router(transactions_router, tags=["pos-transactions"])
service.add_router(pos_operations_router, tags=["pos-operations"])

View File

@@ -0,0 +1,237 @@
# services/production/app/api/audit.py
"""
Audit Logs API - Retrieve audit trail for production service
"""
from fastapi import APIRouter, Depends, HTTPException, Query, Path, status
from typing import Optional, Dict, Any
from uuid import UUID
from datetime import datetime
import structlog
from sqlalchemy import select, func, and_
from sqlalchemy.ext.asyncio import AsyncSession
from app.models import AuditLog
from shared.auth.decorators import get_current_user_dep
from shared.auth.access_control import require_user_role
from shared.routing import RouteBuilder
from shared.models.audit_log_schemas import (
AuditLogResponse,
AuditLogListResponse,
AuditLogStatsResponse
)
from app.core.database import database_manager
route_builder = RouteBuilder('production')
router = APIRouter(tags=["audit-logs"])
logger = structlog.get_logger()
async def get_db():
"""Database session dependency"""
async with database_manager.get_session() as session:
yield session
@router.get(
route_builder.build_base_route("audit-logs"),
response_model=AuditLogListResponse
)
@require_user_role(['admin', 'owner'])
async def get_audit_logs(
tenant_id: UUID = Path(..., description="Tenant ID"),
start_date: Optional[datetime] = Query(None, description="Filter logs from this date"),
end_date: Optional[datetime] = Query(None, description="Filter logs until this date"),
user_id: Optional[UUID] = Query(None, description="Filter by user ID"),
action: Optional[str] = Query(None, description="Filter by action type"),
resource_type: Optional[str] = Query(None, description="Filter by resource type"),
severity: Optional[str] = Query(None, description="Filter by severity level"),
search: Optional[str] = Query(None, description="Search in description field"),
limit: int = Query(100, ge=1, le=1000, description="Number of records to return"),
offset: int = Query(0, ge=0, description="Number of records to skip"),
current_user: Dict[str, Any] = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""
Get audit logs for production service.
Requires admin or owner role.
"""
try:
logger.info(
"Retrieving audit logs",
tenant_id=tenant_id,
user_id=current_user.get("user_id"),
filters={
"start_date": start_date,
"end_date": end_date,
"action": action,
"resource_type": resource_type,
"severity": severity
}
)
# Build query filters
filters = [AuditLog.tenant_id == tenant_id]
if start_date:
filters.append(AuditLog.created_at >= start_date)
if end_date:
filters.append(AuditLog.created_at <= end_date)
if user_id:
filters.append(AuditLog.user_id == user_id)
if action:
filters.append(AuditLog.action == action)
if resource_type:
filters.append(AuditLog.resource_type == resource_type)
if severity:
filters.append(AuditLog.severity == severity)
if search:
filters.append(AuditLog.description.ilike(f"%{search}%"))
# Count total matching records
count_query = select(func.count()).select_from(AuditLog).where(and_(*filters))
total_result = await db.execute(count_query)
total = total_result.scalar() or 0
# Fetch paginated results
query = (
select(AuditLog)
.where(and_(*filters))
.order_by(AuditLog.created_at.desc())
.limit(limit)
.offset(offset)
)
result = await db.execute(query)
audit_logs = result.scalars().all()
# Convert to response models
items = [AuditLogResponse.from_orm(log) for log in audit_logs]
logger.info(
"Successfully retrieved audit logs",
tenant_id=tenant_id,
total=total,
returned=len(items)
)
return AuditLogListResponse(
items=items,
total=total,
limit=limit,
offset=offset,
has_more=(offset + len(items)) < total
)
except Exception as e:
logger.error(
"Failed to retrieve audit logs",
error=str(e),
tenant_id=tenant_id
)
raise HTTPException(
status_code=500,
detail=f"Failed to retrieve audit logs: {str(e)}"
)
@router.get(
route_builder.build_base_route("audit-logs/stats"),
response_model=AuditLogStatsResponse
)
@require_user_role(['admin', 'owner'])
async def get_audit_log_stats(
tenant_id: UUID = Path(..., description="Tenant ID"),
start_date: Optional[datetime] = Query(None, description="Filter logs from this date"),
end_date: Optional[datetime] = Query(None, description="Filter logs until this date"),
current_user: Dict[str, Any] = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""
Get audit log statistics for production service.
Requires admin or owner role.
"""
try:
logger.info(
"Retrieving audit log statistics",
tenant_id=tenant_id,
user_id=current_user.get("user_id")
)
# Build base filters
filters = [AuditLog.tenant_id == tenant_id]
if start_date:
filters.append(AuditLog.created_at >= start_date)
if end_date:
filters.append(AuditLog.created_at <= end_date)
# Total events
count_query = select(func.count()).select_from(AuditLog).where(and_(*filters))
total_result = await db.execute(count_query)
total_events = total_result.scalar() or 0
# Events by action
action_query = (
select(AuditLog.action, func.count().label('count'))
.where(and_(*filters))
.group_by(AuditLog.action)
)
action_result = await db.execute(action_query)
events_by_action = {row.action: row.count for row in action_result}
# Events by severity
severity_query = (
select(AuditLog.severity, func.count().label('count'))
.where(and_(*filters))
.group_by(AuditLog.severity)
)
severity_result = await db.execute(severity_query)
events_by_severity = {row.severity: row.count for row in severity_result}
# Events by resource type
resource_query = (
select(AuditLog.resource_type, func.count().label('count'))
.where(and_(*filters))
.group_by(AuditLog.resource_type)
)
resource_result = await db.execute(resource_query)
events_by_resource_type = {row.resource_type: row.count for row in resource_result}
# Date range
date_range_query = (
select(
func.min(AuditLog.created_at).label('min_date'),
func.max(AuditLog.created_at).label('max_date')
)
.where(and_(*filters))
)
date_result = await db.execute(date_range_query)
date_row = date_result.one()
logger.info(
"Successfully retrieved audit log statistics",
tenant_id=tenant_id,
total_events=total_events
)
return AuditLogStatsResponse(
total_events=total_events,
events_by_action=events_by_action,
events_by_severity=events_by_severity,
events_by_resource_type=events_by_resource_type,
date_range={
"min": date_row.min_date,
"max": date_row.max_date
}
)
except Exception as e:
logger.error(
"Failed to retrieve audit log statistics",
error=str(e),
tenant_id=tenant_id
)
raise HTTPException(
status_code=500,
detail=f"Failed to retrieve audit log statistics: {str(e)}"
)

View File

@@ -25,7 +25,8 @@ from app.api import (
equipment,
internal_demo,
orchestrator, # NEW: Orchestrator integration endpoint
-production_orders_operations # Tenant deletion endpoints
+production_orders_operations, # Tenant deletion endpoints
+audit
)
@@ -151,6 +152,8 @@ service.setup_custom_middleware()
# Include standardized routers
# NOTE: Register more specific routes before generic parameterized routes
# IMPORTANT: Register audit router FIRST to avoid route matching conflicts
service.add_router(audit.router)
service.add_router(orchestrator.router) # NEW: Orchestrator integration endpoint
service.add_router(production_orders_operations.router) # Tenant deletion endpoints
service.add_router(quality_templates.router) # Register first to avoid route conflicts

View File

@@ -0,0 +1,237 @@
# services/recipes/app/api/audit.py
"""
Audit Logs API - Retrieve audit trail for recipes service
"""
from fastapi import APIRouter, Depends, HTTPException, Query, Path, status
from typing import Optional, Dict, Any
from uuid import UUID
from datetime import datetime
import structlog
from sqlalchemy import select, func, and_
from sqlalchemy.ext.asyncio import AsyncSession
from app.models import AuditLog
from shared.auth.decorators import get_current_user_dep
from shared.auth.access_control import require_user_role
from shared.routing import RouteBuilder
from shared.models.audit_log_schemas import (
AuditLogResponse,
AuditLogListResponse,
AuditLogStatsResponse
)
from app.core.database import db_manager
route_builder = RouteBuilder('recipes')
router = APIRouter(tags=["audit-logs"])
logger = structlog.get_logger()
async def get_db():
"""Database session dependency"""
async with db_manager.get_session() as session:
yield session
@router.get(
route_builder.build_base_route("audit-logs"),
response_model=AuditLogListResponse
)
@require_user_role(['admin', 'owner'])
async def get_audit_logs(
tenant_id: UUID = Path(..., description="Tenant ID"),
start_date: Optional[datetime] = Query(None, description="Filter logs from this date"),
end_date: Optional[datetime] = Query(None, description="Filter logs until this date"),
user_id: Optional[UUID] = Query(None, description="Filter by user ID"),
action: Optional[str] = Query(None, description="Filter by action type"),
resource_type: Optional[str] = Query(None, description="Filter by resource type"),
severity: Optional[str] = Query(None, description="Filter by severity level"),
search: Optional[str] = Query(None, description="Search in description field"),
limit: int = Query(100, ge=1, le=1000, description="Number of records to return"),
offset: int = Query(0, ge=0, description="Number of records to skip"),
current_user: Dict[str, Any] = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""
Get audit logs for recipes service.
Requires admin or owner role.
"""
try:
logger.info(
"Retrieving audit logs",
tenant_id=tenant_id,
user_id=current_user.get("user_id"),
filters={
"start_date": start_date,
"end_date": end_date,
"action": action,
"resource_type": resource_type,
"severity": severity
}
)
# Build query filters
filters = [AuditLog.tenant_id == tenant_id]
if start_date:
filters.append(AuditLog.created_at >= start_date)
if end_date:
filters.append(AuditLog.created_at <= end_date)
if user_id:
filters.append(AuditLog.user_id == user_id)
if action:
filters.append(AuditLog.action == action)
if resource_type:
filters.append(AuditLog.resource_type == resource_type)
if severity:
filters.append(AuditLog.severity == severity)
if search:
filters.append(AuditLog.description.ilike(f"%{search}%"))
# Count total matching records
count_query = select(func.count()).select_from(AuditLog).where(and_(*filters))
total_result = await db.execute(count_query)
total = total_result.scalar() or 0
# Fetch paginated results
query = (
select(AuditLog)
.where(and_(*filters))
.order_by(AuditLog.created_at.desc())
.limit(limit)
.offset(offset)
)
result = await db.execute(query)
audit_logs = result.scalars().all()
# Convert to response models
items = [AuditLogResponse.from_orm(log) for log in audit_logs]
logger.info(
"Successfully retrieved audit logs",
tenant_id=tenant_id,
total=total,
returned=len(items)
)
return AuditLogListResponse(
items=items,
total=total,
limit=limit,
offset=offset,
has_more=(offset + len(items)) < total
)
except Exception as e:
logger.error(
"Failed to retrieve audit logs",
error=str(e),
tenant_id=tenant_id
)
raise HTTPException(
status_code=500,
detail=f"Failed to retrieve audit logs: {str(e)}"
)
@router.get(
route_builder.build_base_route("audit-logs/stats"),
response_model=AuditLogStatsResponse
)
@require_user_role(['admin', 'owner'])
async def get_audit_log_stats(
tenant_id: UUID = Path(..., description="Tenant ID"),
start_date: Optional[datetime] = Query(None, description="Filter logs from this date"),
end_date: Optional[datetime] = Query(None, description="Filter logs until this date"),
current_user: Dict[str, Any] = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""
Get audit log statistics for recipes service.
Requires admin or owner role.
"""
try:
logger.info(
"Retrieving audit log statistics",
tenant_id=tenant_id,
user_id=current_user.get("user_id")
)
# Build base filters
filters = [AuditLog.tenant_id == tenant_id]
if start_date:
filters.append(AuditLog.created_at >= start_date)
if end_date:
filters.append(AuditLog.created_at <= end_date)
# Total events
count_query = select(func.count()).select_from(AuditLog).where(and_(*filters))
total_result = await db.execute(count_query)
total_events = total_result.scalar() or 0
# Events by action
action_query = (
select(AuditLog.action, func.count().label('count'))
.where(and_(*filters))
.group_by(AuditLog.action)
)
action_result = await db.execute(action_query)
events_by_action = {row.action: row.count for row in action_result}
# Events by severity
severity_query = (
select(AuditLog.severity, func.count().label('count'))
.where(and_(*filters))
.group_by(AuditLog.severity)
)
severity_result = await db.execute(severity_query)
events_by_severity = {row.severity: row.count for row in severity_result}
# Events by resource type
resource_query = (
select(AuditLog.resource_type, func.count().label('count'))
.where(and_(*filters))
.group_by(AuditLog.resource_type)
)
resource_result = await db.execute(resource_query)
events_by_resource_type = {row.resource_type: row.count for row in resource_result}
# Date range
date_range_query = (
select(
func.min(AuditLog.created_at).label('min_date'),
func.max(AuditLog.created_at).label('max_date')
)
.where(and_(*filters))
)
date_result = await db.execute(date_range_query)
date_row = date_result.one()
logger.info(
"Successfully retrieved audit log statistics",
tenant_id=tenant_id,
total_events=total_events
)
return AuditLogStatsResponse(
total_events=total_events,
events_by_action=events_by_action,
events_by_severity=events_by_severity,
events_by_resource_type=events_by_resource_type,
date_range={
"min": date_row.min_date,
"max": date_row.max_date
}
)
except Exception as e:
logger.error(
"Failed to retrieve audit log statistics",
error=str(e),
tenant_id=tenant_id
)
raise HTTPException(
status_code=500,
detail=f"Failed to retrieve audit log statistics: {str(e)}"
)

View File

@@ -14,7 +14,7 @@ from .core.database import db_manager
from shared.service_base import StandardFastAPIService
# Import API routers
-from .api import recipes, recipe_quality_configs, recipe_operations, internal_demo
+from .api import recipes, recipe_quality_configs, recipe_operations, internal_demo, audit
# Import models to register them with SQLAlchemy metadata
from .models import recipes as recipe_models
@@ -115,6 +115,9 @@ service.setup_standard_endpoints()
service.setup_custom_middleware()
# Include routers
# IMPORTANT: Register audit router FIRST to avoid route matching conflicts
# where {recipe_id} would match literal paths like "audit-logs"
service.add_router(audit.router)
service.add_router(recipes.router)
service.add_router(recipe_quality_configs.router)
service.add_router(recipe_operations.router)

View File

@@ -0,0 +1,237 @@
# services/sales/app/api/audit.py
"""
Audit Logs API - Retrieve audit trail for sales service
"""
from fastapi import APIRouter, Depends, HTTPException, Query, Path, status
from typing import Optional, Dict, Any
from uuid import UUID
from datetime import datetime
import structlog
from sqlalchemy import select, func, and_
from sqlalchemy.ext.asyncio import AsyncSession
from app.models import AuditLog
from shared.auth.decorators import get_current_user_dep
from shared.auth.access_control import require_user_role
from shared.routing import RouteBuilder
from shared.models.audit_log_schemas import (
AuditLogResponse,
AuditLogListResponse,
AuditLogStatsResponse
)
from app.core.database import database_manager
route_builder = RouteBuilder('sales')
router = APIRouter(tags=["audit-logs"])
logger = structlog.get_logger()
async def get_db():
"""Database session dependency"""
async with database_manager.get_session() as session:
yield session
@router.get(
route_builder.build_base_route("audit-logs"),
response_model=AuditLogListResponse
)
@require_user_role(['admin', 'owner'])
async def get_audit_logs(
tenant_id: UUID = Path(..., description="Tenant ID"),
start_date: Optional[datetime] = Query(None, description="Filter logs from this date"),
end_date: Optional[datetime] = Query(None, description="Filter logs until this date"),
user_id: Optional[UUID] = Query(None, description="Filter by user ID"),
action: Optional[str] = Query(None, description="Filter by action type"),
resource_type: Optional[str] = Query(None, description="Filter by resource type"),
severity: Optional[str] = Query(None, description="Filter by severity level"),
search: Optional[str] = Query(None, description="Search in description field"),
limit: int = Query(100, ge=1, le=1000, description="Number of records to return"),
offset: int = Query(0, ge=0, description="Number of records to skip"),
current_user: Dict[str, Any] = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""
Get audit logs for sales service.
Requires admin or owner role.
"""
try:
logger.info(
"Retrieving audit logs",
tenant_id=tenant_id,
user_id=current_user.get("user_id"),
filters={
"start_date": start_date,
"end_date": end_date,
"action": action,
"resource_type": resource_type,
"severity": severity
}
)
# Build query filters
filters = [AuditLog.tenant_id == tenant_id]
if start_date:
filters.append(AuditLog.created_at >= start_date)
if end_date:
filters.append(AuditLog.created_at <= end_date)
if user_id:
filters.append(AuditLog.user_id == user_id)
if action:
filters.append(AuditLog.action == action)
if resource_type:
filters.append(AuditLog.resource_type == resource_type)
if severity:
filters.append(AuditLog.severity == severity)
if search:
filters.append(AuditLog.description.ilike(f"%{search}%"))
# Count total matching records
count_query = select(func.count()).select_from(AuditLog).where(and_(*filters))
total_result = await db.execute(count_query)
total = total_result.scalar() or 0
# Fetch paginated results
query = (
select(AuditLog)
.where(and_(*filters))
.order_by(AuditLog.created_at.desc())
.limit(limit)
.offset(offset)
)
result = await db.execute(query)
audit_logs = result.scalars().all()
# Convert to response models
items = [AuditLogResponse.from_orm(log) for log in audit_logs]
logger.info(
"Successfully retrieved audit logs",
tenant_id=tenant_id,
total=total,
returned=len(items)
)
return AuditLogListResponse(
items=items,
total=total,
limit=limit,
offset=offset,
has_more=(offset + len(items)) < total
)
except Exception as e:
logger.error(
"Failed to retrieve audit logs",
error=str(e),
tenant_id=tenant_id
)
raise HTTPException(
status_code=500,
detail=f"Failed to retrieve audit logs: {str(e)}"
)
@router.get(
route_builder.build_base_route("audit-logs/stats"),
response_model=AuditLogStatsResponse
)
@require_user_role(['admin', 'owner'])
async def get_audit_log_stats(
tenant_id: UUID = Path(..., description="Tenant ID"),
start_date: Optional[datetime] = Query(None, description="Filter logs from this date"),
end_date: Optional[datetime] = Query(None, description="Filter logs until this date"),
current_user: Dict[str, Any] = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""
Get audit log statistics for sales service.
Requires admin or owner role.
"""
try:
logger.info(
"Retrieving audit log statistics",
tenant_id=tenant_id,
user_id=current_user.get("user_id")
)
# Build base filters
filters = [AuditLog.tenant_id == tenant_id]
if start_date:
filters.append(AuditLog.created_at >= start_date)
if end_date:
filters.append(AuditLog.created_at <= end_date)
# Total events
count_query = select(func.count()).select_from(AuditLog).where(and_(*filters))
total_result = await db.execute(count_query)
total_events = total_result.scalar() or 0
# Events by action
action_query = (
select(AuditLog.action, func.count().label('count'))
.where(and_(*filters))
.group_by(AuditLog.action)
)
action_result = await db.execute(action_query)
events_by_action = {row.action: row.count for row in action_result}
# Events by severity
severity_query = (
select(AuditLog.severity, func.count().label('count'))
.where(and_(*filters))
.group_by(AuditLog.severity)
)
severity_result = await db.execute(severity_query)
events_by_severity = {row.severity: row.count for row in severity_result}
# Events by resource type
resource_query = (
select(AuditLog.resource_type, func.count().label('count'))
.where(and_(*filters))
.group_by(AuditLog.resource_type)
)
resource_result = await db.execute(resource_query)
events_by_resource_type = {row.resource_type: row.count for row in resource_result}
# Date range
date_range_query = (
select(
func.min(AuditLog.created_at).label('min_date'),
func.max(AuditLog.created_at).label('max_date')
)
.where(and_(*filters))
)
date_result = await db.execute(date_range_query)
date_row = date_result.one()
logger.info(
"Successfully retrieved audit log statistics",
tenant_id=tenant_id,
total_events=total_events
)
return AuditLogStatsResponse(
total_events=total_events,
events_by_action=events_by_action,
events_by_severity=events_by_severity,
events_by_resource_type=events_by_resource_type,
date_range={
"min": date_row.min_date,
"max": date_row.max_date
}
)
except Exception as e:
logger.error(
"Failed to retrieve audit log statistics",
error=str(e),
tenant_id=tenant_id
)
raise HTTPException(
status_code=500,
detail=f"Failed to retrieve audit log statistics: {str(e)}"
)

View File

@@ -10,7 +10,7 @@ from app.core.database import database_manager
from shared.service_base import StandardFastAPIService
# Import API routers
from app.api import sales_records, sales_operations, analytics, internal_demo
from app.api import sales_records, sales_operations, analytics, internal_demo, audit
class SalesService(StandardFastAPIService):
@@ -145,6 +145,8 @@ service.setup_standard_endpoints()
service.setup_custom_endpoints()
# Include routers
# IMPORTANT: Register audit router FIRST to avoid route matching conflicts
service.add_router(audit.router)
service.add_router(sales_records.router)
service.add_router(sales_operations.router)
service.add_router(analytics.router)

View File

@@ -0,0 +1,237 @@
# services/suppliers/app/api/audit.py
"""
Audit Logs API - Retrieve audit trail for suppliers service
"""
from fastapi import APIRouter, Depends, HTTPException, Query, Path, status
from typing import Optional, Dict, Any
from uuid import UUID
from datetime import datetime
import structlog
from sqlalchemy import select, func, and_
from sqlalchemy.ext.asyncio import AsyncSession
from app.models import AuditLog
from shared.auth.decorators import get_current_user_dep
from shared.auth.access_control import require_user_role
from shared.routing import RouteBuilder
from shared.models.audit_log_schemas import (
AuditLogResponse,
AuditLogListResponse,
AuditLogStatsResponse
)
from app.core.database import database_manager
route_builder = RouteBuilder('suppliers')
router = APIRouter(tags=["audit-logs"])
logger = structlog.get_logger()
async def get_db():
"""Database session dependency"""
async with database_manager.get_session() as session:
yield session
@router.get(
route_builder.build_base_route("audit-logs"),
response_model=AuditLogListResponse
)
@require_user_role(['admin', 'owner'])
async def get_audit_logs(
tenant_id: UUID = Path(..., description="Tenant ID"),
start_date: Optional[datetime] = Query(None, description="Filter logs from this date"),
end_date: Optional[datetime] = Query(None, description="Filter logs until this date"),
user_id: Optional[UUID] = Query(None, description="Filter by user ID"),
action: Optional[str] = Query(None, description="Filter by action type"),
resource_type: Optional[str] = Query(None, description="Filter by resource type"),
severity: Optional[str] = Query(None, description="Filter by severity level"),
search: Optional[str] = Query(None, description="Search in description field"),
limit: int = Query(100, ge=1, le=1000, description="Number of records to return"),
offset: int = Query(0, ge=0, description="Number of records to skip"),
current_user: Dict[str, Any] = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""
Get audit logs for suppliers service.
Requires admin or owner role.
"""
try:
logger.info(
"Retrieving audit logs",
tenant_id=tenant_id,
user_id=current_user.get("user_id"),
filters={
"start_date": start_date,
"end_date": end_date,
"action": action,
"resource_type": resource_type,
"severity": severity
}
)
# Build query filters
filters = [AuditLog.tenant_id == tenant_id]
if start_date:
filters.append(AuditLog.created_at >= start_date)
if end_date:
filters.append(AuditLog.created_at <= end_date)
if user_id:
filters.append(AuditLog.user_id == user_id)
if action:
filters.append(AuditLog.action == action)
if resource_type:
filters.append(AuditLog.resource_type == resource_type)
if severity:
filters.append(AuditLog.severity == severity)
if search:
filters.append(AuditLog.description.ilike(f"%{search}%"))
# Count total matching records
count_query = select(func.count()).select_from(AuditLog).where(and_(*filters))
total_result = await db.execute(count_query)
total = total_result.scalar() or 0
# Fetch paginated results
query = (
select(AuditLog)
.where(and_(*filters))
.order_by(AuditLog.created_at.desc())
.limit(limit)
.offset(offset)
)
result = await db.execute(query)
audit_logs = result.scalars().all()
# Convert to response models
items = [AuditLogResponse.from_orm(log) for log in audit_logs]
logger.info(
"Successfully retrieved audit logs",
tenant_id=tenant_id,
total=total,
returned=len(items)
)
return AuditLogListResponse(
items=items,
total=total,
limit=limit,
offset=offset,
has_more=(offset + len(items)) < total
)
except Exception as e:
logger.error(
"Failed to retrieve audit logs",
error=str(e),
tenant_id=tenant_id
)
raise HTTPException(
status_code=500,
detail=f"Failed to retrieve audit logs: {str(e)}"
)
@router.get(
route_builder.build_base_route("audit-logs/stats"),
response_model=AuditLogStatsResponse
)
@require_user_role(['admin', 'owner'])
async def get_audit_log_stats(
tenant_id: UUID = Path(..., description="Tenant ID"),
start_date: Optional[datetime] = Query(None, description="Filter logs from this date"),
end_date: Optional[datetime] = Query(None, description="Filter logs until this date"),
current_user: Dict[str, Any] = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""
Get audit log statistics for suppliers service.
Requires admin or owner role.
"""
try:
logger.info(
"Retrieving audit log statistics",
tenant_id=tenant_id,
user_id=current_user.get("user_id")
)
# Build base filters
filters = [AuditLog.tenant_id == tenant_id]
if start_date:
filters.append(AuditLog.created_at >= start_date)
if end_date:
filters.append(AuditLog.created_at <= end_date)
# Total events
count_query = select(func.count()).select_from(AuditLog).where(and_(*filters))
total_result = await db.execute(count_query)
total_events = total_result.scalar() or 0
# Events by action
action_query = (
select(AuditLog.action, func.count().label('count'))
.where(and_(*filters))
.group_by(AuditLog.action)
)
action_result = await db.execute(action_query)
events_by_action = {row.action: row.count for row in action_result}
# Events by severity
severity_query = (
select(AuditLog.severity, func.count().label('count'))
.where(and_(*filters))
.group_by(AuditLog.severity)
)
severity_result = await db.execute(severity_query)
events_by_severity = {row.severity: row.count for row in severity_result}
# Events by resource type
resource_query = (
select(AuditLog.resource_type, func.count().label('count'))
.where(and_(*filters))
.group_by(AuditLog.resource_type)
)
resource_result = await db.execute(resource_query)
events_by_resource_type = {row.resource_type: row.count for row in resource_result}
# Date range
date_range_query = (
select(
func.min(AuditLog.created_at).label('min_date'),
func.max(AuditLog.created_at).label('max_date')
)
.where(and_(*filters))
)
date_result = await db.execute(date_range_query)
date_row = date_result.one()
logger.info(
"Successfully retrieved audit log statistics",
tenant_id=tenant_id,
total_events=total_events
)
return AuditLogStatsResponse(
total_events=total_events,
events_by_action=events_by_action,
events_by_severity=events_by_severity,
events_by_resource_type=events_by_resource_type,
date_range={
"min": date_row.min_date,
"max": date_row.max_date
}
)
except Exception as e:
logger.error(
"Failed to retrieve audit log statistics",
error=str(e),
tenant_id=tenant_id
)
raise HTTPException(
status_code=500,
detail=f"Failed to retrieve audit log statistics: {str(e)}"
)

View File

@@ -11,7 +11,7 @@ from app.core.database import database_manager
from shared.service_base import StandardFastAPIService
# Import API routers
from app.api import suppliers, supplier_operations, analytics, internal_demo
from app.api import suppliers, supplier_operations, analytics, internal_demo, audit
# REMOVED: purchase_orders, deliveries - PO and delivery management moved to Procurement Service
# from app.api import purchase_orders, deliveries
@@ -105,6 +105,7 @@ service.setup_standard_endpoints()
# IMPORTANT: Order matters! More specific routes must come first
# to avoid path parameter matching issues
# REMOVED: purchase_orders.router, deliveries.router - PO and delivery management moved to Procurement Service
service.add_router(audit.router) # /suppliers/audit-logs - must be FIRST
service.add_router(supplier_operations.router) # /suppliers/operations/...
service.add_router(analytics.router) # /suppliers/analytics/...
service.add_router(suppliers.router) # /suppliers/{supplier_id} - catch-all, must be last

View File

@@ -0,0 +1,237 @@
# services/training/app/api/audit.py
"""
Audit Logs API - Retrieve audit trail for training service
"""
from fastapi import APIRouter, Depends, HTTPException, Query, Path, status
from typing import Optional, Dict, Any
from uuid import UUID
from datetime import datetime
import structlog
from sqlalchemy import select, func, and_
from sqlalchemy.ext.asyncio import AsyncSession
from app.models import AuditLog
from shared.auth.decorators import get_current_user_dep
from shared.auth.access_control import require_user_role
from shared.routing import RouteBuilder
from shared.models.audit_log_schemas import (
AuditLogResponse,
AuditLogListResponse,
AuditLogStatsResponse
)
from app.core.database import database_manager
route_builder = RouteBuilder('training')
router = APIRouter(tags=["audit-logs"])
logger = structlog.get_logger()
async def get_db():
"""Database session dependency"""
async with database_manager.get_session() as session:
yield session
@router.get(
route_builder.build_base_route("audit-logs"),
response_model=AuditLogListResponse
)
@require_user_role(['admin', 'owner'])
async def get_audit_logs(
tenant_id: UUID = Path(..., description="Tenant ID"),
start_date: Optional[datetime] = Query(None, description="Filter logs from this date"),
end_date: Optional[datetime] = Query(None, description="Filter logs until this date"),
user_id: Optional[UUID] = Query(None, description="Filter by user ID"),
action: Optional[str] = Query(None, description="Filter by action type"),
resource_type: Optional[str] = Query(None, description="Filter by resource type"),
severity: Optional[str] = Query(None, description="Filter by severity level"),
search: Optional[str] = Query(None, description="Search in description field"),
limit: int = Query(100, ge=1, le=1000, description="Number of records to return"),
offset: int = Query(0, ge=0, description="Number of records to skip"),
current_user: Dict[str, Any] = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""
Get audit logs for training service.
Requires admin or owner role.
"""
try:
logger.info(
"Retrieving audit logs",
tenant_id=tenant_id,
user_id=current_user.get("user_id"),
filters={
"start_date": start_date,
"end_date": end_date,
"action": action,
"resource_type": resource_type,
"severity": severity
}
)
# Build query filters
filters = [AuditLog.tenant_id == tenant_id]
if start_date:
filters.append(AuditLog.created_at >= start_date)
if end_date:
filters.append(AuditLog.created_at <= end_date)
if user_id:
filters.append(AuditLog.user_id == user_id)
if action:
filters.append(AuditLog.action == action)
if resource_type:
filters.append(AuditLog.resource_type == resource_type)
if severity:
filters.append(AuditLog.severity == severity)
if search:
filters.append(AuditLog.description.ilike(f"%{search}%"))
# Count total matching records
count_query = select(func.count()).select_from(AuditLog).where(and_(*filters))
total_result = await db.execute(count_query)
total = total_result.scalar() or 0
# Fetch paginated results
query = (
select(AuditLog)
.where(and_(*filters))
.order_by(AuditLog.created_at.desc())
.limit(limit)
.offset(offset)
)
result = await db.execute(query)
audit_logs = result.scalars().all()
# Convert to response models
items = [AuditLogResponse.from_orm(log) for log in audit_logs]
logger.info(
"Successfully retrieved audit logs",
tenant_id=tenant_id,
total=total,
returned=len(items)
)
return AuditLogListResponse(
items=items,
total=total,
limit=limit,
offset=offset,
has_more=(offset + len(items)) < total
)
except Exception as e:
logger.error(
"Failed to retrieve audit logs",
error=str(e),
tenant_id=tenant_id
)
raise HTTPException(
status_code=500,
detail=f"Failed to retrieve audit logs: {str(e)}"
)
@router.get(
route_builder.build_base_route("audit-logs/stats"),
response_model=AuditLogStatsResponse
)
@require_user_role(['admin', 'owner'])
async def get_audit_log_stats(
tenant_id: UUID = Path(..., description="Tenant ID"),
start_date: Optional[datetime] = Query(None, description="Filter logs from this date"),
end_date: Optional[datetime] = Query(None, description="Filter logs until this date"),
current_user: Dict[str, Any] = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""
Get audit log statistics for training service.
Requires admin or owner role.
"""
try:
logger.info(
"Retrieving audit log statistics",
tenant_id=tenant_id,
user_id=current_user.get("user_id")
)
# Build base filters
filters = [AuditLog.tenant_id == tenant_id]
if start_date:
filters.append(AuditLog.created_at >= start_date)
if end_date:
filters.append(AuditLog.created_at <= end_date)
# Total events
count_query = select(func.count()).select_from(AuditLog).where(and_(*filters))
total_result = await db.execute(count_query)
total_events = total_result.scalar() or 0
# Events by action
action_query = (
select(AuditLog.action, func.count().label('count'))
.where(and_(*filters))
.group_by(AuditLog.action)
)
action_result = await db.execute(action_query)
events_by_action = {row.action: row.count for row in action_result}
# Events by severity
severity_query = (
select(AuditLog.severity, func.count().label('count'))
.where(and_(*filters))
.group_by(AuditLog.severity)
)
severity_result = await db.execute(severity_query)
events_by_severity = {row.severity: row.count for row in severity_result}
# Events by resource type
resource_query = (
select(AuditLog.resource_type, func.count().label('count'))
.where(and_(*filters))
.group_by(AuditLog.resource_type)
)
resource_result = await db.execute(resource_query)
events_by_resource_type = {row.resource_type: row.count for row in resource_result}
# Date range
date_range_query = (
select(
func.min(AuditLog.created_at).label('min_date'),
func.max(AuditLog.created_at).label('max_date')
)
.where(and_(*filters))
)
date_result = await db.execute(date_range_query)
date_row = date_result.one()
logger.info(
"Successfully retrieved audit log statistics",
tenant_id=tenant_id,
total_events=total_events
)
return AuditLogStatsResponse(
total_events=total_events,
events_by_action=events_by_action,
events_by_severity=events_by_severity,
events_by_resource_type=events_by_resource_type,
date_range={
"min": date_row.min_date,
"max": date_row.max_date
}
)
except Exception as e:
logger.error(
"Failed to retrieve audit log statistics",
error=str(e),
tenant_id=tenant_id
)
raise HTTPException(
status_code=500,
detail=f"Failed to retrieve audit log statistics: {str(e)}"
)

View File

@@ -11,7 +11,7 @@ from fastapi import FastAPI, Request
from sqlalchemy import text
from app.core.config import settings
from app.core.database import initialize_training_database, cleanup_training_database, database_manager
from app.api import training_jobs, training_operations, models, health, monitoring, websocket_operations
from app.api import training_jobs, training_operations, models, health, monitoring, websocket_operations, audit
from app.services.training_events import setup_messaging, cleanup_messaging
from app.websocket.events import setup_websocket_event_consumer, cleanup_websocket_consumers
from shared.service_base import StandardFastAPIService
@@ -163,6 +163,8 @@ service.setup_custom_middleware()
service.setup_custom_endpoints()
# Include API routers
# IMPORTANT: Register audit router FIRST to avoid route matching conflicts
service.add_router(audit.router)
service.add_router(training_jobs.router, tags=["training-jobs"])
service.add_router(training_operations.router, tags=["training-operations"])
service.add_router(models.router, tags=["models"])

View File

@@ -0,0 +1,307 @@
"""
Calendar-based Feature Engineering
Hyperlocal school calendar and event features for demand forecasting
"""
import pandas as pd
import structlog
from typing import Dict, List, Any, Optional
from datetime import datetime, date, time, timedelta
from shared.clients.external_client import ExternalServiceClient
logger = structlog.get_logger()
class CalendarFeatureEngine:
"""
Generates features based on school calendars and local events
for hyperlocal demand forecasting enhancement
"""
def __init__(self, external_client: ExternalServiceClient):
self.external_client = external_client
self.calendar_cache = {} # Cache calendar data to avoid repeated API calls
async def get_calendar_for_tenant(
self,
tenant_id: str,
city_id: Optional[str] = "madrid"
) -> Optional[Dict[str, Any]]:
"""
Get the assigned school calendar for a tenant
If tenant has no assignment, returns None
"""
try:
# Check cache first
cache_key = f"tenant_{tenant_id}_calendar"
if cache_key in self.calendar_cache:
logger.debug("Using cached calendar", tenant_id=tenant_id)
return self.calendar_cache[cache_key]
# Get tenant location context
context = await self.external_client.get_tenant_location_context(tenant_id)
if not context or not context.get("calendar"):
logger.info(
"No calendar assigned to tenant, using default if available",
tenant_id=tenant_id
)
return None
calendar = context["calendar"]
self.calendar_cache[cache_key] = calendar
logger.info(
"Retrieved calendar for tenant",
tenant_id=tenant_id,
calendar_name=calendar.get("calendar_name")
)
return calendar
except Exception as e:
logger.error(
"Error retrieving calendar for tenant",
tenant_id=tenant_id,
error=str(e)
)
return None
def _is_date_in_holiday_period(
self,
check_date: date,
holiday_periods: List[Dict[str, Any]]
) -> tuple[bool, Optional[str]]:
"""
Check if a date falls within any holiday period
Returns:
(is_holiday, holiday_name)
"""
for period in holiday_periods:
start = datetime.strptime(period["start_date"], "%Y-%m-%d").date()
end = datetime.strptime(period["end_date"], "%Y-%m-%d").date()
if start <= check_date <= end:
return True, period["name"]
return False, None
def _is_school_hours_active(
self,
check_datetime: datetime,
school_hours: Dict[str, Any]
) -> bool:
"""
Check if datetime falls during school operating hours
Args:
check_datetime: DateTime to check
school_hours: School hours configuration dict
Returns:
True if during school hours, False otherwise
"""
# Only check weekdays
if check_datetime.weekday() >= 5: # Saturday=5, Sunday=6
return False
check_time = check_datetime.time()
# Morning session
morning_start = datetime.strptime(
school_hours["morning_start"], "%H:%M"
).time()
morning_end = datetime.strptime(
school_hours["morning_end"], "%H:%M"
).time()
if morning_start <= check_time <= morning_end:
return True
# Afternoon session (if applicable)
if school_hours.get("has_afternoon_session", False):
afternoon_start = datetime.strptime(
school_hours["afternoon_start"], "%H:%M"
).time()
afternoon_end = datetime.strptime(
school_hours["afternoon_end"], "%H:%M"
).time()
if afternoon_start <= check_time <= afternoon_end:
return True
return False
def _calculate_school_proximity_intensity(
self,
check_datetime: datetime,
school_hours: Dict[str, Any]
) -> float:
"""
Calculate intensity of school-related foot traffic
Peaks during drop-off and pick-up times
Returns:
Float between 0.0 (no impact) and 1.0 (peak impact)
"""
# Only weekdays
if check_datetime.weekday() >= 5:
return 0.0
check_time = check_datetime.time()
        # Define peak windows around school start/end
        # (drop-off: 30 min before to 15 min after start; pick-up: 15 min before to 30 min after end)
morning_start = datetime.strptime(
school_hours["morning_start"], "%H:%M"
).time()
morning_end = datetime.strptime(
school_hours["morning_end"], "%H:%M"
).time()
# Morning drop-off peak (30 min before to 15 min after start)
drop_off_start = (
datetime.combine(date.today(), morning_start) - timedelta(minutes=30)
).time()
drop_off_end = (
datetime.combine(date.today(), morning_start) + timedelta(minutes=15)
).time()
if drop_off_start <= check_time <= drop_off_end:
return 1.0 # Peak morning traffic
# Morning pick-up peak (15 min before to 30 min after end)
pickup_start = (
datetime.combine(date.today(), morning_end) - timedelta(minutes=15)
).time()
pickup_end = (
datetime.combine(date.today(), morning_end) + timedelta(minutes=30)
).time()
if pickup_start <= check_time <= pickup_end:
            return 1.0  # Peak pick-up traffic
# During school hours (moderate impact)
if morning_start <= check_time <= morning_end:
return 0.3
# Afternoon session if applicable
if school_hours.get("has_afternoon_session", False):
afternoon_start = datetime.strptime(
school_hours["afternoon_start"], "%H:%M"
).time()
afternoon_end = datetime.strptime(
school_hours["afternoon_end"], "%H:%M"
).time()
if afternoon_start <= check_time <= afternoon_end:
return 0.3
return 0.0
async def add_calendar_features(
self,
df: pd.DataFrame,
tenant_id: str,
date_column: str = "date"
) -> pd.DataFrame:
"""
Add calendar-based features to dataframe
Features added:
- is_school_holiday: Binary (1/0)
- school_holiday_name: String (name of holiday or None)
- school_hours_active: Binary (1/0) - if during school operating hours
- school_proximity_intensity: Float (0.0-1.0) - peak during drop-off/pick-up
Args:
df: DataFrame with date/datetime column
tenant_id: Tenant ID to get calendar assignment
date_column: Name of date column
Returns:
DataFrame with added calendar features
"""
try:
logger.info(
"Adding calendar-based features",
tenant_id=tenant_id,
rows=len(df)
)
# Get calendar for tenant
calendar = await self.get_calendar_for_tenant(tenant_id)
if not calendar:
logger.warning(
"No calendar available, using fallback features",
tenant_id=tenant_id
)
# Add default features (all zeros)
df["is_school_holiday"] = 0
df["school_holiday_name"] = None
df["school_hours_active"] = 0
df["school_proximity_intensity"] = 0.0
return df
holiday_periods = calendar.get("holiday_periods", [])
school_hours = calendar.get("school_hours", {})
# Initialize feature columns
school_holidays = []
holiday_names = []
hours_active = []
proximity_intensity = []
# Process each row
for idx, row in df.iterrows():
row_date = pd.to_datetime(row[date_column])
# Check if holiday
is_holiday, holiday_name = self._is_date_in_holiday_period(
row_date.date(),
holiday_periods
)
school_holidays.append(1 if is_holiday else 0)
holiday_names.append(holiday_name)
                # Check if during school hours (requires a usable time component;
                # pd.to_datetime always yields a Timestamp, so hasattr(row_date, 'hour')
                # is always true - test the clock time instead)
                if row_date.time() != time(0, 0):
                    hours_active.append(
                        1 if self._is_school_hours_active(row_date, school_hours) else 0
                    )
                    proximity_intensity.append(
                        self._calculate_school_proximity_intensity(row_date, school_hours)
                    )
                else:
                    # Date only (midnight timestamp), no usable time component
                    hours_active.append(0)
                    proximity_intensity.append(0.0)
# Add features to dataframe
df["is_school_holiday"] = school_holidays
df["school_holiday_name"] = holiday_names
df["school_hours_active"] = hours_active
df["school_proximity_intensity"] = proximity_intensity
logger.info(
"Calendar features added successfully",
tenant_id=tenant_id,
holiday_periods_count=len(holiday_periods),
holidays_found=sum(school_holidays)
)
return df
except Exception as e:
logger.error(
"Error adding calendar features",
tenant_id=tenant_id,
error=str(e)
)
# Return df with default features on error
df["is_school_holiday"] = 0
df["school_holiday_name"] = None
df["school_hours_active"] = 0
df["school_proximity_intensity"] = 0.0
return df
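
A minimal usage sketch for the engine above, assuming an already-configured ExternalServiceClient (the construction shown is illustrative) and a tenant whose assigned calendar defines a 09:00 morning start:

import asyncio
import pandas as pd

from shared.clients.external_client import ExternalServiceClient

async def main():
    client = ExternalServiceClient()  # assumed default construction; configure as the real client requires
    engine = CalendarFeatureEngine(client)
    df = pd.DataFrame({
        "date": pd.to_datetime(["2025-03-10 08:45", "2025-03-10 11:00", "2025-04-18"]),
        "units_sold": [120, 80, 45],
    })
    df = await engine.add_calendar_features(df, tenant_id="tenant-123", date_column="date")
    # With a 09:00 start, 08:45 would fall in the drop-off window (intensity 1.0),
    # 11:00 inside school hours (0.3), and the date-only row stays at 0.0.
    print(df[["date", "is_school_holiday", "school_hours_active", "school_proximity_intensity"]])

# asyncio.run(main())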