Improve the frontend 5

Urtzi Alfaro
2025-11-02 20:24:44 +01:00
parent 0220da1725
commit 5adb0e39c0
90 changed files with 10658 additions and 2548 deletions

@@ -0,0 +1,237 @@
# services/forecasting/app/api/audit.py
"""
Audit Logs API - Retrieve audit trail for forecasting service
"""
from fastapi import APIRouter, Depends, HTTPException, Query, Path, status
from typing import Optional, Dict, Any
from uuid import UUID
from datetime import datetime
import structlog
from sqlalchemy import select, func, and_
from sqlalchemy.ext.asyncio import AsyncSession
from app.models import AuditLog
from shared.auth.decorators import get_current_user_dep
from shared.auth.access_control import require_user_role
from shared.routing import RouteBuilder
from shared.models.audit_log_schemas import (
AuditLogResponse,
AuditLogListResponse,
AuditLogStatsResponse
)
from app.core.database import database_manager
route_builder = RouteBuilder('forecasting')
router = APIRouter(tags=["audit-logs"])
logger = structlog.get_logger()

async def get_db():
"""Database session dependency"""
async with database_manager.get_session() as session:
yield session

@router.get(
route_builder.build_base_route("audit-logs"),
response_model=AuditLogListResponse
)
@require_user_role(['admin', 'owner'])
async def get_audit_logs(
tenant_id: UUID = Path(..., description="Tenant ID"),
start_date: Optional[datetime] = Query(None, description="Filter logs from this date"),
end_date: Optional[datetime] = Query(None, description="Filter logs until this date"),
user_id: Optional[UUID] = Query(None, description="Filter by user ID"),
action: Optional[str] = Query(None, description="Filter by action type"),
resource_type: Optional[str] = Query(None, description="Filter by resource type"),
severity: Optional[str] = Query(None, description="Filter by severity level"),
search: Optional[str] = Query(None, description="Search in description field"),
limit: int = Query(100, ge=1, le=1000, description="Number of records to return"),
offset: int = Query(0, ge=0, description="Number of records to skip"),
current_user: Dict[str, Any] = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""
Get audit logs for forecasting service.
Requires admin or owner role.
"""
try:
logger.info(
"Retrieving audit logs",
tenant_id=tenant_id,
user_id=current_user.get("user_id"),
filters={
"start_date": start_date,
"end_date": end_date,
"action": action,
"resource_type": resource_type,
"severity": severity
}
)
# Build query filters
filters = [AuditLog.tenant_id == tenant_id]
if start_date:
filters.append(AuditLog.created_at >= start_date)
if end_date:
filters.append(AuditLog.created_at <= end_date)
if user_id:
filters.append(AuditLog.user_id == user_id)
if action:
filters.append(AuditLog.action == action)
if resource_type:
filters.append(AuditLog.resource_type == resource_type)
if severity:
filters.append(AuditLog.severity == severity)
if search:
filters.append(AuditLog.description.ilike(f"%{search}%"))
# Count total matching records
count_query = select(func.count()).select_from(AuditLog).where(and_(*filters))
total_result = await db.execute(count_query)
total = total_result.scalar() or 0
# Fetch paginated results
query = (
select(AuditLog)
.where(and_(*filters))
.order_by(AuditLog.created_at.desc())
.limit(limit)
.offset(offset)
)
result = await db.execute(query)
audit_logs = result.scalars().all()
# Convert to response models
items = [AuditLogResponse.from_orm(log) for log in audit_logs]
logger.info(
"Successfully retrieved audit logs",
tenant_id=tenant_id,
total=total,
returned=len(items)
)
return AuditLogListResponse(
items=items,
total=total,
limit=limit,
offset=offset,
has_more=(offset + len(items)) < total
)
except Exception as e:
logger.error(
"Failed to retrieve audit logs",
error=str(e),
tenant_id=tenant_id
)
raise HTTPException(
status_code=500,
detail=f"Failed to retrieve audit logs: {str(e)}"
)

@router.get(
route_builder.build_base_route("audit-logs/stats"),
response_model=AuditLogStatsResponse
)
@require_user_role(['admin', 'owner'])
async def get_audit_log_stats(
tenant_id: UUID = Path(..., description="Tenant ID"),
start_date: Optional[datetime] = Query(None, description="Filter logs from this date"),
end_date: Optional[datetime] = Query(None, description="Filter logs until this date"),
current_user: Dict[str, Any] = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""
Get audit log statistics for forecasting service.
Requires admin or owner role.
"""
try:
logger.info(
"Retrieving audit log statistics",
tenant_id=tenant_id,
user_id=current_user.get("user_id")
)
# Build base filters
filters = [AuditLog.tenant_id == tenant_id]
if start_date:
filters.append(AuditLog.created_at >= start_date)
if end_date:
filters.append(AuditLog.created_at <= end_date)
# Total events
count_query = select(func.count()).select_from(AuditLog).where(and_(*filters))
total_result = await db.execute(count_query)
total_events = total_result.scalar() or 0
# Events by action
action_query = (
select(AuditLog.action, func.count().label('count'))
.where(and_(*filters))
.group_by(AuditLog.action)
)
action_result = await db.execute(action_query)
events_by_action = {row.action: row.count for row in action_result}
# Events by severity
severity_query = (
select(AuditLog.severity, func.count().label('count'))
.where(and_(*filters))
.group_by(AuditLog.severity)
)
severity_result = await db.execute(severity_query)
events_by_severity = {row.severity: row.count for row in severity_result}
# Events by resource type
resource_query = (
select(AuditLog.resource_type, func.count().label('count'))
.where(and_(*filters))
.group_by(AuditLog.resource_type)
)
resource_result = await db.execute(resource_query)
events_by_resource_type = {row.resource_type: row.count for row in resource_result}
# Date range
date_range_query = (
select(
func.min(AuditLog.created_at).label('min_date'),
func.max(AuditLog.created_at).label('max_date')
)
.where(and_(*filters))
)
date_result = await db.execute(date_range_query)
date_row = date_result.one()
logger.info(
"Successfully retrieved audit log statistics",
tenant_id=tenant_id,
total_events=total_events
)
return AuditLogStatsResponse(
total_events=total_events,
events_by_action=events_by_action,
events_by_severity=events_by_severity,
events_by_resource_type=events_by_resource_type,
date_range={
"min": date_row.min_date,
"max": date_row.max_date
}
)
except Exception as e:
logger.error(
"Failed to retrieve audit log statistics",
error=str(e),
tenant_id=tenant_id
)
raise HTTPException(
status_code=500,
detail=f"Failed to retrieve audit log statistics: {str(e)}"
)
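
For reference, a hedged client-side sketch of the list endpoint above. The path prefix produced by `route_builder.build_base_route("audit-logs")` is not shown in this diff, so the URL below is an assumption; only the query parameters mirror the endpoint signature. The companion `audit-logs/stats` route is called the same way and returns the aggregated counts.

```python
# Hypothetical client call for GET .../audit-logs (base URL and path prefix assumed).
import httpx

async def fetch_audit_logs(tenant_id: str, token: str) -> dict:
    params = {
        "action": "forecast_generated",  # example filter value, not a known action type
        "severity": "info",
        "limit": 50,
        "offset": 0,
    }
    async with httpx.AsyncClient(base_url="http://localhost:8000") as client:
        resp = await client.get(
            f"/api/v1/tenants/{tenant_id}/forecasting/audit-logs",  # assumed prefix
            params=params,
            headers={"Authorization": f"Bearer {token}"},
        )
        resp.raise_for_status()
        # Response shape: {"items": [...], "total": ..., "limit": ..., "offset": ..., "has_more": ...}
        return resp.json()

# Run with: asyncio.run(fetch_audit_logs("<tenant-uuid>", "<jwt>"))
```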

@@ -15,7 +15,7 @@ from app.services.forecasting_alert_service import ForecastingAlertService
from shared.service_base import StandardFastAPIService
# Import API routers
from app.api import forecasts, forecasting_operations, analytics, scenario_operations, internal_demo
from app.api import forecasts, forecasting_operations, analytics, scenario_operations, internal_demo, audit
class ForecastingService(StandardFastAPIService):
@@ -163,6 +163,8 @@ service.setup_standard_endpoints()
service.setup_custom_endpoints()
# Include API routers
# IMPORTANT: Register audit router FIRST to avoid route matching conflicts
service.add_router(audit.router)
service.add_router(forecasts.router)
service.add_router(forecasting_operations.router)
service.add_router(analytics.router)
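
The "register audit router FIRST" note leans on Starlette's order-based route matching: the first registered route whose pattern matches a request wins, so a parameterized segment registered earlier can shadow a literal segment such as `audit-logs` registered later. A minimal sketch of the failure mode, using direct route registration and invented paths rather than the service's real routers:

```python
# Illustration only: why registration order matters for route matching.
from fastapi import FastAPI

app = FastAPI()

@app.get("/tenants/{tenant_id}/{resource_id}")   # parameterized route, registered first
async def get_resource(tenant_id: str, resource_id: str):
    return {"resource_id": resource_id}

@app.get("/tenants/{tenant_id}/audit-logs")      # literal route, registered second
async def list_audit_logs(tenant_id: str):
    return {"items": []}

# A request to /tenants/t1/audit-logs matches the parameterized route above
# (resource_id == "audit-logs") because it was registered first. Registering
# the literal audit-logs routes before the parameterized routers avoids that
# shadowing, which is what the comment in main.py is guarding against.
```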

@@ -0,0 +1,235 @@
"""
Calendar-based Feature Engineering for Forecasting Service
Generates calendar features for future date predictions
"""
import pandas as pd
import structlog
from typing import Dict, List, Any, Optional
from datetime import datetime, date, time, timedelta
from app.services.data_client import data_client
logger = structlog.get_logger()

class ForecastCalendarFeatures:
"""
Generates calendar-based features for future predictions
Optimized for forecasting service (future dates only)
"""
def __init__(self):
self.calendar_cache = {} # Cache calendar data per tenant
async def get_calendar_for_tenant(
self,
tenant_id: str
) -> Optional[Dict[str, Any]]:
"""Get cached calendar for tenant"""
if tenant_id in self.calendar_cache:
return self.calendar_cache[tenant_id]
calendar = await data_client.fetch_tenant_calendar(tenant_id)
if calendar:
self.calendar_cache[tenant_id] = calendar
return calendar
def _is_date_in_holiday_period(
self,
check_date: date,
holiday_periods: List[Dict[str, Any]]
) -> tuple[bool, Optional[str]]:
"""Check if date is within any holiday period"""
for period in holiday_periods:
start = datetime.strptime(period["start_date"], "%Y-%m-%d").date()
end = datetime.strptime(period["end_date"], "%Y-%m-%d").date()
if start <= check_date <= end:
return True, period["name"]
return False, None
def _is_school_hours_active(
self,
check_datetime: datetime,
school_hours: Dict[str, Any]
) -> bool:
"""Check if datetime falls during school operating hours"""
# Only weekdays
if check_datetime.weekday() >= 5:
return False
check_time = check_datetime.time()
# Morning session
morning_start = datetime.strptime(
school_hours["morning_start"], "%H:%M"
).time()
morning_end = datetime.strptime(
school_hours["morning_end"], "%H:%M"
).time()
if morning_start <= check_time <= morning_end:
return True
# Afternoon session if exists
if school_hours.get("has_afternoon_session", False):
afternoon_start = datetime.strptime(
school_hours["afternoon_start"], "%H:%M"
).time()
afternoon_end = datetime.strptime(
school_hours["afternoon_end"], "%H:%M"
).time()
if afternoon_start <= check_time <= afternoon_end:
return True
return False
def _calculate_school_proximity_intensity(
self,
check_datetime: datetime,
school_hours: Dict[str, Any]
) -> float:
"""
Calculate school proximity impact intensity
Returns 0.0-1.0 based on drop-off/pick-up times
"""
# Only weekdays
if check_datetime.weekday() >= 5:
return 0.0
check_time = check_datetime.time()
morning_start = datetime.strptime(
school_hours["morning_start"], "%H:%M"
).time()
morning_end = datetime.strptime(
school_hours["morning_end"], "%H:%M"
).time()
# Morning drop-off peak (30 min before to 15 min after start)
drop_off_start = (
datetime.combine(date.today(), morning_start) - timedelta(minutes=30)
).time()
drop_off_end = (
datetime.combine(date.today(), morning_start) + timedelta(minutes=15)
).time()
if drop_off_start <= check_time <= drop_off_end:
return 1.0 # Peak
# Morning pick-up peak (15 min before to 30 min after end)
pickup_start = (
datetime.combine(date.today(), morning_end) - timedelta(minutes=15)
).time()
pickup_end = (
datetime.combine(date.today(), morning_end) + timedelta(minutes=30)
).time()
if pickup_start <= check_time <= pickup_end:
return 1.0 # Peak
# During school hours (moderate)
if morning_start <= check_time <= morning_end:
return 0.3
return 0.0
async def add_calendar_features(
self,
df: pd.DataFrame,
tenant_id: str,
date_column: str = "ds"
) -> pd.DataFrame:
"""
Add calendar features to forecast dataframe
Args:
df: Forecast dataframe with future dates
tenant_id: Tenant ID to fetch calendar
date_column: Name of date column (default 'ds' for Prophet)
Returns:
DataFrame with calendar features added
"""
try:
logger.info(
"Adding calendar features to forecast",
tenant_id=tenant_id,
rows=len(df)
)
# Get calendar
calendar = await self.get_calendar_for_tenant(tenant_id)
if not calendar:
logger.info(
"No calendar available, using zero features",
tenant_id=tenant_id
)
df["is_school_holiday"] = 0
df["school_hours_active"] = 0
df["school_proximity_intensity"] = 0.0
return df
holiday_periods = calendar.get("holiday_periods", [])
school_hours = calendar.get("school_hours", {})
# Initialize feature lists
school_holidays = []
hours_active = []
proximity_intensity = []
# Process each row
for idx, row in df.iterrows():
row_date = pd.to_datetime(row[date_column])
# Check holiday
is_holiday, _ = self._is_date_in_holiday_period(
row_date.date(),
holiday_periods
)
school_holidays.append(1 if is_holiday else 0)
                # Check school hours and proximity (only when school hours are defined
                # and the timestamp has a time component)
                if school_hours and hasattr(row_date, 'hour'):
hours_active.append(
1 if self._is_school_hours_active(row_date, school_hours) else 0
)
proximity_intensity.append(
self._calculate_school_proximity_intensity(row_date, school_hours)
)
else:
hours_active.append(0)
proximity_intensity.append(0.0)
# Add features
df["is_school_holiday"] = school_holidays
df["school_hours_active"] = hours_active
df["school_proximity_intensity"] = proximity_intensity
logger.info(
"Calendar features added to forecast",
tenant_id=tenant_id,
holidays_in_forecast=sum(school_holidays)
)
return df
except Exception as e:
logger.error(
"Error adding calendar features to forecast",
tenant_id=tenant_id,
error=str(e)
)
# Return with zero features on error
df["is_school_holiday"] = 0
df["school_hours_active"] = 0
df["school_proximity_intensity"] = 0.0
return df

# Global instance
forecast_calendar_features = ForecastCalendarFeatures()
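
A hedged usage sketch for the feature generator above: enriching a Prophet-style future dataframe before prediction. The module path and tenant id are assumptions, and a real run needs the data service reachable so the tenant calendar can be fetched; otherwise the zero-feature fallback applies.

```python
# Hypothetical usage of ForecastCalendarFeatures with a Prophet-style frame.
import pandas as pd

# Module path is an assumption; adjust to wherever this file lives in the service.
from app.ml.forecast_calendar_features import forecast_calendar_features

async def build_future_frame(tenant_id: str) -> pd.DataFrame:
    future = pd.DataFrame({"ds": pd.date_range("2025-12-01", periods=14, freq="D")})
    # Adds is_school_holiday, school_hours_active, school_proximity_intensity;
    # falls back to all-zero features when no calendar is assigned or on error.
    return await forecast_calendar_features.add_calendar_features(
        future, tenant_id=tenant_id, date_column="ds"
    )

# Run with: asyncio.run(build_future_frame("<tenant-uuid>"))
```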

@@ -61,5 +61,72 @@ class DataClient:
logger.error(f"Error fetching weather data: {e}", tenant_id=tenant_id)
return []
async def fetch_tenant_calendar(
self,
tenant_id: str
) -> Optional[Dict[str, Any]]:
"""
Fetch tenant's assigned school calendar
Returns None if no calendar assigned
"""
try:
location_context = await self.external_client.get_tenant_location_context(
tenant_id=tenant_id
)
if location_context and location_context.get("calendar"):
logger.info(
"Fetched calendar for tenant",
tenant_id=tenant_id,
calendar_name=location_context["calendar"].get("calendar_name")
)
return location_context["calendar"]
else:
logger.info("No calendar assigned to tenant", tenant_id=tenant_id)
return None
except Exception as e:
logger.error(f"Error fetching calendar: {e}", tenant_id=tenant_id)
return None
async def check_school_holiday(
self,
calendar_id: str,
check_date: str,
tenant_id: str
) -> bool:
"""
Check if a date is a school holiday
Args:
calendar_id: School calendar UUID
check_date: Date in ISO format (YYYY-MM-DD)
tenant_id: Tenant ID for auth
Returns:
True if school holiday, False otherwise
"""
try:
result = await self.external_client.check_is_school_holiday(
calendar_id=calendar_id,
check_date=check_date,
tenant_id=tenant_id
)
if result:
is_holiday = result.get("is_holiday", False)
if is_holiday:
logger.debug(
"School holiday detected",
date=check_date,
holiday_name=result.get("holiday_name")
)
return is_holiday
return False
except Exception as e:
logger.error(f"Error checking school holiday: {e}", date=check_date)
return False

# Global instance - same as before, but much simpler implementation
data_client = DataClient()
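
A short usage sketch for the two new DataClient helpers; the tenant id, calendar-id key, and date are placeholders. Both calls degrade gracefully (None / False) when nothing is assigned or the external call fails.

```python
# Hypothetical usage of the new calendar helpers on the shared data_client.
from app.services.data_client import data_client

async def is_closed_for_school_holiday(tenant_id: str, day: str) -> bool:
    calendar = await data_client.fetch_tenant_calendar(tenant_id)
    if not calendar:
        return False
    return await data_client.check_school_holiday(
        calendar_id=calendar["calendar_id"],  # key name assumed, not shown in this diff
        check_date=day,                       # ISO date, e.g. "2025-12-24"
        tenant_id=tenant_id,
    )

# Run with: asyncio.run(is_closed_for_school_holiday("<tenant-uuid>", "2025-12-24"))
```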