Improve AI logic
@@ -4,8 +4,8 @@ Handles basic CRUD operations for tenants
"""

import structlog
-from fastapi import APIRouter, Depends, HTTPException, status, Path
-from typing import Dict, Any
+from fastapi import APIRouter, Depends, HTTPException, status, Path, Query
+from typing import Dict, Any, List
from uuid import UUID

from app.schemas.tenants import TenantResponse, TenantUpdate
@@ -30,6 +30,47 @@ def get_enhanced_tenant_service():
        logger.error("Failed to create enhanced tenant service", error=str(e))
        raise HTTPException(status_code=500, detail="Service initialization failed")


@router.get(route_builder.build_base_route("", include_tenant_prefix=False), response_model=List[TenantResponse])
@track_endpoint_metrics("tenants_list")
async def get_active_tenants(
    skip: int = Query(0, ge=0, description="Number of records to skip"),
    limit: int = Query(100, ge=1, le=1000, description="Maximum number of records to return"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    tenant_service: EnhancedTenantService = Depends(get_enhanced_tenant_service)
):
    """Get all active tenants - Available to service accounts and admins"""

    logger.info(
        "Get active tenants request received",
        skip=skip,
        limit=limit,
        user_id=current_user.get("user_id"),
        user_type=current_user.get("type", "user"),
        is_service=current_user.get("type") == "service",
        role=current_user.get("role"),
        service_name=current_user.get("service", "none")
    )

    # Allow service accounts to call this endpoint
    if current_user.get("type") != "service":
        # For non-service users, additional role checks could be added here if needed
        logger.debug(
            "Non-service user requesting active tenants",
            user_id=current_user.get("user_id"),
            role=current_user.get("role")
        )

    tenants = await tenant_service.get_active_tenants(skip=skip, limit=limit)

    logger.debug(
        "Get active tenants successful",
        count=len(tenants),
        skip=skip,
        limit=limit
    )

    return tenants
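
# Illustrative usage sketch (not part of the commit): calling the new list
# endpoint from a client. The URL prefix below is an assumption -- the real
# path comes from route_builder.build_base_route(""); skip/limit map directly
# to the Query parameters above, with limit capped at 1000 by validation.
import httpx

async def fetch_active_tenants(base_url: str, token: str) -> list:
    async with httpx.AsyncClient(base_url=base_url) as client:
        resp = await client.get(
            "/api/v1/tenants",  # assumed prefix; see route_builder configuration
            params={"skip": 0, "limit": 100},
            headers={"Authorization": f"Bearer {token}"},
        )
        resp.raise_for_status()
        return resp.json()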

@router.get(route_builder.build_base_route("{tenant_id}", include_tenant_prefix=False), response_model=TenantResponse)
@track_endpoint_metrics("tenant_get")
async def get_tenant(

@@ -14,6 +14,7 @@ AuditLog = create_audit_log_model(Base)
# Import all models to register them with the Base metadata
from .tenants import Tenant, TenantMember, Subscription
from .coupon import CouponModel, CouponRedemptionModel
+from .events import Event, EventTemplate

# List all models for easier access
__all__ = [
@@ -23,4 +24,6 @@ __all__ = [
    "AuditLog",
    "CouponModel",
    "CouponRedemptionModel",
+    "Event",
+    "EventTemplate",
]

services/tenant/app/models/events.py (new file, 136 lines)
@@ -0,0 +1,136 @@
"""
|
||||
Event Calendar Models
|
||||
Database models for tracking local events, promotions, and special occasions
|
||||
"""
|
||||
|
||||
from sqlalchemy import Column, Integer, String, DateTime, Text, Boolean, Float, Date
|
||||
from sqlalchemy.dialects.postgresql import UUID
|
||||
from shared.database.base import Base
|
||||
from datetime import datetime, timezone
|
||||
import uuid
|
||||
|
||||
|
||||
class Event(Base):
|
||||
"""
|
||||
Table to track events that affect bakery demand.
|
||||
|
||||
Events include:
|
||||
- Local events (festivals, markets, concerts)
|
||||
- Promotions and sales
|
||||
- Weather events (heat waves, storms)
|
||||
- School holidays and breaks
|
||||
- Special occasions
|
||||
"""
|
||||
__tablename__ = "events"
|
||||
|
||||
# Primary identification
|
||||
id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
|
||||
tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)
|
||||
|
||||
# Event information
|
||||
event_name = Column(String(500), nullable=False)
|
||||
event_type = Column(String(100), nullable=False, index=True) # promotion, festival, holiday, weather, school_break, sport_event, etc.
|
||||
description = Column(Text, nullable=True)
|
||||
|
||||
# Date and time
|
||||
event_date = Column(Date, nullable=False, index=True)
|
||||
start_time = Column(DateTime(timezone=True), nullable=True)
|
||||
end_time = Column(DateTime(timezone=True), nullable=True)
|
||||
is_all_day = Column(Boolean, default=True)
|
||||
|
||||
# Impact estimation
|
||||
expected_impact = Column(String(50), nullable=True) # low, medium, high, very_high
|
||||
impact_multiplier = Column(Float, nullable=True) # Expected demand multiplier (e.g., 1.5 = 50% increase)
|
||||
affected_product_categories = Column(String(500), nullable=True) # Comma-separated categories
|
||||
|
||||
# Location
|
||||
location = Column(String(500), nullable=True)
|
||||
is_local = Column(Boolean, default=True) # True if event is near bakery
|
||||
|
||||
# Status
|
||||
is_confirmed = Column(Boolean, default=False)
|
||||
is_recurring = Column(Boolean, default=False)
|
||||
recurrence_pattern = Column(String(200), nullable=True) # e.g., "weekly:monday", "monthly:first_saturday"
|
||||
|
||||
# Actual impact (filled after event)
|
||||
actual_impact_multiplier = Column(Float, nullable=True)
|
||||
actual_sales_increase_percent = Column(Float, nullable=True)
|
||||
|
||||
# Metadata
|
||||
created_at = Column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc))
|
||||
updated_at = Column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc), onupdate=lambda: datetime.now(timezone.utc))
|
||||
created_by = Column(String(255), nullable=True)
|
||||
notes = Column(Text, nullable=True)
|
||||
|
||||
def to_dict(self):
|
||||
return {
|
||||
"id": str(self.id),
|
||||
"tenant_id": str(self.tenant_id),
|
||||
"event_name": self.event_name,
|
||||
"event_type": self.event_type,
|
||||
"description": self.description,
|
||||
"event_date": self.event_date.isoformat() if self.event_date else None,
|
||||
"start_time": self.start_time.isoformat() if self.start_time else None,
|
||||
"end_time": self.end_time.isoformat() if self.end_time else None,
|
||||
"is_all_day": self.is_all_day,
|
||||
"expected_impact": self.expected_impact,
|
||||
"impact_multiplier": self.impact_multiplier,
|
||||
"affected_product_categories": self.affected_product_categories,
|
||||
"location": self.location,
|
||||
"is_local": self.is_local,
|
||||
"is_confirmed": self.is_confirmed,
|
||||
"is_recurring": self.is_recurring,
|
||||
"recurrence_pattern": self.recurrence_pattern,
|
||||
"actual_impact_multiplier": self.actual_impact_multiplier,
|
||||
"actual_sales_increase_percent": self.actual_sales_increase_percent,
|
||||
"created_at": self.created_at.isoformat() if self.created_at else None,
|
||||
"updated_at": self.updated_at.isoformat() if self.updated_at else None,
|
||||
"created_by": self.created_by,
|
||||
"notes": self.notes
|
||||
}
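
# Illustrative sketch (not part of the commit): how impact_multiplier is meant
# to be consumed downstream. apply_event_uplift is a hypothetical helper; the
# model only stores the multiplier, interpretation happens in forecasting code.
def apply_event_uplift(baseline_forecast: float, events: list) -> float:
    """Scale a day's baseline demand forecast by the confirmed events on that day."""
    adjusted = baseline_forecast
    for event in events:
        if event.is_confirmed and event.impact_multiplier:
            # e.g. impact_multiplier=1.5 turns 200 baseline units into 300
            adjusted *= event.impact_multiplier
    return adjusted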

class EventTemplate(Base):
    """
    Template for recurring events.
    Allows easy creation of events based on patterns.
    """
    __tablename__ = "event_templates"

    # Primary identification
    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)

    # Template information
    template_name = Column(String(500), nullable=False)
    event_type = Column(String(100), nullable=False)
    description = Column(Text, nullable=True)

    # Default values
    default_impact = Column(String(50), nullable=True)
    default_impact_multiplier = Column(Float, nullable=True)
    default_affected_categories = Column(String(500), nullable=True)

    # Recurrence
    recurrence_pattern = Column(String(200), nullable=False)  # e.g., "weekly:saturday", "monthly:last_sunday"
    is_active = Column(Boolean, default=True)

    # Metadata
    created_at = Column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc))
    updated_at = Column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc), onupdate=lambda: datetime.now(timezone.utc))

    def to_dict(self):
        return {
            "id": str(self.id),
            "tenant_id": str(self.tenant_id),
            "template_name": self.template_name,
            "event_type": self.event_type,
            "description": self.description,
            "default_impact": self.default_impact,
            "default_impact_multiplier": self.default_impact_multiplier,
            "default_affected_categories": self.default_affected_categories,
            "recurrence_pattern": self.recurrence_pattern,
            "is_active": self.is_active,
            "created_at": self.created_at.isoformat() if self.created_at else None,
            "updated_at": self.updated_at.isoformat() if self.updated_at else None
        }
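
# Illustrative sketch (hypothetical helper, not in the commit): materializing a
# concrete Event from a template for a given date, copying the template defaults.
from datetime import date

def event_from_template(template: "EventTemplate", on_date: date) -> "Event":
    return Event(
        tenant_id=template.tenant_id,
        event_name=template.template_name,
        event_type=template.event_type,
        description=template.description,
        event_date=on_date,
        expected_impact=template.default_impact,
        impact_multiplier=template.default_impact_multiplier,
        affected_product_categories=template.default_affected_categories,
        is_recurring=True,
        recurrence_pattern=template.recurrence_pattern,
    )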

@@ -154,6 +154,32 @@ class TenantSettings(Base):
        "enable_supplier_score_optimization": True
    })

    # ML Insights Settings (AI Insights Service)
    ml_insights_settings = Column(JSON, nullable=False, default=lambda: {
        # Inventory ML (Safety Stock Optimization)
        "inventory_lookback_days": 90,
        "inventory_min_history_days": 30,

        # Production ML (Yield Prediction)
        "production_lookback_days": 90,
        "production_min_history_runs": 30,

        # Procurement ML (Supplier Analysis & Price Forecasting)
        "supplier_analysis_lookback_days": 180,
        "supplier_analysis_min_orders": 10,
        "price_forecast_lookback_days": 180,
        "price_forecast_horizon_days": 30,

        # Forecasting ML (Dynamic Rules)
        "rules_generation_lookback_days": 90,
        "rules_generation_min_samples": 10,

        # Global ML Settings
        "enable_ml_insights": True,
        "ml_insights_auto_trigger": False,
        "ml_confidence_threshold": 0.80
    })
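
    # Illustrative sketch (not part of the commit): a defensive accessor, since
    # rows created before this column existed may hold missing keys in the JSON.
    def get_ml_confidence(self) -> float:
        ml = self.ml_insights_settings or {}
        return float(ml.get("ml_confidence_threshold", 0.80))  # 0.80 mirrors the default above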

    # Timestamps
    created_at = Column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc), nullable=False)
    updated_at = Column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc), onupdate=lambda: datetime.now(timezone.utc), nullable=False)
@@ -280,5 +306,20 @@ class TenantSettings(Base):
            "diversification_threshold": 1000,
            "max_single_percentage": 0.70,
            "enable_supplier_score_optimization": True
        },
        "ml_insights_settings": {
            "inventory_lookback_days": 90,
            "inventory_min_history_days": 30,
            "production_lookback_days": 90,
            "production_min_history_runs": 30,
            "supplier_analysis_lookback_days": 180,
            "supplier_analysis_min_orders": 10,
            "price_forecast_lookback_days": 180,
            "price_forecast_horizon_days": 30,
            "rules_generation_lookback_days": 90,
            "rules_generation_min_samples": 10,
            "enable_ml_insights": True,
            "ml_insights_auto_trigger": False,
            "ml_confidence_threshold": 0.80
        }
    }

services/tenant/app/repositories/event_repository.py (new file, 283 lines)
@@ -0,0 +1,283 @@
"""
|
||||
Event Repository
|
||||
Data access layer for events
|
||||
"""
|
||||
|
||||
from typing import List, Optional, Dict, Any
|
||||
from datetime import date, datetime
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy import select, and_, or_, func
|
||||
from uuid import UUID
|
||||
import structlog
|
||||
|
||||
from app.models.events import Event, EventTemplate
|
||||
from shared.database.repository import BaseRepository
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
|
||||
class EventRepository(BaseRepository[Event]):
|
||||
"""Repository for event management"""
|
||||
|
||||
def __init__(self, session: AsyncSession):
|
||||
super().__init__(Event, session)
|
||||
|
||||
async def get_events_by_date_range(
|
||||
self,
|
||||
tenant_id: UUID,
|
||||
start_date: date,
|
||||
end_date: date,
|
||||
event_types: List[str] = None,
|
||||
confirmed_only: bool = False
|
||||
) -> List[Event]:
|
||||
"""
|
||||
Get events within a date range.
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant UUID
|
||||
start_date: Start date (inclusive)
|
||||
end_date: End date (inclusive)
|
||||
event_types: Optional filter by event types
|
||||
confirmed_only: Only return confirmed events
|
||||
|
||||
Returns:
|
||||
List of Event objects
|
||||
"""
|
||||
try:
|
||||
query = select(Event).where(
|
||||
and_(
|
||||
Event.tenant_id == tenant_id,
|
||||
Event.event_date >= start_date,
|
||||
Event.event_date <= end_date
|
||||
)
|
||||
)
|
||||
|
||||
if event_types:
|
||||
query = query.where(Event.event_type.in_(event_types))
|
||||
|
||||
if confirmed_only:
|
||||
query = query.where(Event.is_confirmed == True)
|
||||
|
||||
query = query.order_by(Event.event_date)
|
||||
|
||||
result = await self.session.execute(query)
|
||||
events = result.scalars().all()
|
||||
|
||||
logger.debug("Retrieved events by date range",
|
||||
tenant_id=str(tenant_id),
|
||||
start_date=start_date.isoformat(),
|
||||
end_date=end_date.isoformat(),
|
||||
count=len(events))
|
||||
|
||||
return list(events)
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to get events by date range",
|
||||
tenant_id=str(tenant_id),
|
||||
error=str(e))
|
||||
return []
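
    # Illustrative usage sketch (not part of the commit): a convenience wrapper
    # showing how the date-range query is typically called from calling code.
    async def confirmed_events_next_week(self, tenant_id: UUID) -> List[Event]:
        from datetime import timedelta  # local import, mirroring get_upcoming_events below
        today = date.today()
        return await self.get_events_by_date_range(
            tenant_id=tenant_id,
            start_date=today,
            end_date=today + timedelta(days=7),
            event_types=["festival", "promotion"],  # example filter values
            confirmed_only=True,
        )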

    async def get_events_for_date(
        self,
        tenant_id: UUID,
        event_date: date
    ) -> List[Event]:
        """
        Get all events for a specific date.

        Args:
            tenant_id: Tenant UUID
            event_date: Date to get events for

        Returns:
            List of Event objects
        """
        try:
            query = select(Event).where(
                and_(
                    Event.tenant_id == tenant_id,
                    Event.event_date == event_date
                )
            ).order_by(Event.start_time)

            result = await self.session.execute(query)
            events = result.scalars().all()

            return list(events)

        except Exception as e:
            logger.error("Failed to get events for date",
                         tenant_id=str(tenant_id),
                         error=str(e))
            return []

    async def get_upcoming_events(
        self,
        tenant_id: UUID,
        days_ahead: int = 30,
        limit: int = 100
    ) -> List[Event]:
        """
        Get upcoming events.

        Args:
            tenant_id: Tenant UUID
            days_ahead: Number of days to look ahead
            limit: Maximum number of events to return

        Returns:
            List of upcoming Event objects
        """
        try:
            from datetime import date, timedelta

            today = date.today()
            future_date = today + timedelta(days=days_ahead)

            query = select(Event).where(
                and_(
                    Event.tenant_id == tenant_id,
                    Event.event_date >= today,
                    Event.event_date <= future_date
                )
            ).order_by(Event.event_date).limit(limit)

            result = await self.session.execute(query)
            events = result.scalars().all()

            return list(events)

        except Exception as e:
            logger.error("Failed to get upcoming events",
                         tenant_id=str(tenant_id),
                         error=str(e))
            return []

    async def create_event(self, event_data: Dict[str, Any]) -> Event:
        """Create a new event"""
        try:
            event = Event(**event_data)
            self.session.add(event)
            await self.session.flush()

            logger.info("Created event",
                        event_id=str(event.id),
                        event_name=event.event_name,
                        event_date=event.event_date.isoformat())

            return event

        except Exception as e:
            logger.error("Failed to create event", error=str(e))
            raise

    async def update_event_actual_impact(
        self,
        event_id: UUID,
        actual_impact_multiplier: float,
        actual_sales_increase_percent: float
    ) -> Optional[Event]:
        """
        Update event with actual impact after it occurs.

        Args:
            event_id: Event UUID
            actual_impact_multiplier: Actual demand multiplier observed
            actual_sales_increase_percent: Actual sales increase percentage

        Returns:
            Updated Event or None
        """
        try:
            event = await self.get(event_id)
            if not event:
                return None

            event.actual_impact_multiplier = actual_impact_multiplier
            event.actual_sales_increase_percent = actual_sales_increase_percent

            await self.session.flush()

            logger.info("Updated event actual impact",
                        event_id=str(event_id),
                        actual_multiplier=actual_impact_multiplier)

            return event

        except Exception as e:
            logger.error("Failed to update event actual impact",
                         event_id=str(event_id),
                         error=str(e))
            return None
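
    # Illustrative sketch (hypothetical helper, not in the commit): deriving the
    # two "actual impact" numbers from observed sales before calling the update.
    # baseline_sales would come from the forecasting service's no-event estimate.
    @staticmethod
    def compute_actual_impact(event_day_sales: float, baseline_sales: float) -> tuple:
        multiplier = event_day_sales / baseline_sales if baseline_sales else 1.0
        increase_percent = (multiplier - 1.0) * 100.0  # e.g. 1.5x -> +50%
        return multiplier, increase_percent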

    async def get_events_by_type(
        self,
        tenant_id: UUID,
        event_type: str,
        limit: int = 100
    ) -> List[Event]:
        """Get events by type"""
        try:
            query = select(Event).where(
                and_(
                    Event.tenant_id == tenant_id,
                    Event.event_type == event_type
                )
            ).order_by(Event.event_date.desc()).limit(limit)

            result = await self.session.execute(query)
            events = result.scalars().all()

            return list(events)

        except Exception as e:
            logger.error("Failed to get events by type",
                         tenant_id=str(tenant_id),
                         event_type=event_type,
                         error=str(e))
            return []


class EventTemplateRepository(BaseRepository[EventTemplate]):
    """Repository for event template management"""

    def __init__(self, session: AsyncSession):
        super().__init__(EventTemplate, session)

    async def get_active_templates(self, tenant_id: UUID) -> List[EventTemplate]:
        """Get all active event templates for a tenant"""
        try:
            query = select(EventTemplate).where(
                and_(
                    EventTemplate.tenant_id == tenant_id,
                    EventTemplate.is_active == True
                )
            ).order_by(EventTemplate.template_name)

            result = await self.session.execute(query)
            templates = result.scalars().all()

            return list(templates)

        except Exception as e:
            logger.error("Failed to get active templates",
                         tenant_id=str(tenant_id),
                         error=str(e))
            return []

    async def create_template(self, template_data: Dict[str, Any]) -> EventTemplate:
        """Create a new event template"""
        try:
            template = EventTemplate(**template_data)
            self.session.add(template)
            await self.session.flush()

            logger.info("Created event template",
                        template_id=str(template.id),
                        template_name=template.template_name)

            return template

        except Exception as e:
            logger.error("Failed to create event template", error=str(e))
            raise

@@ -184,7 +184,7 @@ class SupplierSelectionSettings(BaseModel):

    @validator('price_weight', 'lead_time_weight', 'quality_weight', 'reliability_weight')
    def validate_weights_sum(cls, v, values):
        weights = [values.get('price_weight', 0.40), values.get('lead_time_weight', 0.20),
                   values.get('quality_weight', 0.20), values.get('reliability_weight', 0.20)]
        total = sum(weights)
        if total > 1.0:
@@ -192,6 +192,32 @@ class SupplierSelectionSettings(BaseModel):
        return v


class MLInsightsSettings(BaseModel):
    """ML Insights configuration settings"""
    # Inventory ML (Safety Stock Optimization)
    inventory_lookback_days: int = Field(90, ge=30, le=365, description="Days of demand history for safety stock analysis")
    inventory_min_history_days: int = Field(30, ge=7, le=180, description="Minimum days of history required")

    # Production ML (Yield Prediction)
    production_lookback_days: int = Field(90, ge=30, le=365, description="Days of production history for yield analysis")
    production_min_history_runs: int = Field(30, ge=10, le=100, description="Minimum production runs required")

    # Procurement ML (Supplier Analysis & Price Forecasting)
    supplier_analysis_lookback_days: int = Field(180, ge=30, le=730, description="Days of order history for supplier analysis")
    supplier_analysis_min_orders: int = Field(10, ge=5, le=100, description="Minimum orders required for analysis")
    price_forecast_lookback_days: int = Field(180, ge=90, le=730, description="Days of price history for forecasting")
    price_forecast_horizon_days: int = Field(30, ge=7, le=90, description="Days to forecast ahead")

    # Forecasting ML (Dynamic Rules)
    rules_generation_lookback_days: int = Field(90, ge=30, le=365, description="Days of sales history for rule learning")
    rules_generation_min_samples: int = Field(10, ge=5, le=100, description="Minimum samples required for rule generation")

    # Global ML Settings
    enable_ml_insights: bool = Field(True, description="Enable/disable ML insights generation")
    ml_insights_auto_trigger: bool = Field(False, description="Automatically trigger ML insights in daily workflow")
    ml_confidence_threshold: float = Field(0.80, ge=0.0, le=1.0, description="Minimum confidence threshold for ML recommendations")
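
# Illustrative sketch (not part of the commit): the Pydantic model validates the
# raw JSON stored on TenantSettings.ml_insights_settings, filling defaults for
# unset fields and rejecting out-of-range values.
raw = {"inventory_lookback_days": 90, "ml_confidence_threshold": 0.80}
settings = MLInsightsSettings(**raw)           # unset fields fall back to defaults
assert settings.price_forecast_horizon_days == 30

try:
    MLInsightsSettings(ml_confidence_threshold=1.5)  # outside le=1.0
except Exception as exc:  # pydantic.ValidationError
    print("rejected:", exc)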


# ================================================================
# REQUEST/RESPONSE SCHEMAS
# ================================================================
@@ -210,6 +236,7 @@ class TenantSettingsResponse(BaseModel):
    safety_stock_settings: SafetyStockSettings
    moq_settings: MOQSettings
    supplier_selection_settings: SupplierSelectionSettings
+    ml_insights_settings: MLInsightsSettings
    created_at: datetime
    updated_at: datetime

@@ -229,6 +256,7 @@ class TenantSettingsUpdate(BaseModel):
    safety_stock_settings: Optional[SafetyStockSettings] = None
    moq_settings: Optional[MOQSettings] = None
    supplier_selection_settings: Optional[SupplierSelectionSettings] = None
+    ml_insights_settings: Optional[MLInsightsSettings] = None


class CategoryUpdateRequest(BaseModel):

@@ -265,18 +265,34 @@ class EnhancedTenantService:

    async def get_user_tenants(self, owner_id: str) -> List[TenantResponse]:
        """Get all tenants owned by a user"""
        try:
            async with self.database_manager.get_session() as db_session:
                await self._init_repositories(db_session)
                tenants = await self.tenant_repo.get_tenants_by_owner(owner_id)
                return [TenantResponse.from_orm(tenant) for tenant in tenants]

        except Exception as e:
            logger.error("Error getting user tenants",
                         owner_id=owner_id,
                         error=str(e))
            return []

    async def get_active_tenants(self, skip: int = 0, limit: int = 100) -> List[TenantResponse]:
        """Get all active tenants"""
        try:
            async with self.database_manager.get_session() as db_session:
                await self._init_repositories(db_session)
                tenants = await self.tenant_repo.get_active_tenants(skip=skip, limit=limit)
                return [TenantResponse.from_orm(tenant) for tenant in tenants]

        except Exception as e:
            logger.error("Error getting active tenants",
                         skip=skip,
                         limit=limit,
                         error=str(e))
            return []
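
    # Illustrative usage sketch (hypothetical method, not in the commit): draining
    # every page from get_active_tenants, relying on the skip/limit contract above.
    async def iter_all_active_tenants(self, page_size: int = 100):
        skip = 0
        while True:
            page = await self.get_active_tenants(skip=skip, limit=page_size)
            if not page:
                break
            for tenant in page:
                yield tenant
            skip += page_size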

    async def search_tenants(
        self,

services/tenant/migrations/versions/001_initial_schema.py (new file, 295 lines)
@@ -0,0 +1,295 @@
"""Comprehensive initial schema with all tenant service tables and columns
|
||||
|
||||
Revision ID: initial_schema_comprehensive
|
||||
Revises:
|
||||
Create Date: 2025-11-05 13:30:00.000000+00:00
|
||||
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import postgresql
|
||||
from sqlalchemy.dialects.postgresql import UUID
|
||||
import uuid
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = '001_initial_schema'
|
||||
down_revision: Union[str, None] = None
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
    # Create audit_logs table
    op.create_table('audit_logs',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('user_id', sa.UUID(), nullable=False),
        sa.Column('action', sa.String(length=100), nullable=False),
        sa.Column('resource_type', sa.String(length=100), nullable=False),
        sa.Column('resource_id', sa.String(length=255), nullable=True),
        sa.Column('severity', sa.String(length=20), nullable=False),
        sa.Column('service_name', sa.String(length=100), nullable=False),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('changes', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('audit_metadata', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('ip_address', sa.String(length=45), nullable=True),
        sa.Column('user_agent', sa.Text(), nullable=True),
        sa.Column('endpoint', sa.String(length=255), nullable=True),
        sa.Column('method', sa.String(length=10), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index('idx_audit_resource_type_action', 'audit_logs', ['resource_type', 'action'], unique=False)
    op.create_index('idx_audit_service_created', 'audit_logs', ['service_name', 'created_at'], unique=False)
    op.create_index('idx_audit_severity_created', 'audit_logs', ['severity', 'created_at'], unique=False)
    op.create_index('idx_audit_tenant_created', 'audit_logs', ['tenant_id', 'created_at'], unique=False)
    op.create_index('idx_audit_user_created', 'audit_logs', ['user_id', 'created_at'], unique=False)
    op.create_index(op.f('ix_audit_logs_action'), 'audit_logs', ['action'], unique=False)
    op.create_index(op.f('ix_audit_logs_created_at'), 'audit_logs', ['created_at'], unique=False)
    op.create_index(op.f('ix_audit_logs_resource_id'), 'audit_logs', ['resource_id'], unique=False)
    op.create_index(op.f('ix_audit_logs_resource_type'), 'audit_logs', ['resource_type'], unique=False)
    op.create_index(op.f('ix_audit_logs_service_name'), 'audit_logs', ['service_name'], unique=False)
    op.create_index(op.f('ix_audit_logs_severity'), 'audit_logs', ['severity'], unique=False)
    op.create_index(op.f('ix_audit_logs_tenant_id'), 'audit_logs', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_audit_logs_user_id'), 'audit_logs', ['user_id'], unique=False)

    # Create tenants table
    op.create_table('tenants',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('name', sa.String(length=200), nullable=False),
        sa.Column('subdomain', sa.String(length=100), nullable=True),
        sa.Column('business_type', sa.String(length=100), nullable=True),
        sa.Column('business_model', sa.String(length=100), nullable=True),
        sa.Column('address', sa.Text(), nullable=False),
        sa.Column('city', sa.String(length=100), nullable=True),
        sa.Column('postal_code', sa.String(length=10), nullable=False),
        sa.Column('latitude', sa.Float(), nullable=True),
        sa.Column('longitude', sa.Float(), nullable=True),
        sa.Column('timezone', sa.String(length=50), nullable=False),
        sa.Column('phone', sa.String(length=20), nullable=True),
        sa.Column('email', sa.String(length=255), nullable=True),
        sa.Column('is_active', sa.Boolean(), nullable=True),
        sa.Column('is_demo', sa.Boolean(), nullable=True),
        sa.Column('is_demo_template', sa.Boolean(), nullable=True),
        sa.Column('base_demo_tenant_id', sa.UUID(), nullable=True),
        sa.Column('demo_session_id', sa.String(length=100), nullable=True),
        sa.Column('demo_expires_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('ml_model_trained', sa.Boolean(), nullable=True),
        sa.Column('last_training_date', sa.DateTime(timezone=True), nullable=True),
        sa.Column('metadata_', sa.JSON(), nullable=True),
        sa.Column('owner_id', sa.UUID(), nullable=False),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False, default=sa.text('CURRENT_TIMESTAMP'), onupdate=sa.text('CURRENT_TIMESTAMP')),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('subdomain')
    )
    op.create_index(op.f('ix_tenants_base_demo_tenant_id'), 'tenants', ['base_demo_tenant_id'], unique=False)
    op.create_index(op.f('ix_tenants_demo_session_id'), 'tenants', ['demo_session_id'], unique=False)
    op.create_index(op.f('ix_tenants_is_demo'), 'tenants', ['is_demo'], unique=False)
    op.create_index(op.f('ix_tenants_is_demo_template'), 'tenants', ['is_demo_template'], unique=False)
    op.create_index(op.f('ix_tenants_owner_id'), 'tenants', ['owner_id'], unique=False)

    # Create tenant_members table
    op.create_table('tenant_members',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('user_id', sa.UUID(), nullable=False),
        sa.Column('role', sa.String(length=50), nullable=True),
        sa.Column('permissions', sa.Text(), nullable=True),
        sa.Column('is_active', sa.Boolean(), nullable=True),
        sa.Column('invited_by', sa.UUID(), nullable=True),
        sa.Column('invited_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('joined_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=False, default=sa.text('CURRENT_TIMESTAMP')),
        sa.ForeignKeyConstraint(['tenant_id'], ['tenants.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_tenant_members_user_id'), 'tenant_members', ['user_id'], unique=False)

    # Create tenant_settings table with current model structure
    op.create_table('tenant_settings',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('procurement_settings', postgresql.JSON(astext_type=sa.Text()), nullable=False),
        sa.Column('inventory_settings', postgresql.JSON(astext_type=sa.Text()), nullable=False),
        sa.Column('production_settings', postgresql.JSON(astext_type=sa.Text()), nullable=False),
        sa.Column('supplier_settings', postgresql.JSON(astext_type=sa.Text()), nullable=False),
        sa.Column('pos_settings', postgresql.JSON(astext_type=sa.Text()), nullable=False),
        sa.Column('order_settings', postgresql.JSON(astext_type=sa.Text()), nullable=False),
        sa.Column('replenishment_settings', postgresql.JSON(astext_type=sa.Text()), nullable=False),
        sa.Column('safety_stock_settings', postgresql.JSON(astext_type=sa.Text()), nullable=False),
        sa.Column('moq_settings', postgresql.JSON(astext_type=sa.Text()), nullable=False),
        sa.Column('supplier_selection_settings', postgresql.JSON(astext_type=sa.Text()), nullable=False),
        sa.Column('ml_insights_settings', postgresql.JSON(astext_type=sa.Text()), nullable=False),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=False, default=sa.text('CURRENT_TIMESTAMP')),
        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False, default=sa.text('CURRENT_TIMESTAMP'), onupdate=sa.text('CURRENT_TIMESTAMP')),
        sa.ForeignKeyConstraint(['tenant_id'], ['tenants.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('tenant_id')
    )

    # Create subscriptions table with all current columns
    op.create_table('subscriptions',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('plan', sa.String(length=50), nullable=True),
        sa.Column('status', sa.String(length=50), nullable=True),
        sa.Column('monthly_price', sa.Float(), nullable=True),
        sa.Column('billing_cycle', sa.String(length=20), nullable=True),
        sa.Column('next_billing_date', sa.DateTime(timezone=True), nullable=True),
        sa.Column('trial_ends_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('cancelled_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('cancellation_effective_date', sa.DateTime(timezone=True), nullable=True),
        sa.Column('stripe_subscription_id', sa.String(255), nullable=True),
        sa.Column('stripe_customer_id', sa.String(255), nullable=True),
        sa.Column('max_users', sa.Integer(), nullable=True),
        sa.Column('max_locations', sa.Integer(), nullable=True),
        sa.Column('max_products', sa.Integer(), nullable=True),
        sa.Column('features', sa.JSON(), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=False, default=sa.text('CURRENT_TIMESTAMP')),
        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False, default=sa.text('CURRENT_TIMESTAMP'), onupdate=sa.text('CURRENT_TIMESTAMP')),
        sa.ForeignKeyConstraint(['tenant_id'], ['tenants.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id')
    )

    # Create coupons table with current model structure
    op.create_table('coupons',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('code', sa.String(length=50), nullable=False),
        sa.Column('discount_type', sa.String(length=20), nullable=False),
        sa.Column('discount_value', sa.Integer(), nullable=False),
        sa.Column('max_redemptions', sa.Integer(), nullable=True),
        sa.Column('current_redemptions', sa.Integer(), nullable=False, default=0),
        sa.Column('valid_from', sa.DateTime(timezone=True), nullable=False),
        sa.Column('valid_until', sa.DateTime(timezone=True), nullable=True),
        sa.Column('active', sa.Boolean(), nullable=False, default=True),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=False, default=sa.text('CURRENT_TIMESTAMP')),
        sa.Column('extra_data', sa.JSON(), nullable=True),
        sa.ForeignKeyConstraint(['tenant_id'], ['tenants.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('code')  # Mirrors unique=True on the model's code column; codes are globally unique, not per tenant
    )
    op.create_index('idx_coupon_code_active', 'coupons', ['code', 'active'], unique=False)
    op.create_index('idx_coupon_valid_dates', 'coupons', ['valid_from', 'valid_until'], unique=False)

    # Create coupon_redemptions table with current model structure
    op.create_table('coupon_redemptions',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.String(length=255), nullable=False),
        sa.Column('coupon_code', sa.String(length=50), nullable=False),
        sa.Column('redeemed_at', sa.DateTime(timezone=True), nullable=False, default=sa.text('CURRENT_TIMESTAMP')),
        sa.Column('discount_applied', sa.JSON(), nullable=False),
        sa.Column('extra_data', sa.JSON(), nullable=True),
        sa.ForeignKeyConstraint(['coupon_code'], ['coupons.code'], ),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index('idx_redemption_tenant', 'coupon_redemptions', ['tenant_id'], unique=False)
    op.create_index('idx_redemption_coupon', 'coupon_redemptions', ['coupon_code'], unique=False)
    op.create_index('idx_redemption_tenant_coupon', 'coupon_redemptions', ['tenant_id', 'coupon_code'], unique=False)

    # Create events table with current model structure
    op.create_table(
        'events',
        sa.Column('id', UUID(as_uuid=True), primary_key=True, default=uuid.uuid4),
        sa.Column('tenant_id', UUID(as_uuid=True), nullable=False, index=True),
        sa.Column('event_name', sa.String(500), nullable=False),
        sa.Column('event_type', sa.String(100), nullable=False, index=True),
        sa.Column('description', sa.Text, nullable=True),
        sa.Column('event_date', sa.Date, nullable=False, index=True),
        sa.Column('start_time', sa.DateTime(timezone=True), nullable=True),
        sa.Column('end_time', sa.DateTime(timezone=True), nullable=True),
        sa.Column('is_all_day', sa.Boolean, default=True),
        sa.Column('expected_impact', sa.String(50), nullable=True),
        sa.Column('impact_multiplier', sa.Float, nullable=True),
        sa.Column('affected_product_categories', sa.String(500), nullable=True),
        sa.Column('location', sa.String(500), nullable=True),
        sa.Column('is_local', sa.Boolean, default=True),
        sa.Column('is_confirmed', sa.Boolean, default=False),
        sa.Column('is_recurring', sa.Boolean, default=False),
        sa.Column('recurrence_pattern', sa.String(200), nullable=True),
        sa.Column('actual_impact_multiplier', sa.Float, nullable=True),
        sa.Column('actual_sales_increase_percent', sa.Float, nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=False, default=sa.text('CURRENT_TIMESTAMP')),
        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False, default=sa.text('CURRENT_TIMESTAMP'), onupdate=sa.text('CURRENT_TIMESTAMP')),
        sa.Column('created_by', sa.String(255), nullable=True),
        sa.Column('notes', sa.Text, nullable=True),
    )

    # Create event_templates table with current model structure
    op.create_table(
        'event_templates',
        sa.Column('id', UUID(as_uuid=True), primary_key=True, default=uuid.uuid4),
        sa.Column('tenant_id', UUID(as_uuid=True), nullable=False, index=True),
        sa.Column('template_name', sa.String(500), nullable=False),
        sa.Column('event_type', sa.String(100), nullable=False),
        sa.Column('description', sa.Text, nullable=True),
        sa.Column('default_impact', sa.String(50), nullable=True),
        sa.Column('default_impact_multiplier', sa.Float, nullable=True),
        sa.Column('default_affected_categories', sa.String(500), nullable=True),
        sa.Column('recurrence_pattern', sa.String(200), nullable=False),
        sa.Column('is_active', sa.Boolean, default=True),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=False, default=sa.text('CURRENT_TIMESTAMP')),
        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False, default=sa.text('CURRENT_TIMESTAMP'), onupdate=sa.text('CURRENT_TIMESTAMP')),
    )

    # Create indexes for better query performance on events
    op.create_index('ix_events_tenant_date', 'events', ['tenant_id', 'event_date'])
    op.create_index('ix_events_type_date', 'events', ['event_type', 'event_date'])
    op.create_index('ix_event_templates_tenant_active', 'event_templates', ['tenant_id', 'is_active'])
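
# Illustrative sketch (not part of the migration): checking from a dev shell that
# the repository's hot date-range query actually uses the composite
# ix_events_tenant_date index created above. The DSN is a placeholder assumption.
from sqlalchemy import create_engine, text

def explain_date_range_query(dsn: str = "postgresql://localhost/tenant_dev") -> None:
    engine = create_engine(dsn)
    with engine.connect() as conn:
        plan = conn.execute(
            text(
                "EXPLAIN SELECT * FROM events "
                "WHERE tenant_id = :tid AND event_date BETWEEN :s AND :e"
            ),
            {"tid": "00000000-0000-0000-0000-000000000000",
             "s": "2025-01-01", "e": "2025-01-31"},
        )
        for row in plan:
            print(row[0])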


def downgrade() -> None:
    # Drop indexes for events
    op.drop_index('ix_event_templates_tenant_active', table_name='event_templates')
    op.drop_index('ix_events_type_date', table_name='events')
    op.drop_index('ix_events_tenant_date', table_name='events')

    # Drop event-related tables
    op.drop_table('event_templates')
    op.drop_table('events')

    # Drop coupon-related tables
    op.drop_index('idx_redemption_tenant_coupon', table_name='coupon_redemptions')
    op.drop_index('idx_redemption_coupon', table_name='coupon_redemptions')
    op.drop_index('idx_redemption_tenant', table_name='coupon_redemptions')
    op.drop_table('coupon_redemptions')

    op.drop_index('idx_coupon_valid_dates', table_name='coupons')
    op.drop_index('idx_coupon_code_active', table_name='coupons')
    op.drop_table('coupons')

    # Drop subscriptions table
    op.drop_table('subscriptions')

    # Drop tenant_settings table
    op.drop_table('tenant_settings')

    # Drop other tables in reverse order
    op.drop_index(op.f('ix_tenant_members_user_id'), table_name='tenant_members')
    op.drop_table('tenant_members')

    op.drop_index(op.f('ix_tenants_owner_id'), table_name='tenants')
    op.drop_index(op.f('ix_tenants_is_demo_template'), table_name='tenants')
    op.drop_index(op.f('ix_tenants_is_demo'), table_name='tenants')
    op.drop_index(op.f('ix_tenants_demo_session_id'), table_name='tenants')
    op.drop_index(op.f('ix_tenants_base_demo_tenant_id'), table_name='tenants')
    op.drop_table('tenants')

    op.drop_index(op.f('ix_audit_logs_user_id'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_tenant_id'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_severity'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_service_name'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_resource_type'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_resource_id'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_created_at'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_action'), table_name='audit_logs')
    op.drop_index('idx_audit_user_created', table_name='audit_logs')
    op.drop_index('idx_audit_tenant_created', table_name='audit_logs')
    op.drop_index('idx_audit_severity_created', table_name='audit_logs')
    op.drop_index('idx_audit_service_created', table_name='audit_logs')
    op.drop_index('idx_audit_resource_type_action', table_name='audit_logs')
    op.drop_table('audit_logs')

(deleted file, 151 lines)
@@ -1,151 +0,0 @@
"""initial_schema_20251015_1230

Revision ID: 4e1ddc13dd0f
Revises:
Create Date: 2025-10-15 12:30:04.847858+02:00

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision: str = '4e1ddc13dd0f'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None

def upgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('audit_logs',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('user_id', sa.UUID(), nullable=False),
        sa.Column('action', sa.String(length=100), nullable=False),
        sa.Column('resource_type', sa.String(length=100), nullable=False),
        sa.Column('resource_id', sa.String(length=255), nullable=True),
        sa.Column('severity', sa.String(length=20), nullable=False),
        sa.Column('service_name', sa.String(length=100), nullable=False),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('changes', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('audit_metadata', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('ip_address', sa.String(length=45), nullable=True),
        sa.Column('user_agent', sa.Text(), nullable=True),
        sa.Column('endpoint', sa.String(length=255), nullable=True),
        sa.Column('method', sa.String(length=10), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index('idx_audit_resource_type_action', 'audit_logs', ['resource_type', 'action'], unique=False)
    op.create_index('idx_audit_service_created', 'audit_logs', ['service_name', 'created_at'], unique=False)
    op.create_index('idx_audit_severity_created', 'audit_logs', ['severity', 'created_at'], unique=False)
    op.create_index('idx_audit_tenant_created', 'audit_logs', ['tenant_id', 'created_at'], unique=False)
    op.create_index('idx_audit_user_created', 'audit_logs', ['user_id', 'created_at'], unique=False)
    op.create_index(op.f('ix_audit_logs_action'), 'audit_logs', ['action'], unique=False)
    op.create_index(op.f('ix_audit_logs_created_at'), 'audit_logs', ['created_at'], unique=False)
    op.create_index(op.f('ix_audit_logs_resource_id'), 'audit_logs', ['resource_id'], unique=False)
    op.create_index(op.f('ix_audit_logs_resource_type'), 'audit_logs', ['resource_type'], unique=False)
    op.create_index(op.f('ix_audit_logs_service_name'), 'audit_logs', ['service_name'], unique=False)
    op.create_index(op.f('ix_audit_logs_severity'), 'audit_logs', ['severity'], unique=False)
    op.create_index(op.f('ix_audit_logs_tenant_id'), 'audit_logs', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_audit_logs_user_id'), 'audit_logs', ['user_id'], unique=False)
    op.create_table('tenants',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('name', sa.String(length=200), nullable=False),
        sa.Column('subdomain', sa.String(length=100), nullable=True),
        sa.Column('business_type', sa.String(length=100), nullable=True),
        sa.Column('business_model', sa.String(length=100), nullable=True),
        sa.Column('address', sa.Text(), nullable=False),
        sa.Column('city', sa.String(length=100), nullable=True),
        sa.Column('postal_code', sa.String(length=10), nullable=False),
        sa.Column('latitude', sa.Float(), nullable=True),
        sa.Column('longitude', sa.Float(), nullable=True),
        sa.Column('timezone', sa.String(length=50), nullable=False),
        sa.Column('phone', sa.String(length=20), nullable=True),
        sa.Column('email', sa.String(length=255), nullable=True),
        sa.Column('is_active', sa.Boolean(), nullable=True),
        sa.Column('subscription_tier', sa.String(length=50), nullable=True),
        sa.Column('is_demo', sa.Boolean(), nullable=True),
        sa.Column('is_demo_template', sa.Boolean(), nullable=True),
        sa.Column('base_demo_tenant_id', sa.UUID(), nullable=True),
        sa.Column('demo_session_id', sa.String(length=100), nullable=True),
        sa.Column('demo_expires_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('ml_model_trained', sa.Boolean(), nullable=True),
        sa.Column('last_training_date', sa.DateTime(timezone=True), nullable=True),
        sa.Column('metadata_', sa.JSON(), nullable=True),
        sa.Column('owner_id', sa.UUID(), nullable=False),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('subdomain')
    )
    op.create_index(op.f('ix_tenants_base_demo_tenant_id'), 'tenants', ['base_demo_tenant_id'], unique=False)
    op.create_index(op.f('ix_tenants_demo_session_id'), 'tenants', ['demo_session_id'], unique=False)
    op.create_index(op.f('ix_tenants_is_demo'), 'tenants', ['is_demo'], unique=False)
    op.create_index(op.f('ix_tenants_is_demo_template'), 'tenants', ['is_demo_template'], unique=False)
    op.create_index(op.f('ix_tenants_owner_id'), 'tenants', ['owner_id'], unique=False)
    op.create_table('subscriptions',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('plan', sa.String(length=50), nullable=True),
        sa.Column('status', sa.String(length=50), nullable=True),
        sa.Column('monthly_price', sa.Float(), nullable=True),
        sa.Column('billing_cycle', sa.String(length=20), nullable=True),
        sa.Column('next_billing_date', sa.DateTime(timezone=True), nullable=True),
        sa.Column('trial_ends_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('max_users', sa.Integer(), nullable=True),
        sa.Column('max_locations', sa.Integer(), nullable=True),
        sa.Column('max_products', sa.Integer(), nullable=True),
        sa.Column('features', sa.JSON(), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
        sa.ForeignKeyConstraint(['tenant_id'], ['tenants.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_table('tenant_members',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('user_id', sa.UUID(), nullable=False),
        sa.Column('role', sa.String(length=50), nullable=True),
        sa.Column('permissions', sa.Text(), nullable=True),
        sa.Column('is_active', sa.Boolean(), nullable=True),
        sa.Column('invited_by', sa.UUID(), nullable=True),
        sa.Column('invited_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('joined_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
        sa.ForeignKeyConstraint(['tenant_id'], ['tenants.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_tenant_members_user_id'), 'tenant_members', ['user_id'], unique=False)
    # ### end Alembic commands ###

def downgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f('ix_tenant_members_user_id'), table_name='tenant_members')
    op.drop_table('tenant_members')
    op.drop_table('subscriptions')
    op.drop_index(op.f('ix_tenants_owner_id'), table_name='tenants')
    op.drop_index(op.f('ix_tenants_is_demo_template'), table_name='tenants')
    op.drop_index(op.f('ix_tenants_is_demo'), table_name='tenants')
    op.drop_index(op.f('ix_tenants_demo_session_id'), table_name='tenants')
    op.drop_index(op.f('ix_tenants_base_demo_tenant_id'), table_name='tenants')
    op.drop_table('tenants')
    op.drop_index(op.f('ix_audit_logs_user_id'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_tenant_id'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_severity'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_service_name'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_resource_type'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_resource_id'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_created_at'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_action'), table_name='audit_logs')
    op.drop_index('idx_audit_user_created', table_name='audit_logs')
    op.drop_index('idx_audit_tenant_created', table_name='audit_logs')
    op.drop_index('idx_audit_severity_created', table_name='audit_logs')
    op.drop_index('idx_audit_service_created', table_name='audit_logs')
    op.drop_index('idx_audit_resource_type_action', table_name='audit_logs')
    op.drop_table('audit_logs')
    # ### end Alembic commands ###

(deleted file, 32 lines)
@@ -1,32 +0,0 @@
"""add_subscription_cancellation_fields

Revision ID: 20251016_0000
Revises: 4e1ddc13dd0f
Create Date: 2025-10-16 00:00:00.000000

"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision = '20251016_0000'
down_revision = '4e1ddc13dd0f'
branch_labels = None
depends_on = None


def upgrade() -> None:
    # Add new columns to subscriptions table
    op.add_column('subscriptions', sa.Column('cancelled_at', sa.DateTime(timezone=True), nullable=True))
    op.add_column('subscriptions', sa.Column('cancellation_effective_date', sa.DateTime(timezone=True), nullable=True))
    op.add_column('subscriptions', sa.Column('stripe_subscription_id', sa.String(length=255), nullable=True))
    op.add_column('subscriptions', sa.Column('stripe_customer_id', sa.String(length=255), nullable=True))


def downgrade() -> None:
    # Remove columns
    op.drop_column('subscriptions', 'stripe_customer_id')
    op.drop_column('subscriptions', 'stripe_subscription_id')
    op.drop_column('subscriptions', 'cancellation_effective_date')
    op.drop_column('subscriptions', 'cancelled_at')

(deleted file, 69 lines)
@@ -1,69 +0,0 @@
"""add_coupon_system

Revision ID: 20251017_0000
Revises: 20251016_0000
Create Date: 2025-10-17 00:00:00.000000

"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
import uuid

# revision identifiers, used by Alembic.
revision = '20251017_0000'
down_revision = '20251016_0000'
branch_labels = None
depends_on = None


def upgrade() -> None:
    # Create coupons table
    op.create_table(
        'coupons',
        sa.Column('id', postgresql.UUID(as_uuid=True), primary_key=True, default=uuid.uuid4),
        sa.Column('code', sa.String(50), nullable=False, unique=True),
        sa.Column('discount_type', sa.String(20), nullable=False),
        sa.Column('discount_value', sa.Integer(), nullable=False),
        sa.Column('max_redemptions', sa.Integer(), nullable=True),
        sa.Column('current_redemptions', sa.Integer(), nullable=False, server_default='0'),
        sa.Column('valid_from', sa.DateTime(timezone=True), nullable=False),
        sa.Column('valid_until', sa.DateTime(timezone=True), nullable=True),
        sa.Column('active', sa.Boolean(), nullable=False, server_default='true'),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
        sa.Column('extra_data', postgresql.JSON(astext_type=sa.Text()), nullable=True),
    )

    # Create indexes for coupons table
    op.create_index('idx_coupon_code_active', 'coupons', ['code', 'active'])
    op.create_index('idx_coupon_valid_dates', 'coupons', ['valid_from', 'valid_until'])

    # Create coupon_redemptions table
    op.create_table(
        'coupon_redemptions',
        sa.Column('id', postgresql.UUID(as_uuid=True), primary_key=True, default=uuid.uuid4),
        sa.Column('tenant_id', sa.String(255), nullable=False),
        sa.Column('coupon_code', sa.String(50), nullable=False),
        sa.Column('redeemed_at', sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
        sa.Column('discount_applied', postgresql.JSON(astext_type=sa.Text()), nullable=False),
        sa.Column('extra_data', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.ForeignKeyConstraint(['coupon_code'], ['coupons.code'], name='fk_coupon_redemption_code'),
    )

    # Create indexes for coupon_redemptions table
    op.create_index('idx_redemption_tenant', 'coupon_redemptions', ['tenant_id'])
    op.create_index('idx_redemption_coupon', 'coupon_redemptions', ['coupon_code'])
    op.create_index('idx_redemption_tenant_coupon', 'coupon_redemptions', ['tenant_id', 'coupon_code'])


def downgrade() -> None:
    # Drop indexes first
    op.drop_index('idx_redemption_tenant_coupon', table_name='coupon_redemptions')
    op.drop_index('idx_redemption_coupon', table_name='coupon_redemptions')
    op.drop_index('idx_redemption_tenant', table_name='coupon_redemptions')
    op.drop_index('idx_coupon_valid_dates', table_name='coupons')
    op.drop_index('idx_coupon_code_active', table_name='coupons')

    # Drop tables
    op.drop_table('coupon_redemptions')
    op.drop_table('coupons')
@@ -1,155 +0,0 @@
"""add tenant_settings

Revision ID: 20251022_0000
Revises: 20251017_0000
Create Date: 2025-10-22

"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from uuid import uuid4

# revision identifiers, used by Alembic.
revision = '20251022_0000'
down_revision = '20251017_0000'
branch_labels = None
depends_on = None


def get_default_settings():
    """Get default settings for all categories"""
    return {
        "procurement_settings": {
            "auto_approve_enabled": True,
            "auto_approve_threshold_eur": 500.0,
            "auto_approve_min_supplier_score": 0.80,
            "require_approval_new_suppliers": True,
            "require_approval_critical_items": True,
            "procurement_lead_time_days": 3,
            "demand_forecast_days": 14,
            "safety_stock_percentage": 20.0,
            "po_approval_reminder_hours": 24,
            "po_critical_escalation_hours": 12
        },
        "inventory_settings": {
            "low_stock_threshold": 10,
            "reorder_point": 20,
            "reorder_quantity": 50,
            "expiring_soon_days": 7,
            "expiration_warning_days": 3,
            "quality_score_threshold": 8.0,
            "temperature_monitoring_enabled": True,
            "refrigeration_temp_min": 1.0,
            "refrigeration_temp_max": 4.0,
            "freezer_temp_min": -20.0,
            "freezer_temp_max": -15.0,
            "room_temp_min": 18.0,
            "room_temp_max": 25.0,
            "temp_deviation_alert_minutes": 15,
            "critical_temp_deviation_minutes": 5
        },
        "production_settings": {
            "planning_horizon_days": 7,
            "minimum_batch_size": 1.0,
            "maximum_batch_size": 100.0,
            "production_buffer_percentage": 10.0,
            "working_hours_per_day": 12,
            "max_overtime_hours": 4,
            "capacity_utilization_target": 0.85,
            "capacity_warning_threshold": 0.95,
            "quality_check_enabled": True,
            "minimum_yield_percentage": 85.0,
            "quality_score_threshold": 8.0,
            "schedule_optimization_enabled": True,
            "prep_time_buffer_minutes": 30,
            "cleanup_time_buffer_minutes": 15,
            "labor_cost_per_hour_eur": 15.0,
            "overhead_cost_percentage": 20.0
        },
        "supplier_settings": {
            "default_payment_terms_days": 30,
            "default_delivery_days": 3,
            "excellent_delivery_rate": 95.0,
            "good_delivery_rate": 90.0,
            "excellent_quality_rate": 98.0,
            "good_quality_rate": 95.0,
            "critical_delivery_delay_hours": 24,
            "critical_quality_rejection_rate": 10.0,
            "high_cost_variance_percentage": 15.0
        },
        "pos_settings": {
            "sync_interval_minutes": 5,
            "auto_sync_products": True,
            "auto_sync_transactions": True
        },
        "order_settings": {
            "max_discount_percentage": 50.0,
            "default_delivery_window_hours": 48,
            "dynamic_pricing_enabled": False,
            "discount_enabled": True,
            "delivery_tracking_enabled": True
        }
    }


def upgrade():
    """Create tenant_settings table and seed existing tenants"""
    # Create tenant_settings table
    op.create_table(
        'tenant_settings',
        sa.Column('id', postgresql.UUID(as_uuid=True), primary_key=True, default=uuid4),
        sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('procurement_settings', postgresql.JSON(), nullable=False),
        sa.Column('inventory_settings', postgresql.JSON(), nullable=False),
        sa.Column('production_settings', postgresql.JSON(), nullable=False),
        sa.Column('supplier_settings', postgresql.JSON(), nullable=False),
        sa.Column('pos_settings', postgresql.JSON(), nullable=False),
        sa.Column('order_settings', postgresql.JSON(), nullable=False),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.ForeignKeyConstraint(['tenant_id'], ['tenants.id'], ondelete='CASCADE'),
        sa.UniqueConstraint('tenant_id', name='uq_tenant_settings_tenant_id')
    )

    # Create indexes
    op.create_index('ix_tenant_settings_tenant_id', 'tenant_settings', ['tenant_id'])

    # Seed existing tenants with default settings
    connection = op.get_bind()

    # Get all existing tenant IDs
    result = connection.execute(sa.text("SELECT id FROM tenants"))
    tenant_ids = [row[0] for row in result]

    # Insert default settings for each existing tenant
    defaults = get_default_settings()
    for tenant_id in tenant_ids:
        connection.execute(
            sa.text("""
                INSERT INTO tenant_settings (
                    id, tenant_id, procurement_settings, inventory_settings,
                    production_settings, supplier_settings, pos_settings, order_settings
                ) VALUES (
                    :id, :tenant_id, :procurement_settings::jsonb, :inventory_settings::jsonb,
                    :production_settings::jsonb, :supplier_settings::jsonb,
                    :pos_settings::jsonb, :order_settings::jsonb
                )
            """),
            {
                "id": str(uuid4()),
                "tenant_id": tenant_id,
                "procurement_settings": str(defaults["procurement_settings"]).replace("'", '"').replace("True", "true").replace("False", "false"),
                "inventory_settings": str(defaults["inventory_settings"]).replace("'", '"').replace("True", "true").replace("False", "false"),
                "production_settings": str(defaults["production_settings"]).replace("'", '"').replace("True", "true").replace("False", "false"),
                "supplier_settings": str(defaults["supplier_settings"]).replace("'", '"').replace("True", "true").replace("False", "false"),
                "pos_settings": str(defaults["pos_settings"]).replace("'", '"').replace("True", "true").replace("False", "false"),
                "order_settings": str(defaults["order_settings"]).replace("'", '"').replace("True", "true").replace("False", "false")
            }
        )


def downgrade():
    """Drop tenant_settings table"""
    op.drop_index('ix_tenant_settings_tenant_id', table_name='tenant_settings')
    op.drop_table('tenant_settings')
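Editorial note: the str(...).replace(...) chains above happen to produce valid JSON for these particular defaults, but they would corrupt any value containing an apostrophe, a None, or a string such as "True". A safer sketch of the same seeding step using json.dumps (same table, same bind parameters):

import json

defaults = get_default_settings()
for tenant_id in tenant_ids:
    connection.execute(
        sa.text("""
            INSERT INTO tenant_settings (
                id, tenant_id, procurement_settings, inventory_settings,
                production_settings, supplier_settings, pos_settings, order_settings
            ) VALUES (
                :id, :tenant_id, :procurement_settings::jsonb, :inventory_settings::jsonb,
                :production_settings::jsonb, :supplier_settings::jsonb,
                :pos_settings::jsonb, :order_settings::jsonb
            )
        """),
        {
            "id": str(uuid4()),
            "tenant_id": tenant_id,
            # json.dumps handles quoting, booleans, and None at any nesting depth
            **{key: json.dumps(value) for key, value in defaults.items()},
        },
    )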
@@ -1,43 +0,0 @@
"""add smart procurement settings to tenant settings

Revision ID: 20251025_procurement
Revises: 20251022_0000
Create Date: 2025-10-25

"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
import json

# revision identifiers, used by Alembic.
revision = '20251025_procurement'
down_revision = '20251022_0000'
branch_labels = None
depends_on = None


def upgrade():
    """Add smart procurement flags to existing procurement_settings"""
    # Use a single SQL statement to update all rows at once.
    # This avoids cursor lock issues and is more efficient.
    # Note: Cast to jsonb for merge operator, then back to json
    op.execute("""
        UPDATE tenant_settings
        SET
            procurement_settings = (procurement_settings::jsonb ||
                '{"use_reorder_rules": true, "economic_rounding": true, "respect_storage_limits": true, "use_supplier_minimums": true, "optimize_price_tiers": true}'::jsonb)::json,
            updated_at = now()
    """)


def downgrade():
    """Remove smart procurement flags from procurement_settings"""
    # Use a single SQL statement to remove the keys from all rows.
    # Note: Cast to jsonb for operator, then back to json
    op.execute("""
        UPDATE tenant_settings
        SET
            procurement_settings = (procurement_settings::jsonb - 'use_reorder_rules' - 'economic_rounding' - 'respect_storage_limits' - 'use_supplier_minimums' - 'optimize_price_tiers')::json,
            updated_at = now()
    """)
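Editorial note: jsonb || is a shallow, right-biased merge, so re-running this upgrade overwrites any tenant-specific values for the five flags, and nested objects are replaced wholesale rather than deep-merged. A quick check of the semantics, run inside upgrade() while debugging (the dict shown in the comment reflects how psycopg2 typically deserializes jsonb):

    # Right operand wins on key conflicts; nested objects are not deep-merged.
    row = op.get_bind().execute(
        sa.text("""SELECT '{"a": 1, "b": {"x": 1}}'::jsonb || '{"b": {"y": 2}}'::jsonb""")
    ).scalar_one()
    # row == {'a': 1, 'b': {'y': 2}}  -- note that 'x' is gone, not merged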
@@ -1,43 +0,0 @@
"""add supplier approval workflow settings to tenant settings

Revision ID: 20251025_supplier_approval
Revises: 20251025_procurement
Create Date: 2025-10-25

"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
import json

# revision identifiers, used by Alembic.
revision = '20251025_supplier_approval'
down_revision = '20251025_procurement'
branch_labels = None
depends_on = None


def upgrade():
    """Add supplier approval workflow settings to existing supplier_settings"""
    # Use a single SQL statement to update all rows at once.
    # This avoids cursor lock issues and is more efficient.
    # Note: Cast to jsonb for merge operator, then back to json
    op.execute("""
        UPDATE tenant_settings
        SET
            supplier_settings = (supplier_settings::jsonb ||
                '{"require_supplier_approval": true, "auto_approve_for_admin_owner": true, "approval_required_roles": ["member", "viewer"]}'::jsonb)::json,
            updated_at = now()
    """)


def downgrade():
    """Remove supplier approval workflow settings from supplier_settings"""
    # Use a single SQL statement to remove the keys from all rows.
    # Note: Cast to jsonb for operator, then back to json
    op.execute("""
        UPDATE tenant_settings
        SET
            supplier_settings = (supplier_settings::jsonb - 'require_supplier_approval' - 'auto_approve_for_admin_owner' - 'approval_required_roles')::json,
            updated_at = now()
    """)
@@ -1,103 +0,0 @@
"""remove subscription_tier from tenants

Revision ID: 20251028_remove_sub_tier
Revises: 20251025_supplier_approval
Create Date: 2025-10-28 12:00:00.000000

This migration removes the denormalized subscription_tier column from the tenants table.
The subscription tier is now sourced exclusively from the subscriptions table (single source of truth).
"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = '20251028_remove_sub_tier'
down_revision = '20251025_supplier_approval'
branch_labels = None
depends_on = None


def upgrade():
    """
    Remove subscription_tier column from tenants table
    """
    # Pre-flight check: ensure all tenants have active subscriptions.
    # This is important to avoid breaking the application.
    connection = op.get_bind()

    # Check for tenants without subscriptions
    result = connection.execute(sa.text("""
        SELECT COUNT(*) as count
        FROM tenants t
        LEFT JOIN subscriptions s ON t.id = s.tenant_id AND s.status = 'active'
        WHERE s.id IS NULL
    """))

    orphaned_count = result.fetchone()[0]

    if orphaned_count > 0:
        # Create default subscriptions for orphaned tenants
        connection.execute(sa.text("""
            INSERT INTO subscriptions (
                id, tenant_id, plan, status, monthly_price, billing_cycle,
                max_users, max_locations, max_products, features, created_at, updated_at
            )
            SELECT
                gen_random_uuid(),
                t.id,
                'starter',
                'active',
                49.0,
                'monthly',
                5,
                1,
                50,
                '{"inventory_management": true, "demand_prediction": true}'::jsonb,
                NOW(),
                NOW()
            FROM tenants t
            LEFT JOIN subscriptions s ON t.id = s.tenant_id AND s.status = 'active'
            WHERE s.id IS NULL
        """))

        print(f"Created default subscriptions for {orphaned_count} tenants without subscriptions")

    # Drop the subscription_tier column
    op.drop_column('tenants', 'subscription_tier')

    print("Successfully removed subscription_tier column from tenants table")


def downgrade():
    """
    Re-add subscription_tier column and populate it from the subscriptions table.

    Note: this is for rollback purposes only. Going forward, always use the subscriptions table.
    """
    # Add the column back
    op.add_column('tenants',
        sa.Column('subscription_tier', sa.String(length=50), nullable=True)
    )

    # Populate from subscriptions table
    connection = op.get_bind()
    connection.execute(sa.text("""
        UPDATE tenants t
        SET subscription_tier = s.plan
        FROM subscriptions s
        WHERE t.id = s.tenant_id
        AND s.status = 'active'
    """))

    # Set default for any tenants without active subscriptions
    connection.execute(sa.text("""
        UPDATE tenants
        SET subscription_tier = 'starter'
        WHERE subscription_tier IS NULL
    """))

    # Make it non-nullable after population
    op.alter_column('tenants', 'subscription_tier', nullable=False)

    print("Restored subscription_tier column (downgrade)")
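Editorial note: after this migration, any code that previously read tenants.subscription_tier must resolve the tier through the subscriptions table. A hypothetical async read path (the helper name and fallback are illustrative, not part of this commit):

from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
from app.models.tenants import Subscription

async def get_subscription_tier(db: AsyncSession, tenant_id) -> str:
    """Resolve a tenant's tier from its active subscription (single source of truth)."""
    result = await db.execute(
        select(Subscription.plan)
        .where(Subscription.tenant_id == tenant_id, Subscription.status == "active")
        .limit(1)
    )
    plan = result.scalar_one_or_none()
    return plan or "starter"  # mirrors the default used by this migration's backfill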
@@ -1,102 +0,0 @@
"""add missing settings columns to tenant settings

Revision ID: 20251030_add_missing_settings
Revises: 20251028_remove_sub_tier
Create Date: 2025-10-30

"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from uuid import uuid4
import json

# revision identifiers, used by Alembic.
revision = '20251030_add_missing_settings'
down_revision = '20251028_remove_sub_tier'
branch_labels = None
depends_on = None


def get_default_settings():
    """Get default settings for the new categories"""
    return {
        "replenishment_settings": {
            "projection_horizon_days": 7,
            "service_level": 0.95,
            "buffer_days": 1,
            "enable_auto_replenishment": True,
            "min_order_quantity": 1.0,
            "max_order_quantity": 1000.0,
            "demand_forecast_days": 14
        },
        "safety_stock_settings": {
            "service_level": 0.95,
            "method": "statistical",
            "min_safety_stock": 0.0,
            "max_safety_stock": 100.0,
            "reorder_point_calculation": "safety_stock_plus_lead_time_demand"
        },
        "moq_settings": {
            "consolidation_window_days": 7,
            "allow_early_ordering": True,
            "enable_batch_optimization": True,
            "min_batch_size": 1.0,
            "max_batch_size": 1000.0
        },
        "supplier_selection_settings": {
            "price_weight": 0.40,
            "lead_time_weight": 0.20,
            "quality_weight": 0.20,
            "reliability_weight": 0.20,
            "diversification_threshold": 1000,
            "max_single_percentage": 0.70,
            "enable_supplier_score_optimization": True
        }
    }


def upgrade():
    """Add missing settings columns to tenant_settings table"""
    # Add the missing columns with default values
    default_settings = get_default_settings()

    # Add replenishment_settings column
    op.add_column('tenant_settings',
        sa.Column('replenishment_settings', postgresql.JSON(),
                  nullable=False,
                  server_default=str(default_settings["replenishment_settings"]).replace("'", '"').replace("True", "true").replace("False", "false"))
    )

    # Add safety_stock_settings column
    op.add_column('tenant_settings',
        sa.Column('safety_stock_settings', postgresql.JSON(),
                  nullable=False,
                  server_default=str(default_settings["safety_stock_settings"]).replace("'", '"').replace("True", "true").replace("False", "false"))
    )

    # Add moq_settings column
    op.add_column('tenant_settings',
        sa.Column('moq_settings', postgresql.JSON(),
                  nullable=False,
                  server_default=str(default_settings["moq_settings"]).replace("'", '"').replace("True", "true").replace("False", "false"))
    )

    # Add supplier_selection_settings column
    op.add_column('tenant_settings',
        sa.Column('supplier_selection_settings', postgresql.JSON(),
                  nullable=False,
                  server_default=str(default_settings["supplier_selection_settings"]).replace("'", '"').replace("True", "true").replace("False", "false"))
    )

    # Update the updated_at timestamp for all existing rows
    connection = op.get_bind()
    connection.execute(sa.text("UPDATE tenant_settings SET updated_at = now()"))


def downgrade():
    """Remove the added settings columns from tenant_settings table"""
    op.drop_column('tenant_settings', 'supplier_selection_settings')
    op.drop_column('tenant_settings', 'moq_settings')
    op.drop_column('tenant_settings', 'safety_stock_settings')
    op.drop_column('tenant_settings', 'replenishment_settings')
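Editorial note: this migration imports json but never uses it; the server_default strings are built with the same fragile replace() chain called out under the 20251022 migration. A sketch of the likely intended form for one column, assuming the same defaults:

import json

op.add_column(
    'tenant_settings',
    sa.Column(
        'replenishment_settings',
        postgresql.JSON(),
        nullable=False,
        # json.dumps always emits valid JSON, regardless of the values inside
        server_default=json.dumps(default_settings["replenishment_settings"]),
    ),
)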
@@ -176,15 +176,25 @@ async def seed_tenants(db: AsyncSession) -> dict:
    # Create demo subscriptions for all tenants (enterprise tier for full demo access)
    from app.models.tenants import Subscription
    # 'select' is already imported at the top of the file, so no need to import locally

    for tenant_data in TENANTS_DATA:
        tenant_id = tenant_data["id"]

        # Check if subscription already exists
        result = await db.execute(
            select(Subscription).where(Subscription.tenant_id == tenant_id)
        )
        existing_subscription = result.scalars().first()
        try:
            result = await db.execute(
                select(Subscription).where(Subscription.tenant_id == tenant_id)
            )
            existing_subscription = result.scalars().first()
        except Exception as e:
            # If there's a column error (like a missing cancellation_effective_date),
            # the migrations need to be applied first
            if "does not exist" in str(e):
                logger.error("Database schema does not match model. Ensure migrations are applied first.")
                raise
            else:
                raise  # Re-raise if it's a different error

        if not existing_subscription:
            logger.info(