Add subscription feature 9

Urtzi Alfaro
2026-01-16 20:25:45 +01:00
parent fa7b62bd6c
commit 3a7d57ef90
19 changed files with 1833 additions and 985 deletions

View File

@@ -1,78 +0,0 @@
"""add_gdpr_consent_tables
Revision ID: 510cf1184e0b
Revises: 13327ad46a4d
Create Date: 2025-10-15 21:55:40.584671+02:00
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision: str = '510cf1184e0b'
down_revision: Union[str, None] = '13327ad46a4d'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('user_consents',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('user_id', sa.UUID(), nullable=False),
sa.Column('terms_accepted', sa.Boolean(), nullable=False),
sa.Column('privacy_accepted', sa.Boolean(), nullable=False),
sa.Column('marketing_consent', sa.Boolean(), nullable=False),
sa.Column('analytics_consent', sa.Boolean(), nullable=False),
sa.Column('consent_version', sa.String(length=20), nullable=False),
sa.Column('consent_method', sa.String(length=50), nullable=False),
sa.Column('ip_address', sa.String(length=45), nullable=True),
sa.Column('user_agent', sa.Text(), nullable=True),
sa.Column('terms_text_hash', sa.String(length=64), nullable=True),
sa.Column('privacy_text_hash', sa.String(length=64), nullable=True),
sa.Column('consented_at', sa.DateTime(timezone=True), nullable=False),
sa.Column('withdrawn_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('extra_data', postgresql.JSON(astext_type=sa.Text()), nullable=True),
sa.ForeignKeyConstraint(['user_id'], ['users.id'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id')
)
op.create_index('idx_user_consent_consented_at', 'user_consents', ['consented_at'], unique=False)
op.create_index('idx_user_consent_user_id', 'user_consents', ['user_id'], unique=False)
op.create_index(op.f('ix_user_consents_user_id'), 'user_consents', ['user_id'], unique=False)
op.create_table('consent_history',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('user_id', sa.UUID(), nullable=False),
sa.Column('consent_id', sa.UUID(), nullable=True),
sa.Column('action', sa.String(length=50), nullable=False),
sa.Column('consent_snapshot', postgresql.JSON(astext_type=sa.Text()), nullable=False),
sa.Column('ip_address', sa.String(length=45), nullable=True),
sa.Column('user_agent', sa.Text(), nullable=True),
sa.Column('consent_method', sa.String(length=50), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
sa.ForeignKeyConstraint(['consent_id'], ['user_consents.id'], ondelete='SET NULL'),
sa.PrimaryKeyConstraint('id')
)
op.create_index('idx_consent_history_action', 'consent_history', ['action'], unique=False)
op.create_index('idx_consent_history_created_at', 'consent_history', ['created_at'], unique=False)
op.create_index('idx_consent_history_user_id', 'consent_history', ['user_id'], unique=False)
op.create_index(op.f('ix_consent_history_created_at'), 'consent_history', ['created_at'], unique=False)
op.create_index(op.f('ix_consent_history_user_id'), 'consent_history', ['user_id'], unique=False)
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f('ix_consent_history_user_id'), table_name='consent_history')
op.drop_index(op.f('ix_consent_history_created_at'), table_name='consent_history')
op.drop_index('idx_consent_history_user_id', table_name='consent_history')
op.drop_index('idx_consent_history_created_at', table_name='consent_history')
op.drop_index('idx_consent_history_action', table_name='consent_history')
op.drop_table('consent_history')
op.drop_index(op.f('ix_user_consents_user_id'), table_name='user_consents')
op.drop_index('idx_user_consent_user_id', table_name='user_consents')
op.drop_index('idx_user_consent_consented_at', table_name='user_consents')
op.drop_table('user_consents')
# ### end Alembic commands ###

View File

@@ -1,41 +0,0 @@
"""add_payment_columns_to_users
Revision ID: 20260113_add_payment_columns
Revises: 510cf1184e0b
Create Date: 2026-01-13 13:30:00.000000+00:00
Add payment_customer_id and default_payment_method_id columns to users table
to support payment integration.
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '20260113_add_payment_columns'
down_revision: Union[str, None] = '510cf1184e0b'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
# Add payment_customer_id column
op.add_column('users',
sa.Column('payment_customer_id', sa.String(length=255), nullable=True))
# Add default_payment_method_id column
op.add_column('users',
sa.Column('default_payment_method_id', sa.String(length=255), nullable=True))
# Create index for payment_customer_id
op.create_index(op.f('ix_users_payment_customer_id'), 'users', ['payment_customer_id'], unique=False)
def downgrade() -> None:
# Drop index first
op.drop_index(op.f('ix_users_payment_customer_id'), table_name='users')
# Drop columns
op.drop_column('users', 'default_payment_method_id')
op.drop_column('users', 'payment_customer_id')

View File

@@ -1,50 +0,0 @@
"""Add password reset token table
Revision ID: 20260116_add_password_reset_token_table
Revises: 20260113_add_payment_columns_to_users.py
Create Date: 2026-01-16 10:00:00.000000
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers
revision = '20260116_add_password_reset_token_table'
down_revision = '20260113_add_payment_columns_to_users.py'
branch_labels = None
depends_on = None
def upgrade() -> None:
# Create password_reset_tokens table
op.create_table(
'password_reset_tokens',
sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('user_id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('token', sa.String(length=255), nullable=False),
sa.Column('expires_at', sa.DateTime(timezone=True), nullable=False),
sa.Column('is_used', sa.Boolean(), nullable=False, default=False),
sa.Column('created_at', sa.DateTime(timezone=True), nullable=False,
server_default=sa.text("timezone('utc', CURRENT_TIMESTAMP)")),
sa.Column('used_at', sa.DateTime(timezone=True), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('token'),
)
# Create indexes
op.create_index('ix_password_reset_tokens_user_id', 'password_reset_tokens', ['user_id'])
op.create_index('ix_password_reset_tokens_token', 'password_reset_tokens', ['token'])
op.create_index('ix_password_reset_tokens_expires_at', 'password_reset_tokens', ['expires_at'])
op.create_index('ix_password_reset_tokens_is_used', 'password_reset_tokens', ['is_used'])
def downgrade() -> None:
# Drop indexes
op.drop_index('ix_password_reset_tokens_is_used', table_name='password_reset_tokens')
op.drop_index('ix_password_reset_tokens_expires_at', table_name='password_reset_tokens')
op.drop_index('ix_password_reset_tokens_token', table_name='password_reset_tokens')
op.drop_index('ix_password_reset_tokens_user_id', table_name='password_reset_tokens')
# Drop table
op.drop_table('password_reset_tokens')

View File

@@ -1,8 +1,15 @@
"""initial_schema_20251015_1229
"""Unified initial schema for auth service
Revision ID: 13327ad46a4d
This migration combines all previous migrations into a single initial schema:
- Initial tables (users, refresh_tokens, login_attempts, audit_logs, onboarding)
- GDPR consent tables (user_consents, consent_history)
- Payment columns added to users table
- Password reset tokens table
- Tenant ID made nullable in audit logs
Revision ID: initial_unified
Revises:
Create Date: 2025-10-15 12:29:13.886996+02:00
Create Date: 2026-01-16 14:00:00.000000
"""
from typing import Sequence, Union
@@ -12,17 +19,163 @@ import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision: str = 'initial_unified'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
# Create all tables in the correct order (respecting foreign key dependencies)
# Base tables without dependencies
op.create_table('users',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('email', sa.String(length=255), nullable=False),
sa.Column('hashed_password', sa.String(length=255), nullable=False),
sa.Column('full_name', sa.String(length=255), nullable=False),
sa.Column('is_active', sa.Boolean(), nullable=True),
sa.Column('is_verified', sa.Boolean(), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('last_login', sa.DateTime(timezone=True), nullable=True),
sa.Column('phone', sa.String(length=20), nullable=True),
sa.Column('language', sa.String(length=10), nullable=True),
sa.Column('timezone', sa.String(length=50), nullable=True),
sa.Column('role', sa.String(length=20), nullable=False),
# Payment-related columns
sa.Column('payment_customer_id', sa.String(length=255), nullable=True),
sa.Column('default_payment_method_id', sa.String(length=255), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_users_email'), 'users', ['email'], unique=True)
op.create_index(op.f('ix_users_payment_customer_id'), 'users', ['payment_customer_id'], unique=False)
op.create_table('login_attempts',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('email', sa.String(length=255), nullable=False),
sa.Column('ip_address', sa.String(length=45), nullable=False),
sa.Column('user_agent', sa.Text(), nullable=True),
sa.Column('success', sa.Boolean(), nullable=True),
sa.Column('failure_reason', sa.String(length=255), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_login_attempts_email'), 'login_attempts', ['email'], unique=False)
# Tables that reference users
op.create_table('refresh_tokens',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('user_id', sa.UUID(), nullable=False),
sa.Column('token', sa.Text(), nullable=False),
sa.Column('token_hash', sa.String(length=255), nullable=True),
sa.Column('expires_at', sa.DateTime(timezone=True), nullable=False),
sa.Column('is_revoked', sa.Boolean(), nullable=False),
sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('revoked_at', sa.DateTime(timezone=True), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('token_hash')
)
op.create_index('ix_refresh_tokens_expires_at', 'refresh_tokens', ['expires_at'], unique=False)
op.create_index('ix_refresh_tokens_token_hash', 'refresh_tokens', ['token_hash'], unique=False)
op.create_index(op.f('ix_refresh_tokens_user_id'), 'refresh_tokens', ['user_id'], unique=False)
op.create_index('ix_refresh_tokens_user_id_active', 'refresh_tokens', ['user_id', 'is_revoked'], unique=False)
op.create_table('user_onboarding_progress',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('user_id', sa.UUID(), nullable=False),
sa.Column('step_name', sa.String(length=50), nullable=False),
sa.Column('completed', sa.Boolean(), nullable=False),
sa.Column('completed_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('step_data', sa.JSON(), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
sa.ForeignKeyConstraint(['user_id'], ['users.id'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('user_id', 'step_name', name='uq_user_step')
)
op.create_index(op.f('ix_user_onboarding_progress_user_id'), 'user_onboarding_progress', ['user_id'], unique=False)
op.create_table('user_onboarding_summary',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('user_id', sa.UUID(), nullable=False),
sa.Column('current_step', sa.String(length=50), nullable=False),
sa.Column('next_step', sa.String(length=50), nullable=True),
sa.Column('completion_percentage', sa.String(length=50), nullable=True),
sa.Column('fully_completed', sa.Boolean(), nullable=True),
sa.Column('steps_completed_count', sa.String(length=50), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('last_activity_at', sa.DateTime(timezone=True), nullable=True),
sa.ForeignKeyConstraint(['user_id'], ['users.id'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_user_onboarding_summary_user_id'), 'user_onboarding_summary', ['user_id'], unique=True)
op.create_table('password_reset_tokens',
sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('user_id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('token', sa.String(length=255), nullable=False),
sa.Column('expires_at', sa.DateTime(timezone=True), nullable=False),
sa.Column('is_used', sa.Boolean(), nullable=False, default=False),
sa.Column('created_at', sa.DateTime(timezone=True), nullable=False,
server_default=sa.text("timezone('utc', CURRENT_TIMESTAMP)")),
sa.Column('used_at', sa.DateTime(timezone=True), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('token'),
)
op.create_index('ix_password_reset_tokens_user_id', 'password_reset_tokens', ['user_id'])
op.create_index('ix_password_reset_tokens_token', 'password_reset_tokens', ['token'])
op.create_index('ix_password_reset_tokens_expires_at', 'password_reset_tokens', ['expires_at'])
op.create_index('ix_password_reset_tokens_is_used', 'password_reset_tokens', ['is_used'])
# GDPR consent tables
op.create_table('user_consents',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('user_id', sa.UUID(), nullable=False),
sa.Column('terms_accepted', sa.Boolean(), nullable=False),
sa.Column('privacy_accepted', sa.Boolean(), nullable=False),
sa.Column('marketing_consent', sa.Boolean(), nullable=False),
sa.Column('analytics_consent', sa.Boolean(), nullable=False),
sa.Column('consent_version', sa.String(length=20), nullable=False),
sa.Column('consent_method', sa.String(length=50), nullable=False),
sa.Column('ip_address', sa.String(length=45), nullable=True),
sa.Column('user_agent', sa.Text(), nullable=True),
sa.Column('terms_text_hash', sa.String(length=64), nullable=True),
sa.Column('privacy_text_hash', sa.String(length=64), nullable=True),
sa.Column('consented_at', sa.DateTime(timezone=True), nullable=False),
sa.Column('withdrawn_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('extra_data', postgresql.JSON(astext_type=sa.Text()), nullable=True),
sa.ForeignKeyConstraint(['user_id'], ['users.id'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id')
)
op.create_index('idx_user_consent_consented_at', 'user_consents', ['consented_at'], unique=False)
op.create_index('idx_user_consent_user_id', 'user_consents', ['user_id'], unique=False)
op.create_index(op.f('ix_user_consents_user_id'), 'user_consents', ['user_id'], unique=False)
op.create_table('consent_history',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('user_id', sa.UUID(), nullable=False),
sa.Column('consent_id', sa.UUID(), nullable=True),
sa.Column('action', sa.String(length=50), nullable=False),
sa.Column('consent_snapshot', postgresql.JSON(astext_type=sa.Text()), nullable=False),
sa.Column('ip_address', sa.String(length=45), nullable=True),
sa.Column('user_agent', sa.Text(), nullable=True),
sa.Column('consent_method', sa.String(length=50), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
sa.ForeignKeyConstraint(['consent_id'], ['user_consents.id'], ondelete='SET NULL'),
sa.PrimaryKeyConstraint('id')
)
op.create_index('idx_consent_history_action', 'consent_history', ['action'], unique=False)
op.create_index('idx_consent_history_created_at', 'consent_history', ['created_at'], unique=False)
op.create_index('idx_consent_history_user_id', 'consent_history', ['user_id'], unique=False)
op.create_index(op.f('ix_consent_history_created_at'), 'consent_history', ['created_at'], unique=False)
op.create_index(op.f('ix_consent_history_user_id'), 'consent_history', ['user_id'], unique=False)
# Audit logs table (with tenant_id nullable as per the last migration)
op.create_table('audit_logs',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('tenant_id', sa.UUID(), nullable=True),  # Made nullable per last migration
sa.Column('user_id', sa.UUID(), nullable=False),
sa.Column('action', sa.String(length=100), nullable=False),
sa.Column('resource_type', sa.String(length=100), nullable=False),
@@ -52,97 +205,10 @@ def upgrade() -> None:
op.create_index(op.f('ix_audit_logs_severity'), 'audit_logs', ['severity'], unique=False)
op.create_index(op.f('ix_audit_logs_tenant_id'), 'audit_logs', ['tenant_id'], unique=False)
op.create_index(op.f('ix_audit_logs_user_id'), 'audit_logs', ['user_id'], unique=False)
def downgrade() -> None:
# Drop tables in reverse order (respecting foreign key dependencies)
op.drop_index(op.f('ix_audit_logs_user_id'), table_name='audit_logs')
op.drop_index(op.f('ix_audit_logs_tenant_id'), table_name='audit_logs')
op.drop_index(op.f('ix_audit_logs_severity'), table_name='audit_logs')
@@ -157,4 +223,40 @@ def downgrade() -> None:
op.drop_index('idx_audit_service_created', table_name='audit_logs')
op.drop_index('idx_audit_resource_type_action', table_name='audit_logs')
op.drop_table('audit_logs')
op.drop_index(op.f('ix_consent_history_user_id'), table_name='consent_history')
op.drop_index(op.f('ix_consent_history_created_at'), table_name='consent_history')
op.drop_index('idx_consent_history_user_id', table_name='consent_history')
op.drop_index('idx_consent_history_created_at', table_name='consent_history')
op.drop_index('idx_consent_history_action', table_name='consent_history')
op.drop_table('consent_history')
op.drop_index(op.f('ix_user_consents_user_id'), table_name='user_consents')
op.drop_index('idx_user_consent_user_id', table_name='user_consents')
op.drop_index('idx_user_consent_consented_at', table_name='user_consents')
op.drop_table('user_consents')
op.drop_index('ix_password_reset_tokens_is_used', table_name='password_reset_tokens')
op.drop_index('ix_password_reset_tokens_expires_at', table_name='password_reset_tokens')
op.drop_index('ix_password_reset_tokens_token', table_name='password_reset_tokens')
op.drop_index('ix_password_reset_tokens_user_id', table_name='password_reset_tokens')
op.drop_table('password_reset_tokens')
op.drop_index(op.f('ix_user_onboarding_summary_user_id'), table_name='user_onboarding_summary')
op.drop_table('user_onboarding_summary')
op.drop_index(op.f('ix_user_onboarding_progress_user_id'), table_name='user_onboarding_progress')
op.drop_table('user_onboarding_progress')
op.drop_index('ix_refresh_tokens_user_id_active', table_name='refresh_tokens')
op.drop_index(op.f('ix_refresh_tokens_user_id'), table_name='refresh_tokens')
op.drop_index('ix_refresh_tokens_token_hash', table_name='refresh_tokens')
op.drop_index('ix_refresh_tokens_expires_at', table_name='refresh_tokens')
op.drop_table('refresh_tokens')
op.drop_index(op.f('ix_login_attempts_email'), table_name='login_attempts')
op.drop_table('login_attempts')
op.drop_index(op.f('ix_users_payment_customer_id'), table_name='users')
op.drop_index(op.f('ix_users_email'), table_name='users')
op.drop_table('users')
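Because the old revision files above are deleted and replaced by the single initial_unified revision, deployments that already ran the previous chain will likely need their Alembic version table re-stamped rather than re-migrated. A minimal sketch using Alembic's command API; the alembic.ini path and the choice of when to stamp versus upgrade are assumptions, not part of this commit:

from alembic import command
from alembic.config import Config

cfg = Config("alembic.ini")  # assumed location of the Alembic config

# Fresh database: create the full schema from the unified revision
command.upgrade(cfg, "head")

# Existing database that already has these tables (previously migrated with the
# deleted revision files): mark it as current without re-running any DDL
# command.stamp(cfg, "initial_unified")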

View File

@@ -737,15 +737,27 @@ async def validate_plan_upgrade(
async def upgrade_subscription_plan(
tenant_id: str = Path(..., description="Tenant ID"),
new_plan: str = Query(..., description="New plan name"),
billing_cycle: str = Query("monthly", description="Billing cycle (monthly/yearly)"),
current_user: Dict[str, Any] = Depends(get_current_user_dep),
limit_service: SubscriptionLimitService = Depends(get_subscription_limit_service),
orchestration_service: SubscriptionOrchestrationService = Depends(get_subscription_orchestration_service)
) -> Dict[str, Any]:
"""
Upgrade subscription plan for a tenant
This endpoint handles:
- Plan upgrade validation
- Stripe subscription update (preserves trial status if in trial)
- Local database update
- Cache invalidation
- Token refresh for immediate UI update
Trial handling:
- If the user is in trial, they remain in trial after the upgrade
- The upgraded tier's price is charged when the trial ends
"""
try:
# Step 1: Validate upgrade eligibility
validation = await limit_service.validate_plan_upgrade(tenant_id, new_plan)
if not validation.get("can_upgrade", False):
raise HTTPException(
@@ -768,22 +780,77 @@ async def upgrade_subscription_plan(
detail="No active subscription found for this tenant"
)
old_plan = active_subscription.plan
is_trialing = active_subscription.status == 'trialing'
trial_ends_at = active_subscription.trial_ends_at
logger.info("Starting subscription upgrade",
extra={
"tenant_id": tenant_id,
"subscription_id": str(active_subscription.id),
"stripe_subscription_id": active_subscription.subscription_id,
"old_plan": old_plan,
"new_plan": new_plan,
"is_trialing": is_trialing,
"trial_ends_at": str(trial_ends_at) if trial_ends_at else None,
"user_id": current_user["user_id"]
})
# Step 2: Update Stripe subscription if Stripe subscription ID exists
stripe_updated = False
if active_subscription.subscription_id:
try:
# Use orchestration service to handle Stripe update with trial preservation
upgrade_result = await orchestration_service.orchestrate_plan_upgrade(
tenant_id=tenant_id,
new_plan=new_plan,
proration_behavior="none" if is_trialing else "create_prorations",
immediate_change=not is_trialing, # Don't change billing anchor if trialing
billing_cycle=billing_cycle
)
stripe_updated = True
logger.info("Stripe subscription updated successfully",
extra={
"tenant_id": tenant_id,
"stripe_subscription_id": active_subscription.subscription_id,
"upgrade_result": upgrade_result
})
except Exception as stripe_error:
logger.error("Failed to update Stripe subscription, falling back to local update only",
extra={"tenant_id": tenant_id, "error": str(stripe_error)})
# Continue with local update even if Stripe fails
# This ensures the user gets access to features immediately
# Step 3: Update local database
updated_subscription = await subscription_repo.update_subscription_plan(
str(active_subscription.id),
new_plan
)
# Preserve trial status if the subscription was trialing
if is_trialing and trial_ends_at:
# Ensure trial_ends_at is preserved after plan update
await subscription_repo.update_subscription_status(
str(active_subscription.id),
'trialing',
{'trial_ends_at': trial_ends_at}
)
await session.commit()
logger.info("Subscription plan upgraded successfully",
logger.info("Subscription plan upgraded successfully in database",
extra={
"tenant_id": tenant_id,
"subscription_id": str(active_subscription.id),
"old_plan": active_subscription.plan,
"old_plan": old_plan,
"new_plan": new_plan,
"stripe_updated": stripe_updated,
"preserved_trial": is_trialing,
"user_id": current_user["user_id"]
})
# Step 4: Invalidate subscription cache
redis_client = None
try:
from app.services.subscription_cache import get_subscription_cache_service
@@ -797,14 +864,17 @@ async def upgrade_subscription_plan(
logger.error("Failed to invalidate subscription cache after upgrade",
extra={"tenant_id": tenant_id, "error": str(cache_error)})
# Step 5: Invalidate tokens for immediate UI refresh
try:
if redis_client:
await _invalidate_tenant_tokens(tenant_id, redis_client)
logger.info("Invalidated all tokens for tenant after subscription upgrade",
extra={"tenant_id": tenant_id})
except Exception as token_error:
logger.error("Failed to invalidate tenant tokens after upgrade",
extra={"tenant_id": tenant_id, "error": str(token_error)})
# Step 6: Publish subscription change event for other services
try:
from shared.messaging import UnifiedEventPublisher
event_publisher = UnifiedEventPublisher()
@@ -813,9 +883,12 @@ async def upgrade_subscription_plan(
tenant_id=tenant_id,
data={
"tenant_id": tenant_id,
"old_tier": active_subscription.plan,
"old_tier": old_plan,
"new_tier": new_plan,
"action": "upgrade"
"action": "upgrade",
"is_trialing": is_trialing,
"trial_ends_at": trial_ends_at.isoformat() if trial_ends_at else None,
"stripe_updated": stripe_updated
}
)
logger.info("Published subscription change event",
@@ -826,10 +899,13 @@ async def upgrade_subscription_plan(
return {
"success": True,
"message": f"Plan successfully upgraded to {new_plan}",
"old_plan": active_subscription.plan,
"message": f"Plan successfully upgraded to {new_plan}" + (" (trial preserved)" if is_trialing else ""),
"old_plan": old_plan,
"new_plan": new_plan,
"new_monthly_price": updated_subscription.monthly_price,
"is_trialing": is_trialing,
"trial_ends_at": trial_ends_at.isoformat() if trial_ends_at else None,
"stripe_updated": stripe_updated,
"validation": validation,
"requires_token_refresh": True
}
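For reference, a hedged client-side sketch of exercising this endpoint. The HTTP method, route prefix, and base URL are assumptions, since the router path is not shown in this diff; only the query parameters and response fields come from the code above.

import asyncio
import httpx

async def upgrade_plan(tenant_id: str, new_plan: str, token: str) -> dict:
    # Assumed route shape; adjust to the service's actual router prefix
    async with httpx.AsyncClient(base_url="https://api.example.com") as client:
        resp = await client.post(
            f"/api/v1/subscriptions/{tenant_id}/upgrade",
            params={"new_plan": new_plan, "billing_cycle": "monthly"},
            headers={"Authorization": f"Bearer {token}"},
        )
        resp.raise_for_status()
        # Response includes old_plan, new_plan, is_trialing and requires_token_refresh
        return resp.json()

# asyncio.run(upgrade_plan("tenant-uuid", "professional", "access-token"))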

View File

@@ -114,10 +114,12 @@ async def register_bakery(
error=str(linking_error))
elif bakery_data.coupon_code:
from app.services.coupon_service import CouponService
coupon_service = CouponService(db)
coupon_validation = await coupon_service.validate_coupon_code(
bakery_data.coupon_code,
tenant_id
)
if not coupon_validation["valid"]:
@@ -131,10 +133,10 @@ async def register_bakery(
detail=coupon_validation["error_message"]
)
success, discount, error = await coupon_service.redeem_coupon(
bakery_data.coupon_code,
tenant_id,
base_trial_days=0
)
if success:
@@ -194,13 +196,15 @@ async def register_bakery(
if coupon_validation and coupon_validation["valid"]:
from app.core.config import settings
from app.services.coupon_service import CouponService
database_manager = create_database_manager(settings.DATABASE_URL, "tenant-service")
async with database_manager.get_session() as session:
coupon_service = CouponService(session)
success, discount, error = await coupon_service.redeem_coupon(
bakery_data.coupon_code,
result.id,
base_trial_days=0
)
if success:

View File

@@ -247,6 +247,50 @@ class SubscriptionRepository(TenantBaseRepository):
error=str(e))
raise DatabaseError(f"Failed to update plan: {str(e)}")
async def update_subscription_status(
self,
subscription_id: str,
status: str,
additional_data: Dict[str, Any] = None
) -> Optional[Subscription]:
"""Update subscription status with optional additional data"""
try:
# Get subscription to find tenant_id for cache invalidation
subscription = await self.get_by_id(subscription_id)
if not subscription:
raise ValidationError(f"Subscription {subscription_id} not found")
update_data = {
"status": status,
"updated_at": datetime.utcnow()
}
# Merge additional data if provided
if additional_data:
update_data.update(additional_data)
updated_subscription = await self.update(subscription_id, update_data)
# Invalidate cache
if subscription.tenant_id:
await self._invalidate_cache(str(subscription.tenant_id))
logger.info("Subscription status updated",
subscription_id=subscription_id,
new_status=status,
additional_data=additional_data)
return updated_subscription
except ValidationError:
raise
except Exception as e:
logger.error("Failed to update subscription status",
subscription_id=subscription_id,
status=status,
error=str(e))
raise DatabaseError(f"Failed to update subscription status: {str(e)}")
async def cancel_subscription(
self,
subscription_id: str,

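The new update_subscription_status method is what the upgrade endpoint uses to keep a subscription in its trial after a plan change. A minimal caller sketch, assuming an already-constructed SubscriptionRepository and a loaded subscription record:

async def keep_trial_after_upgrade(subscription_repo, subscription) -> None:
    # Re-assert 'trialing' and carry the original trial end date forward
    await subscription_repo.update_subscription_status(
        str(subscription.id),
        "trialing",
        {"trial_ends_at": subscription.trial_ends_at},
    )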
View File

@@ -852,7 +852,8 @@ class PaymentService:
proration_behavior: str = "create_prorations",
billing_cycle_anchor: Optional[str] = None,
payment_behavior: str = "error_if_incomplete",
immediate_change: bool = True,
preserve_trial: bool = False
) -> Any:
"""
Update subscription price (plan upgrade/downgrade)
@@ -860,24 +861,33 @@ class PaymentService:
Args:
subscription_id: Stripe subscription ID
new_price_id: New Stripe price ID
proration_behavior: How to handle proration ('create_prorations', 'none', 'always_invoice')
billing_cycle_anchor: Billing cycle anchor ("now" or "unchanged")
payment_behavior: Payment behavior on update
immediate_change: Whether to apply changes immediately
preserve_trial: If True, preserves the trial period after upgrade
Returns:
Updated subscription object with .id, .status, etc. attributes
"""
try:
result = await retry_with_backoff(
lambda: self.stripe_client.update_subscription(
subscription_id,
new_price_id,
proration_behavior=proration_behavior,
preserve_trial=preserve_trial
),
max_retries=3,
exceptions=(SubscriptionCreationFailed,)
)
logger.info("Subscription updated successfully",
subscription_id=subscription_id,
new_price_id=new_price_id,
proration_behavior=proration_behavior,
preserve_trial=preserve_trial,
is_trialing=result.get('is_trialing', False))
# Create wrapper object for compatibility with callers expecting .id, .status etc.
class SubscriptionWrapper:
@@ -887,6 +897,8 @@ class PaymentService:
self.current_period_start = data.get('current_period_start')
self.current_period_end = data.get('current_period_end')
self.customer = data.get('customer_id')
self.trial_end = data.get('trial_end')
self.is_trialing = data.get('is_trialing', False)
return SubscriptionWrapper(result)
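A minimal caller sketch for the extended signature above, assuming a trialing subscription; payment_service is an already-constructed PaymentService and the Stripe IDs are placeholders:

async def upgrade_trialing_subscription(payment_service) -> None:
    updated = await payment_service.update_payment_subscription(
        "sub_123",                      # Stripe subscription ID (placeholder)
        "price_pro_monthly",            # new Stripe price ID (placeholder)
        proration_behavior="none",      # no proration while the trial is active
        billing_cycle_anchor="unchanged",
        immediate_change=False,
        preserve_trial=True,
    )
    # The wrapper exposes the fields callers rely on
    print(updated.status, updated.trial_end, updated.is_trialing)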

View File

@@ -638,17 +638,22 @@ class SubscriptionOrchestrationService:
billing_cycle: str = "monthly"
) -> Dict[str, Any]:
"""
Orchestrate plan upgrade workflow with proration and trial preservation
Args:
tenant_id: Tenant ID
new_plan: New plan name
proration_behavior: Proration behavior ('create_prorations', 'none', 'always_invoice')
immediate_change: Whether to apply changes immediately
billing_cycle: Billing cycle for new plan
Returns:
Dictionary with upgrade results
Trial Handling:
- If the subscription is in trial, the trial period is preserved
- No proration charges are created during the trial
- After the trial ends, the user is charged at the new tier price
"""
try:
logger.info("Starting plan upgrade orchestration",
@@ -665,33 +670,64 @@ class SubscriptionOrchestrationService:
if not subscription.subscription_id:
raise ValidationError(f"Tenant {tenant_id} does not have a Stripe subscription ID")
# Step 1.5: Check if subscription is in trial
is_trialing = subscription.status == 'trialing'
trial_ends_at = subscription.trial_ends_at
logger.info("Subscription trial status",
tenant_id=tenant_id,
is_trialing=is_trialing,
trial_ends_at=str(trial_ends_at) if trial_ends_at else None,
current_plan=subscription.plan)
# For trial subscriptions:
# - No proration charges (proration_behavior='none')
# - Preserve trial period
# - The user gets the new tier's features immediately
# - The user is charged the new tier price when the trial ends
if is_trialing:
proration_behavior = "none"
logger.info("Trial subscription detected, disabling proration",
tenant_id=tenant_id)
# Step 2: Get Stripe price ID for new plan
stripe_price_id = self.payment_service._get_stripe_price_id(new_plan, billing_cycle)
# Step 3: Calculate proration preview (only if not trialing)
proration_details = {}
if not is_trialing:
proration_details = await self.payment_service.calculate_payment_proration(
subscription.subscription_id,
stripe_price_id,
proration_behavior
)
logger.info("Proration calculated for plan upgrade",
tenant_id=tenant_id,
proration_amount=proration_details.get("net_amount", 0))
else:
proration_details = {
"subscription_id": subscription.subscription_id,
"new_price_id": stripe_price_id,
"proration_behavior": proration_behavior,
"net_amount": 0,
"trial_preserved": True
}
logger.info("Proration calculated for plan upgrade",
tenant_id=tenant_id,
proration_amount=proration_details.get("net_amount", 0))
# Step 4: Update in payment provider with trial preservation
updated_stripe_subscription = await self.payment_service.update_payment_subscription(
subscription.subscription_id,
stripe_price_id,
proration_behavior=proration_behavior,
billing_cycle_anchor="now" if immediate_change else "unchanged",
billing_cycle_anchor="now" if immediate_change and not is_trialing else "unchanged",
payment_behavior="error_if_incomplete",
immediate_change=immediate_change,
preserve_trial=is_trialing # Preserve trial if currently trialing
)
logger.info("Plan updated in payment provider",
subscription_id=updated_stripe_subscription.id,
new_status=updated_stripe_subscription.status,
is_trialing=getattr(updated_stripe_subscription, 'is_trialing', False))
# Step 5: Update local subscription record
update_result = await self.subscription_service.update_subscription_plan_record(
@@ -722,8 +758,12 @@ class SubscriptionOrchestrationService:
logger.info("Tenant plan information updated",
tenant_id=tenant_id)
# Add upgrade metadata to result
update_result["immediate_change"] = immediate_change
update_result["is_trialing"] = is_trialing
update_result["trial_preserved"] = is_trialing
if trial_ends_at:
update_result["trial_ends_at"] = trial_ends_at.isoformat()
return update_result
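To summarize the branching above, a small illustrative helper (not part of the codebase) that mirrors the effective Stripe update parameters the orchestration chooses for trialing versus non-trialing subscriptions:

def effective_upgrade_params(is_trialing: bool, immediate_change: bool = True) -> dict:
    # Mirrors orchestrate_plan_upgrade: trials get no proration, keep their
    # billing anchor, and have preserve_trial set on the Stripe update
    return {
        "proration_behavior": "none" if is_trialing else "create_prorations",
        "billing_cycle_anchor": "now" if immediate_change and not is_trialing else "unchanged",
        "preserve_trial": is_trialing,
    }

assert effective_upgrade_params(is_trialing=True) == {
    "proration_behavior": "none",
    "billing_cycle_anchor": "unchanged",
    "preserve_trial": True,
}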