Add new frontend
This commit is contained in:
@@ -1,133 +0,0 @@
|
||||
# ================================================================
|
||||
# TESTING DOCKER COMPOSE FILE
|
||||
# docker-compose.test.yml
|
||||
# ================================================================
|
||||
|
||||
version: '3.8'
|
||||
|
||||
# Testing-specific configuration
|
||||
# Usage: docker-compose -f docker-compose.yml -f docker-compose.test.yml up -d
|
||||
|
||||
services:
|
||||
# Test database services (separate from development/production)
|
||||
test-auth-db:
|
||||
image: postgres:15-alpine
|
||||
environment:
|
||||
- POSTGRES_DB=test_auth_db
|
||||
- POSTGRES_USER=test_user
|
||||
- POSTGRES_PASSWORD=test_pass
|
||||
tmpfs:
|
||||
- /var/lib/postgresql/data # Use tmpfs for faster tests
|
||||
|
||||
test-training-db:
|
||||
image: postgres:15-alpine
|
||||
environment:
|
||||
- POSTGRES_DB=test_training_db
|
||||
- POSTGRES_USER=test_user
|
||||
- POSTGRES_PASSWORD=test_pass
|
||||
tmpfs:
|
||||
- /var/lib/postgresql/data
|
||||
|
||||
test-forecasting-db:
|
||||
image: postgres:15-alpine
|
||||
environment:
|
||||
- POSTGRES_DB=test_forecasting_db
|
||||
- POSTGRES_USER=test_user
|
||||
- POSTGRES_PASSWORD=test_pass
|
||||
tmpfs:
|
||||
- /var/lib/postgresql/data
|
||||
|
||||
test-data-db:
|
||||
image: postgres:15-alpine
|
||||
environment:
|
||||
- POSTGRES_DB=test_data_db
|
||||
- POSTGRES_USER=test_user
|
||||
- POSTGRES_PASSWORD=test_pass
|
||||
tmpfs:
|
||||
- /var/lib/postgresql/data
|
||||
|
||||
test-tenant-db:
|
||||
image: postgres:15-alpine
|
||||
environment:
|
||||
- POSTGRES_DB=test_tenant_db
|
||||
- POSTGRES_USER=test_user
|
||||
- POSTGRES_PASSWORD=test_pass
|
||||
tmpfs:
|
||||
- /var/lib/postgresql/data
|
||||
|
||||
test-notification-db:
|
||||
image: postgres:15-alpine
|
||||
environment:
|
||||
- POSTGRES_DB=test_notification_db
|
||||
- POSTGRES_USER=test_user
|
||||
- POSTGRES_PASSWORD=test_pass
|
||||
tmpfs:
|
||||
- /var/lib/postgresql/data
|
||||
|
||||
# Test Redis
|
||||
test-redis:
|
||||
image: redis:7-alpine
|
||||
command: redis-server --appendonly no --save ""
|
||||
tmpfs:
|
||||
- /data
|
||||
|
||||
# Override services to use test databases
|
||||
auth-service:
|
||||
environment:
|
||||
- TESTING=true
|
||||
- DATABASE_URL=postgresql+asyncpg://test_user:test_pass@test-auth-db:5432/test_auth_db
|
||||
- REDIS_URL=redis://test-redis:6379
|
||||
- MOCK_EXTERNAL_APIS=true
|
||||
depends_on:
|
||||
- test-auth-db
|
||||
- test-redis
|
||||
|
||||
training-service:
|
||||
environment:
|
||||
- TESTING=true
|
||||
- DATABASE_URL=postgresql+asyncpg://test_user:test_pass@test-training-db:5432/test_training_db
|
||||
- REDIS_URL=redis://test-redis:6379
|
||||
- MOCK_EXTERNAL_APIS=true
|
||||
depends_on:
|
||||
- test-training-db
|
||||
- test-redis
|
||||
|
||||
forecasting-service:
|
||||
environment:
|
||||
- TESTING=true
|
||||
- DATABASE_URL=postgresql+asyncpg://test_user:test_pass@test-forecasting-db:5432/test_forecasting_db
|
||||
- REDIS_URL=redis://test-redis:6379
|
||||
- MOCK_EXTERNAL_APIS=true
|
||||
depends_on:
|
||||
- test-forecasting-db
|
||||
- test-redis
|
||||
|
||||
data-service:
|
||||
environment:
|
||||
- TESTING=true
|
||||
- DATABASE_URL=postgresql+asyncpg://test_user:test_pass@test-data-db:5432/test_data_db
|
||||
- REDIS_URL=redis://test-redis:6379
|
||||
- MOCK_EXTERNAL_APIS=true
|
||||
depends_on:
|
||||
- test-data-db
|
||||
- test-redis
|
||||
|
||||
tenant-service:
|
||||
environment:
|
||||
- TESTING=true
|
||||
- DATABASE_URL=postgresql+asyncpg://test_user:test_pass@test-tenant-db:5432/test_tenant_db
|
||||
- REDIS_URL=redis://test-redis:6379
|
||||
- MOCK_EXTERNAL_APIS=true
|
||||
depends_on:
|
||||
- test-tenant-db
|
||||
- test-redis
|
||||
|
||||
notification-service:
|
||||
environment:
|
||||
- TESTING=true
|
||||
- DATABASE_URL=postgresql+asyncpg://test_user:test_pass@test-notification-db:5432/test_notification_db
|
||||
- REDIS_URL=redis://test-redis:6379
|
||||
- MOCK_EXTERNAL_APIS=true
|
||||
depends_on:
|
||||
- test-notification-db
|
||||
- test-redis
|
||||
50
frontend/package-lock.json
generated
50
frontend/package-lock.json
generated
@@ -56,21 +56,21 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@emnapi/core": {
|
||||
"version": "1.4.4",
|
||||
"resolved": "https://registry.npmjs.org/@emnapi/core/-/core-1.4.4.tgz",
|
||||
"integrity": "sha512-A9CnAbC6ARNMKcIcrQwq6HeHCjpcBZ5wSx4U01WXCqEKlrzB9F9315WDNHkrs2xbx7YjjSxbUYxuN6EQzpcY2g==",
|
||||
"version": "1.4.5",
|
||||
"resolved": "https://registry.npmjs.org/@emnapi/core/-/core-1.4.5.tgz",
|
||||
"integrity": "sha512-XsLw1dEOpkSX/WucdqUhPWP7hDxSvZiY+fsUC14h+FtQ2Ifni4znbBt8punRX+Uj2JG/uDb8nEHVKvrVlvdZ5Q==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"optional": true,
|
||||
"dependencies": {
|
||||
"@emnapi/wasi-threads": "1.0.3",
|
||||
"@emnapi/wasi-threads": "1.0.4",
|
||||
"tslib": "^2.4.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@emnapi/runtime": {
|
||||
"version": "1.4.4",
|
||||
"resolved": "https://registry.npmjs.org/@emnapi/runtime/-/runtime-1.4.4.tgz",
|
||||
"integrity": "sha512-hHyapA4A3gPaDCNfiqyZUStTMqIkKRshqPIuDOXv1hcBnD4U3l8cP0T1HMCfGRxQ6V64TGCcoswChANyOAwbQg==",
|
||||
"version": "1.4.5",
|
||||
"resolved": "https://registry.npmjs.org/@emnapi/runtime/-/runtime-1.4.5.tgz",
|
||||
"integrity": "sha512-++LApOtY0pEEz1zrd9vy1/zXVaVJJ/EbAF3u0fXIzPJEDtnITsBGbbK0EkM72amhl/R5b+5xx0Y/QhcVOpuulg==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"optional": true,
|
||||
@@ -79,9 +79,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@emnapi/wasi-threads": {
|
||||
"version": "1.0.3",
|
||||
"resolved": "https://registry.npmjs.org/@emnapi/wasi-threads/-/wasi-threads-1.0.3.tgz",
|
||||
"integrity": "sha512-8K5IFFsQqF9wQNJptGbS6FNKgUTsSRYnTqNCG1vPP8jFdjSv18n2mQfJpkt2Oibo9iBEzcDnDxNwKTzC7svlJw==",
|
||||
"version": "1.0.4",
|
||||
"resolved": "https://registry.npmjs.org/@emnapi/wasi-threads/-/wasi-threads-1.0.4.tgz",
|
||||
"integrity": "sha512-PJR+bOmMOPH8AtcTGAyYNiuJ3/Fcoj2XN/gBEWzDIKh254XO+mM9XoXHk5GNEhodxeMznbg7BlRojVbKN+gC6g==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"optional": true,
|
||||
@@ -780,9 +780,9 @@
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/@types/node": {
|
||||
"version": "20.19.8",
|
||||
"resolved": "https://registry.npmjs.org/@types/node/-/node-20.19.8.tgz",
|
||||
"integrity": "sha512-HzbgCY53T6bfu4tT7Aq3TvViJyHjLjPNaAS3HOuMc9pw97KHsUtXNX4L+wu59g1WnjsZSko35MbEqnO58rihhw==",
|
||||
"version": "20.19.9",
|
||||
"resolved": "https://registry.npmjs.org/@types/node/-/node-20.19.9.tgz",
|
||||
"integrity": "sha512-cuVNgarYWZqxRJDQHEB58GEONhOK79QVR/qYx4S7kcUObQvUwvFnYxJuuHUKm2aieN9X3yZB4LZsuYNU1Qphsw==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
@@ -2149,9 +2149,9 @@
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/electron-to-chromium": {
|
||||
"version": "1.5.183",
|
||||
"resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.183.tgz",
|
||||
"integrity": "sha512-vCrDBYjQCAEefWGjlK3EpoSKfKbT10pR4XXPdn65q7snuNOZnthoVpBfZPykmDapOKfoD+MMIPG8ZjKyyc9oHA==",
|
||||
"version": "1.5.189",
|
||||
"resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.189.tgz",
|
||||
"integrity": "sha512-y9D1ntS1ruO/pZ/V2FtLE+JXLQe28XoRpZ7QCCo0T8LdQladzdcOVQZH/IWLVJvCw12OGMb6hYOeOAjntCmJRQ==",
|
||||
"dev": true,
|
||||
"license": "ISC"
|
||||
},
|
||||
@@ -2979,9 +2979,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/form-data": {
|
||||
"version": "4.0.3",
|
||||
"resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.3.tgz",
|
||||
"integrity": "sha512-qsITQPfmvMOSAdeyZ+12I1c+CKSstAFAwu+97zrnWAbIr5u8wfsExUzCesVLC8NgHuRUqNN4Zy6UPWUTRGslcA==",
|
||||
"version": "4.0.4",
|
||||
"resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.4.tgz",
|
||||
"integrity": "sha512-KrGhL9Q4zjj0kiUt5OO4Mr/A/jlI2jDYs5eHBpYHPcBEVSiipAvn2Ko2HnPe20rmcuuvMHNdZFp+4IlGTMF0Ow==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"asynckit": "^0.4.0",
|
||||
@@ -4227,9 +4227,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/napi-postinstall": {
|
||||
"version": "0.3.0",
|
||||
"resolved": "https://registry.npmjs.org/napi-postinstall/-/napi-postinstall-0.3.0.tgz",
|
||||
"integrity": "sha512-M7NqKyhODKV1gRLdkwE7pDsZP2/SC2a2vHkOYh9MCpKMbWVfyVfUw5MaH83Fv6XMjxr5jryUp3IDDL9rlxsTeA==",
|
||||
"version": "0.3.2",
|
||||
"resolved": "https://registry.npmjs.org/napi-postinstall/-/napi-postinstall-0.3.2.tgz",
|
||||
"integrity": "sha512-tWVJxJHmBWLy69PvO96TZMZDrzmw5KeiZBz3RHmiM2XZ9grBJ2WgMAFVVg25nqp3ZjTFUs2Ftw1JhscL3Teliw==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"bin": {
|
||||
@@ -5834,9 +5834,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/tinyglobby/node_modules/picomatch": {
|
||||
"version": "4.0.2",
|
||||
"resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.2.tgz",
|
||||
"integrity": "sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg==",
|
||||
"version": "4.0.3",
|
||||
"resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz",
|
||||
"integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
|
||||
96
frontend/src/api/auth/authService.ts
Normal file
96
frontend/src/api/auth/authService.ts
Normal file
@@ -0,0 +1,96 @@
|
||||
// src/api/auth/authService.ts
|
||||
import { tokenManager } from './tokenManager';
|
||||
import { apiClient } from '../base/apiClient';
|
||||
|
||||
export interface LoginCredentials {
|
||||
email: string;
|
||||
password: string;
|
||||
}
|
||||
|
||||
export interface RegisterData {
|
||||
email: string;
|
||||
password: string;
|
||||
full_name: string;
|
||||
tenant_name?: string;
|
||||
}
|
||||
|
||||
export interface UserProfile {
|
||||
id: string;
|
||||
email: string;
|
||||
full_name: string;
|
||||
tenant_id: string;
|
||||
role: string;
|
||||
is_active: boolean;
|
||||
created_at: string;
|
||||
}
|
||||
|
||||
class AuthService {
|
||||
async login(credentials: LoginCredentials): Promise<UserProfile> {
|
||||
// OAuth2 password flow
|
||||
const formData = new URLSearchParams();
|
||||
formData.append('username', credentials.email);
|
||||
formData.append('password', credentials.password);
|
||||
formData.append('grant_type', 'password');
|
||||
|
||||
const response = await fetch('/api/auth/token', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/x-www-form-urlencoded',
|
||||
},
|
||||
body: formData
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
const error = await response.json();
|
||||
throw new Error(error.detail || 'Login failed');
|
||||
}
|
||||
|
||||
const tokenResponse = await response.json();
|
||||
await tokenManager.storeTokens(tokenResponse);
|
||||
|
||||
// Get user profile
|
||||
return this.getCurrentUser();
|
||||
}
|
||||
|
||||
async register(data: RegisterData): Promise<UserProfile> {
|
||||
const response = await apiClient.post('/auth/register', data);
|
||||
|
||||
// Auto-login after registration
|
||||
await this.login({
|
||||
email: data.email,
|
||||
password: data.password
|
||||
});
|
||||
|
||||
return response;
|
||||
}
|
||||
|
||||
async logout(): Promise<void> {
|
||||
try {
|
||||
await apiClient.post('/auth/logout');
|
||||
} finally {
|
||||
tokenManager.clearTokens();
|
||||
window.location.href = '/login';
|
||||
}
|
||||
}
|
||||
|
||||
async getCurrentUser(): Promise<UserProfile> {
|
||||
return apiClient.get('/auth/me');
|
||||
}
|
||||
|
||||
async updateProfile(updates: Partial<UserProfile>): Promise<UserProfile> {
|
||||
return apiClient.patch('/auth/profile', updates);
|
||||
}
|
||||
|
||||
async changePassword(currentPassword: string, newPassword: string): Promise<void> {
|
||||
await apiClient.post('/auth/change-password', {
|
||||
current_password: currentPassword,
|
||||
new_password: newPassword
|
||||
});
|
||||
}
|
||||
|
||||
isAuthenticated(): boolean {
|
||||
return tokenManager.isAuthenticated();
|
||||
}
|
||||
}
|
||||
|
||||
export const authService = new AuthService();
|
||||
186
frontend/src/api/auth/tokenManager.ts
Normal file
186
frontend/src/api/auth/tokenManager.ts
Normal file
@@ -0,0 +1,186 @@
|
||||
// src/api/auth/tokenManager.ts
|
||||
import { jwtDecode } from 'jwt-decode';
|
||||
|
||||
interface TokenPayload {
|
||||
sub: string;
|
||||
user_id: string;
|
||||
email: string;
|
||||
exp: number;
|
||||
iat: number;
|
||||
}
|
||||
|
||||
interface TokenResponse {
|
||||
access_token: string;
|
||||
refresh_token: string;
|
||||
token_type: string;
|
||||
expires_in: number;
|
||||
}
|
||||
|
||||
class TokenManager {
|
||||
private static instance: TokenManager;
|
||||
private accessToken: string | null = null;
|
||||
private refreshToken: string | null = null;
|
||||
private refreshPromise: Promise<void> | null = null;
|
||||
private tokenExpiry: Date | null = null;
|
||||
|
||||
private constructor() {}
|
||||
|
||||
static getInstance(): TokenManager {
|
||||
if (!TokenManager.instance) {
|
||||
TokenManager.instance = new TokenManager();
|
||||
}
|
||||
return TokenManager.instance;
|
||||
}
|
||||
|
||||
async initialize(): Promise<void> {
|
||||
// Try to restore tokens from secure storage
|
||||
const stored = this.getStoredTokens();
|
||||
if (stored) {
|
||||
this.accessToken = stored.accessToken;
|
||||
this.refreshToken = stored.refreshToken;
|
||||
this.tokenExpiry = new Date(stored.expiry);
|
||||
|
||||
// Check if token needs refresh
|
||||
if (this.isTokenExpired()) {
|
||||
await this.refreshAccessToken();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async storeTokens(response: TokenResponse): Promise<void> {
|
||||
this.accessToken = response.access_token;
|
||||
this.refreshToken = response.refresh_token;
|
||||
|
||||
// Calculate expiry time
|
||||
const expiresIn = response.expires_in || 3600; // Default 1 hour
|
||||
this.tokenExpiry = new Date(Date.now() + expiresIn * 1000);
|
||||
|
||||
// Store securely (not in localStorage for security)
|
||||
this.secureStore({
|
||||
accessToken: this.accessToken,
|
||||
refreshToken: this.refreshToken,
|
||||
expiry: this.tokenExpiry.toISOString()
|
||||
});
|
||||
}
|
||||
|
||||
async getAccessToken(): Promise<string | null> {
|
||||
// Check if token is expired or will expire soon (5 min buffer)
|
||||
if (this.shouldRefreshToken()) {
|
||||
await this.refreshAccessToken();
|
||||
}
|
||||
return this.accessToken;
|
||||
}
|
||||
|
||||
async refreshAccessToken(): Promise<void> {
|
||||
// Prevent multiple simultaneous refresh attempts
|
||||
if (this.refreshPromise) {
|
||||
return this.refreshPromise;
|
||||
}
|
||||
|
||||
this.refreshPromise = this.performTokenRefresh();
|
||||
|
||||
try {
|
||||
await this.refreshPromise;
|
||||
} finally {
|
||||
this.refreshPromise = null;
|
||||
}
|
||||
}
|
||||
|
||||
private async performTokenRefresh(): Promise<void> {
|
||||
if (!this.refreshToken) {
|
||||
throw new Error('No refresh token available');
|
||||
}
|
||||
|
||||
try {
|
||||
const response = await fetch('/api/auth/refresh', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
refresh_token: this.refreshToken
|
||||
})
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error('Token refresh failed');
|
||||
}
|
||||
|
||||
const data: TokenResponse = await response.json();
|
||||
await this.storeTokens(data);
|
||||
} catch (error) {
|
||||
// Clear tokens on refresh failure
|
||||
this.clearTokens();
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
clearTokens(): void {
|
||||
this.accessToken = null;
|
||||
this.refreshToken = null;
|
||||
this.tokenExpiry = null;
|
||||
this.clearSecureStore();
|
||||
}
|
||||
|
||||
isAuthenticated(): boolean {
|
||||
return !!this.accessToken && !this.isTokenExpired();
|
||||
}
|
||||
|
||||
private isTokenExpired(): boolean {
|
||||
if (!this.tokenExpiry) return true;
|
||||
return new Date() >= this.tokenExpiry;
|
||||
}
|
||||
|
||||
private shouldRefreshToken(): boolean {
|
||||
if (!this.tokenExpiry) return true;
|
||||
// Refresh if token expires in less than 5 minutes
|
||||
const bufferTime = 5 * 60 * 1000; // 5 minutes
|
||||
return new Date(Date.now() + bufferTime) >= this.tokenExpiry;
|
||||
}
|
||||
|
||||
// Secure storage implementation
|
||||
private secureStore(data: any): void {
|
||||
// In production, use httpOnly cookies or secure session storage
|
||||
// For now, using sessionStorage with encryption
|
||||
const encrypted = this.encrypt(JSON.stringify(data));
|
||||
sessionStorage.setItem('auth_tokens', encrypted);
|
||||
}
|
||||
|
||||
private getStoredTokens(): any {
|
||||
const stored = sessionStorage.getItem('auth_tokens');
|
||||
if (!stored) return null;
|
||||
|
||||
try {
|
||||
const decrypted = this.decrypt(stored);
|
||||
return JSON.parse(decrypted);
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
private clearSecureStore(): void {
|
||||
sessionStorage.removeItem('auth_tokens');
|
||||
}
|
||||
|
||||
// Simple encryption for demo (use proper encryption in production)
|
||||
private encrypt(data: string): string {
|
||||
return btoa(data);
|
||||
}
|
||||
|
||||
private decrypt(data: string): string {
|
||||
return atob(data);
|
||||
}
|
||||
|
||||
// Get decoded token payload
|
||||
getTokenPayload(): TokenPayload | null {
|
||||
if (!this.accessToken) return null;
|
||||
|
||||
try {
|
||||
return jwtDecode<TokenPayload>(this.accessToken);
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export const tokenManager = TokenManager.getInstance();
|
||||
@@ -1,212 +1,449 @@
|
||||
// frontend/dashboard/src/api/base/apiClient.ts
|
||||
/**
|
||||
* Base API client with authentication and error handling
|
||||
*/
|
||||
// src/api/base/apiClient.ts
|
||||
import { tokenManager } from '../auth/tokenManager';
|
||||
|
||||
import axios, { AxiosInstance, AxiosRequestConfig, AxiosResponse } from 'axios';
|
||||
import { ApiError, TokenResponse } from '../../types/api';
|
||||
|
||||
export interface ApiClientConfig {
|
||||
baseURL?: string;
|
||||
export interface ApiConfig {
|
||||
baseURL: string;
|
||||
timeout?: number;
|
||||
enableAuth?: boolean;
|
||||
enableRetry?: boolean;
|
||||
retryAttempts?: number;
|
||||
retryDelay?: number;
|
||||
}
|
||||
|
||||
export class ApiClient {
|
||||
private client: AxiosInstance;
|
||||
private enableAuth: boolean;
|
||||
private refreshPromise: Promise<string> | null = null;
|
||||
|
||||
constructor(config: ApiClientConfig = {}) {
|
||||
const {
|
||||
baseURL = process.env.REACT_APP_API_URL || 'http://localhost:8000',
|
||||
timeout = 10000,
|
||||
enableAuth = true,
|
||||
enableRetry = true,
|
||||
} = config;
|
||||
|
||||
this.enableAuth = enableAuth;
|
||||
|
||||
this.client = axios.create({
|
||||
baseURL: `${baseURL}/api/v1`,
|
||||
timeout,
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
});
|
||||
|
||||
this.setupInterceptors(enableRetry);
|
||||
export interface ApiError {
|
||||
message: string;
|
||||
code?: string;
|
||||
status?: number;
|
||||
details?: any;
|
||||
}
|
||||
|
||||
private setupInterceptors(enableRetry: boolean) {
|
||||
// Request interceptor - add auth token
|
||||
this.client.interceptors.request.use(
|
||||
(config) => {
|
||||
if (this.enableAuth) {
|
||||
const token = this.getStoredToken();
|
||||
export interface RequestConfig extends RequestInit {
|
||||
params?: Record<string, any>;
|
||||
timeout?: number;
|
||||
retry?: boolean;
|
||||
retryAttempts?: number;
|
||||
}
|
||||
|
||||
type Interceptor<T> = (value: T) => T | Promise<T>;
|
||||
|
||||
class ApiClient {
|
||||
private config: ApiConfig;
|
||||
private requestInterceptors: Interceptor<RequestConfig>[] = [];
|
||||
private responseInterceptors: {
|
||||
fulfilled: Interceptor<Response>;
|
||||
rejected: Interceptor<any>;
|
||||
}[] = [];
|
||||
|
||||
constructor(config: ApiConfig) {
|
||||
this.config = {
|
||||
timeout: 30000,
|
||||
retryAttempts: 3,
|
||||
retryDelay: 1000,
|
||||
...config
|
||||
};
|
||||
|
||||
this.setupDefaultInterceptors();
|
||||
}
|
||||
|
||||
private setupDefaultInterceptors(): void {
|
||||
// Request interceptor for authentication
|
||||
this.addRequestInterceptor(async (config) => {
|
||||
const token = await tokenManager.getAccessToken();
|
||||
if (token) {
|
||||
config.headers.Authorization = `Bearer ${token}`;
|
||||
}
|
||||
config.headers = {
|
||||
...config.headers,
|
||||
'Authorization': `Bearer ${token}`
|
||||
};
|
||||
}
|
||||
return config;
|
||||
},
|
||||
(error) => Promise.reject(error)
|
||||
);
|
||||
});
|
||||
|
||||
// Response interceptor - handle auth errors and retries
|
||||
this.client.interceptors.response.use(
|
||||
// Request interceptor for content type
|
||||
this.addRequestInterceptor((config) => {
|
||||
if (config.body && !(config.body instanceof FormData)) {
|
||||
config.headers = {
|
||||
...config.headers,
|
||||
'Content-Type': 'application/json'
|
||||
};
|
||||
}
|
||||
return config;
|
||||
});
|
||||
|
||||
// Response interceptor for error handling
|
||||
this.addResponseInterceptor(
|
||||
(response) => response,
|
||||
async (error) => {
|
||||
const originalRequest = error.config;
|
||||
|
||||
// Handle 401 errors with token refresh
|
||||
if (
|
||||
error.response?.status === 401 &&
|
||||
this.enableAuth &&
|
||||
!originalRequest._retry
|
||||
) {
|
||||
originalRequest._retry = true;
|
||||
|
||||
if (error.response?.status === 401) {
|
||||
// Try to refresh token
|
||||
try {
|
||||
const newToken = await this.refreshToken();
|
||||
originalRequest.headers.Authorization = `Bearer ${newToken}`;
|
||||
return this.client(originalRequest);
|
||||
await tokenManager.refreshAccessToken();
|
||||
// Retry original request
|
||||
return this.request(error.config);
|
||||
} catch (refreshError) {
|
||||
this.handleAuthFailure();
|
||||
return Promise.reject(refreshError);
|
||||
// Redirect to login
|
||||
window.location.href = '/login';
|
||||
throw refreshError;
|
||||
}
|
||||
}
|
||||
|
||||
// Handle other errors
|
||||
return Promise.reject(this.formatError(error));
|
||||
throw this.transformError(error);
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
private async refreshToken(): Promise<string> {
|
||||
// Prevent multiple simultaneous refresh requests
|
||||
if (this.refreshPromise) {
|
||||
return this.refreshPromise;
|
||||
addRequestInterceptor(interceptor: Interceptor<RequestConfig>): void {
|
||||
this.requestInterceptors.push(interceptor);
|
||||
}
|
||||
|
||||
this.refreshPromise = this.performTokenRefresh();
|
||||
addResponseInterceptor(
|
||||
fulfilled: Interceptor<Response>,
|
||||
rejected: Interceptor<any>
|
||||
): void {
|
||||
this.responseInterceptors.push({ fulfilled, rejected });
|
||||
}
|
||||
|
||||
private async applyRequestInterceptors(config: RequestConfig): Promise<RequestConfig> {
|
||||
let processedConfig = config;
|
||||
for (const interceptor of this.requestInterceptors) {
|
||||
processedConfig = await interceptor(processedConfig);
|
||||
}
|
||||
return processedConfig;
|
||||
}
|
||||
|
||||
private async applyResponseInterceptors(
|
||||
response: Response | Promise<Response>
|
||||
): Promise<Response> {
|
||||
let processedResponse = await response;
|
||||
|
||||
for (const { fulfilled, rejected } of this.responseInterceptors) {
|
||||
try {
|
||||
processedResponse = await fulfilled(processedResponse);
|
||||
} catch (error) {
|
||||
processedResponse = await rejected(error);
|
||||
}
|
||||
}
|
||||
|
||||
return processedResponse;
|
||||
}
|
||||
|
||||
private buildURL(endpoint: string, params?: Record<string, any>): string {
|
||||
const url = new URL(endpoint, this.config.baseURL);
|
||||
|
||||
if (params) {
|
||||
Object.entries(params).forEach(([key, value]) => {
|
||||
if (value !== undefined && value !== null) {
|
||||
url.searchParams.append(key, String(value));
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
return url.toString();
|
||||
}
|
||||
|
||||
private createTimeoutPromise(timeout: number): Promise<never> {
|
||||
return new Promise((_, reject) => {
|
||||
setTimeout(() => {
|
||||
reject(new Error('Request timeout'));
|
||||
}, timeout);
|
||||
});
|
||||
}
|
||||
|
||||
private async executeWithRetry(
|
||||
fn: () => Promise<Response>,
|
||||
attempts: number,
|
||||
delay: number
|
||||
): Promise<Response> {
|
||||
try {
|
||||
return await fn();
|
||||
} catch (error) {
|
||||
if (attempts <= 1) throw error;
|
||||
|
||||
// Check if error is retryable
|
||||
const isRetryable = this.isRetryableError(error);
|
||||
if (!isRetryable) throw error;
|
||||
|
||||
// Wait before retry
|
||||
await new Promise(resolve => setTimeout(resolve, delay));
|
||||
|
||||
// Exponential backoff
|
||||
return this.executeWithRetry(fn, attempts - 1, delay * 2);
|
||||
}
|
||||
}
|
||||
|
||||
private isRetryableError(error: any): boolean {
|
||||
// Network errors or 5xx server errors are retryable
|
||||
if (!error.response) return true;
|
||||
return error.response.status >= 500;
|
||||
}
|
||||
|
||||
private transformError(error: any): ApiError {
|
||||
if (error.response) {
|
||||
// Server responded with error
|
||||
return {
|
||||
message: error.response.data?.detail || error.response.statusText,
|
||||
code: error.response.data?.code,
|
||||
status: error.response.status,
|
||||
details: error.response.data
|
||||
};
|
||||
} else if (error.request) {
|
||||
// Request made but no response
|
||||
return {
|
||||
message: 'Network error - no response from server',
|
||||
code: 'NETWORK_ERROR'
|
||||
};
|
||||
} else {
|
||||
// Something else happened
|
||||
return {
|
||||
message: error.message || 'An unexpected error occurred',
|
||||
code: 'UNKNOWN_ERROR'
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
async request<T = any>(endpoint: string, config: RequestConfig = {}): Promise<T> {
|
||||
const processedConfig = await this.applyRequestInterceptors({
|
||||
...config,
|
||||
headers: {
|
||||
'X-Request-ID': this.generateRequestId(),
|
||||
...config.headers
|
||||
}
|
||||
});
|
||||
|
||||
const url = this.buildURL(endpoint, processedConfig.params);
|
||||
const timeout = processedConfig.timeout || this.config.timeout;
|
||||
const shouldRetry = processedConfig.retry !== false;
|
||||
const retryAttempts = processedConfig.retryAttempts || this.config.retryAttempts;
|
||||
|
||||
const executeRequest = async () => {
|
||||
const fetchPromise = fetch(url, {
|
||||
...processedConfig,
|
||||
signal: processedConfig.signal
|
||||
});
|
||||
|
||||
const timeoutPromise = this.createTimeoutPromise(timeout);
|
||||
|
||||
const response = await Promise.race([fetchPromise, timeoutPromise]);
|
||||
|
||||
if (!response.ok) {
|
||||
throw { response, config: { endpoint, ...processedConfig } };
|
||||
}
|
||||
|
||||
return response;
|
||||
};
|
||||
|
||||
try {
|
||||
const token = await this.refreshPromise;
|
||||
this.refreshPromise = null;
|
||||
return token;
|
||||
const response = shouldRetry
|
||||
? await this.executeWithRetry(
|
||||
executeRequest,
|
||||
retryAttempts,
|
||||
this.config.retryDelay!
|
||||
)
|
||||
: await executeRequest();
|
||||
|
||||
const processedResponse = await this.applyResponseInterceptors(response);
|
||||
|
||||
// Parse response
|
||||
const contentType = processedResponse.headers.get('content-type');
|
||||
if (contentType?.includes('application/json')) {
|
||||
return await processedResponse.json();
|
||||
} else {
|
||||
return await processedResponse.text() as any;
|
||||
}
|
||||
} catch (error) {
|
||||
this.refreshPromise = null;
|
||||
throw await this.applyResponseInterceptors(Promise.reject(error));
|
||||
}
|
||||
}
|
||||
|
||||
// Convenience methods
|
||||
get<T = any>(endpoint: string, config?: RequestConfig): Promise<T> {
|
||||
return this.request<T>(endpoint, { ...config, method: 'GET' });
|
||||
}
|
||||
|
||||
post<T = any>(endpoint: string, data?: any, config?: RequestConfig): Promise<T> {
|
||||
return this.request<T>(endpoint, {
|
||||
...config,
|
||||
method: 'POST',
|
||||
body: data ? JSON.stringify(data) : undefined
|
||||
});
|
||||
}
|
||||
|
||||
put<T = any>(endpoint: string, data?: any, config?: RequestConfig): Promise<T> {
|
||||
return this.request<T>(endpoint, {
|
||||
...config,
|
||||
method: 'PUT',
|
||||
body: data ? JSON.stringify(data) : undefined
|
||||
});
|
||||
}
|
||||
|
||||
patch<T = any>(endpoint: string, data?: any, config?: RequestConfig): Promise<T> {
|
||||
return this.request<T>(endpoint, {
|
||||
...config,
|
||||
method: 'PATCH',
|
||||
body: data ? JSON.stringify(data) : undefined
|
||||
});
|
||||
}
|
||||
|
||||
delete<T = any>(endpoint: string, config?: RequestConfig): Promise<T> {
|
||||
return this.request<T>(endpoint, { ...config, method: 'DELETE' });
|
||||
}
|
||||
|
||||
// File upload
|
||||
upload<T = any>(
|
||||
endpoint: string,
|
||||
file: File,
|
||||
additionalData?: Record<string, any>,
|
||||
config?: RequestConfig
|
||||
): Promise<T> {
|
||||
const formData = new FormData();
|
||||
formData.append('file', file);
|
||||
|
||||
if (additionalData) {
|
||||
Object.entries(additionalData).forEach(([key, value]) => {
|
||||
formData.append(key, value);
|
||||
});
|
||||
}
|
||||
|
||||
return this.request<T>(endpoint, {
|
||||
...config,
|
||||
method: 'POST',
|
||||
body: formData
|
||||
});
|
||||
}
|
||||
|
||||
// WebSocket connection
|
||||
createWebSocket(endpoint: string): WebSocket {
|
||||
const wsUrl = this.config.baseURL.replace(/^http/, 'ws');
|
||||
return new WebSocket(`${wsUrl}${endpoint}`);
|
||||
}
|
||||
|
||||
private generateRequestId(): string {
|
||||
return `${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
|
||||
}
|
||||
}
|
||||
|
||||
// Create default instance
|
||||
export const apiClient = new ApiClient({
|
||||
baseURL: process.env.REACT_APP_API_URL || 'http://localhost:8000/api'
|
||||
});
|
||||
|
||||
// src/api/base/circuitBreaker.ts
|
||||
export class CircuitBreaker {
|
||||
private failures: number = 0;
|
||||
private lastFailureTime: number = 0;
|
||||
private state: 'CLOSED' | 'OPEN' | 'HALF_OPEN' = 'CLOSED';
|
||||
|
||||
constructor(
|
||||
private threshold: number = 5,
|
||||
private timeout: number = 60000 // 1 minute
|
||||
) {}
|
||||
|
||||
async execute<T>(fn: () => Promise<T>): Promise<T> {
|
||||
if (this.state === 'OPEN') {
|
||||
if (Date.now() - this.lastFailureTime > this.timeout) {
|
||||
this.state = 'HALF_OPEN';
|
||||
} else {
|
||||
throw new Error('Circuit breaker is OPEN');
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
const result = await fn();
|
||||
this.onSuccess();
|
||||
return result;
|
||||
} catch (error) {
|
||||
this.onFailure();
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
private async performTokenRefresh(): Promise<string> {
|
||||
const refreshToken = localStorage.getItem('refresh_token');
|
||||
|
||||
if (!refreshToken) {
|
||||
throw new Error('No refresh token available');
|
||||
private onSuccess(): void {
|
||||
this.failures = 0;
|
||||
this.state = 'CLOSED';
|
||||
}
|
||||
|
||||
try {
|
||||
const response = await axios.post<TokenResponse>(
|
||||
`${this.client.defaults.baseURL}/auth/refresh`,
|
||||
{ refresh_token: refreshToken }
|
||||
private onFailure(): void {
|
||||
this.failures++;
|
||||
this.lastFailureTime = Date.now();
|
||||
|
||||
if (this.failures >= this.threshold) {
|
||||
this.state = 'OPEN';
|
||||
}
|
||||
}
|
||||
|
||||
getState(): string {
|
||||
return this.state;
|
||||
}
|
||||
}
|
||||
|
||||
// src/api/services/index.ts
|
||||
import { apiClient } from '../base/apiClient';
|
||||
import { AuthService } from './authService';
|
||||
import { TrainingService } from './trainingService';
|
||||
import { ForecastingService } from './forecastingService';
|
||||
import { DataService } from './dataService';
|
||||
import { TenantService } from './tenantService';
|
||||
|
||||
// Service instances with circuit breakers
|
||||
export const authService = new AuthService(apiClient);
|
||||
export const trainingService = new TrainingService(apiClient);
|
||||
export const forecastingService = new ForecastingService(apiClient);
|
||||
export const dataService = new DataService(apiClient);
|
||||
export const tenantService = new TenantService(apiClient);
|
||||
|
||||
// Export types
|
||||
export * from '../types';
|
||||
|
||||
// src/components/common/ErrorBoundary.tsx
|
||||
import React, { Component, ErrorInfo, ReactNode } from 'react';
|
||||
|
||||
interface Props {
|
||||
children: ReactNode;
|
||||
fallback?: ReactNode;
|
||||
}
|
||||
|
||||
interface State {
|
||||
hasError: boolean;
|
||||
error: Error | null;
|
||||
}
|
||||
|
||||
export class ErrorBoundary extends Component<Props, State> {
|
||||
state: State = {
|
||||
hasError: false,
|
||||
error: null
|
||||
};
|
||||
|
||||
static getDerivedStateFromError(error: Error): State {
|
||||
return { hasError: true, error };
|
||||
}
|
||||
|
||||
componentDidCatch(error: Error, errorInfo: ErrorInfo) {
|
||||
console.error('ErrorBoundary caught:', error, errorInfo);
|
||||
|
||||
// Send error to monitoring service
|
||||
if (process.env.NODE_ENV === 'production') {
|
||||
// logErrorToService(error, errorInfo);
|
||||
}
|
||||
}
|
||||
|
||||
render() {
|
||||
if (this.state.hasError) {
|
||||
return this.props.fallback || (
|
||||
<div className="min-h-screen flex items-center justify-center">
|
||||
<div className="text-center">
|
||||
<h1 className="text-2xl font-bold text-gray-900 mb-4">
|
||||
Algo salió mal
|
||||
</h1>
|
||||
<p className="text-gray-600 mb-6">
|
||||
Ha ocurrido un error inesperado. Por favor, recarga la página.
|
||||
</p>
|
||||
<button
|
||||
onClick={() => window.location.reload()}
|
||||
className="px-4 py-2 bg-indigo-600 text-white rounded-md hover:bg-indigo-700"
|
||||
>
|
||||
Recargar página
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
|
||||
const { access_token, refresh_token: newRefreshToken } = response.data;
|
||||
|
||||
localStorage.setItem('access_token', access_token);
|
||||
localStorage.setItem('refresh_token', newRefreshToken);
|
||||
|
||||
return access_token;
|
||||
} catch (error) {
|
||||
throw new Error('Token refresh failed');
|
||||
}
|
||||
}
|
||||
|
||||
private getStoredToken(): string | null {
|
||||
return localStorage.getItem('access_token');
|
||||
}
|
||||
|
||||
private handleAuthFailure(): void {
|
||||
localStorage.removeItem('access_token');
|
||||
localStorage.removeItem('refresh_token');
|
||||
localStorage.removeItem('user_profile');
|
||||
|
||||
// Redirect to login
|
||||
window.location.href = '/login';
|
||||
}
|
||||
|
||||
private formatError(error: any): ApiError {
|
||||
if (error.response?.data) {
|
||||
return {
|
||||
detail: error.response.data.detail || 'An error occurred',
|
||||
service: error.response.data.service,
|
||||
error_code: error.response.data.error_code,
|
||||
timestamp: new Date().toISOString(),
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
detail: error.message || 'Network error occurred',
|
||||
timestamp: new Date().toISOString(),
|
||||
};
|
||||
}
|
||||
|
||||
// HTTP methods
|
||||
async get<T>(url: string, config?: AxiosRequestConfig): Promise<T> {
|
||||
const response = await this.client.get<T>(url, config);
|
||||
return response.data;
|
||||
}
|
||||
|
||||
async post<T>(url: string, data?: any, config?: AxiosRequestConfig): Promise<T> {
|
||||
const response = await this.client.post<T>(url, data, config);
|
||||
return response.data;
|
||||
}
|
||||
|
||||
async put<T>(url: string, data?: any, config?: AxiosRequestConfig): Promise<T> {
|
||||
const response = await this.client.put<T>(url, data, config);
|
||||
return response.data;
|
||||
}
|
||||
|
||||
async patch<T>(url: string, data?: any, config?: AxiosRequestConfig): Promise<T> {
|
||||
const response = await this.client.patch<T>(url, data, config);
|
||||
return response.data;
|
||||
}
|
||||
|
||||
async delete<T>(url: string, config?: AxiosRequestConfig): Promise<T> {
|
||||
const response = await this.client.delete<T>(url, config);
|
||||
return response.data;
|
||||
}
|
||||
|
||||
// File upload
|
||||
async uploadFile<T>(url: string, file: File, onProgress?: (progress: number) => void): Promise<T> {
|
||||
const formData = new FormData();
|
||||
formData.append('file', file);
|
||||
|
||||
const response = await this.client.post<T>(url, formData, {
|
||||
headers: {
|
||||
'Content-Type': 'multipart/form-data',
|
||||
},
|
||||
onUploadProgress: (progressEvent) => {
|
||||
if (onProgress && progressEvent.total) {
|
||||
const progress = Math.round((progressEvent.loaded * 100) / progressEvent.total);
|
||||
onProgress(progress);
|
||||
}
|
||||
},
|
||||
});
|
||||
|
||||
return response.data;
|
||||
}
|
||||
|
||||
// WebSocket connection helper
|
||||
createWebSocket(path: string): WebSocket {
|
||||
const wsUrl = this.client.defaults.baseURL?.replace('http', 'ws') + path;
|
||||
return new WebSocket(wsUrl);
|
||||
return this.props.children;
|
||||
}
|
||||
}
|
||||
|
||||
// Default client instance
|
||||
export const apiClient = new ApiClient();
|
||||
|
||||
17
frontend/src/api/services/index.ts
Normal file
17
frontend/src/api/services/index.ts
Normal file
@@ -0,0 +1,17 @@
|
||||
// src/api/services/index.ts
|
||||
import { apiClient } from '../base/apiClient';
|
||||
import { AuthService } from './authService';
|
||||
import { TrainingService } from './trainingService';
|
||||
import { ForecastingService } from './forecastingService';
|
||||
import { DataService } from './dataService';
|
||||
import { TenantService } from './tenantService';
|
||||
|
||||
// Service instances with circuit breakers
|
||||
export const authService = new AuthService(apiClient);
|
||||
export const trainingService = new TrainingService(apiClient);
|
||||
export const forecastingService = new ForecastingService(apiClient);
|
||||
export const dataService = new DataService(apiClient);
|
||||
export const tenantService = new TenantService(apiClient);
|
||||
|
||||
// Export types
|
||||
export * from '../types';
|
||||
233
frontend/src/api/websocket/WebSocketManager.ts
Normal file
233
frontend/src/api/websocket/WebSocketManager.ts
Normal file
@@ -0,0 +1,233 @@
|
||||
// src/api/websocket/WebSocketManager.ts
|
||||
import { tokenManager } from '../auth/tokenManager';
|
||||
import { EventEmitter } from 'events';
|
||||
|
||||
export interface WebSocketConfig {
|
||||
url: string;
|
||||
protocols?: string[];
|
||||
reconnect?: boolean;
|
||||
reconnectInterval?: number;
|
||||
maxReconnectAttempts?: number;
|
||||
heartbeatInterval?: number;
|
||||
}
|
||||
|
||||
export interface WebSocketHandlers {
|
||||
onOpen?: () => void;
|
||||
onMessage?: (data: any) => void;
|
||||
onError?: (error: Event) => void;
|
||||
onClose?: (event: CloseEvent) => void;
|
||||
onReconnect?: () => void;
|
||||
onReconnectFailed?: () => void;
|
||||
}
|
||||
|
||||
interface WebSocketConnection {
|
||||
ws: WebSocket;
|
||||
config: WebSocketConfig;
|
||||
handlers: WebSocketHandlers;
|
||||
reconnectAttempts: number;
|
||||
heartbeatTimer?: NodeJS.Timeout;
|
||||
reconnectTimer?: NodeJS.Timeout;
|
||||
}
|
||||
|
||||
class WebSocketManager extends EventEmitter {
|
||||
private static instance: WebSocketManager;
|
||||
private connections: Map<string, WebSocketConnection> = new Map();
|
||||
private baseUrl: string;
|
||||
|
||||
private constructor() {
|
||||
super();
|
||||
this.baseUrl = this.getWebSocketBaseUrl();
|
||||
}
|
||||
|
||||
static getInstance(): WebSocketManager {
|
||||
if (!WebSocketManager.instance) {
|
||||
WebSocketManager.instance = new WebSocketManager();
|
||||
}
|
||||
return WebSocketManager.instance;
|
||||
}
|
||||
|
||||
async connect(
|
||||
endpoint: string,
|
||||
handlers: WebSocketHandlers,
|
||||
config: Partial<WebSocketConfig> = {}
|
||||
): Promise<WebSocket> {
|
||||
// Get authentication token
|
||||
const token = await tokenManager.getAccessToken();
|
||||
if (!token) {
|
||||
throw new Error('Authentication required for WebSocket connection');
|
||||
}
|
||||
|
||||
const fullConfig: WebSocketConfig = {
|
||||
url: `${this.baseUrl}${endpoint}`,
|
||||
reconnect: true,
|
||||
reconnectInterval: 1000,
|
||||
maxReconnectAttempts: 5,
|
||||
heartbeatInterval: 30000,
|
||||
...config
|
||||
};
|
||||
|
||||
// Add token to URL as query parameter
|
||||
const urlWithAuth = `${fullConfig.url}?token=${token}`;
|
||||
|
||||
const ws = new WebSocket(urlWithAuth, fullConfig.protocols);
|
||||
|
||||
const connection: WebSocketConnection = {
|
||||
ws,
|
||||
config: fullConfig,
|
||||
handlers,
|
||||
reconnectAttempts: 0
|
||||
};
|
||||
|
||||
this.setupWebSocketHandlers(endpoint, connection);
|
||||
this.connections.set(endpoint, connection);
|
||||
|
||||
return ws;
|
||||
}
|
||||
|
||||
disconnect(endpoint: string): void {
|
||||
const connection = this.connections.get(endpoint);
|
||||
if (connection) {
|
||||
this.cleanupConnection(connection);
|
||||
this.connections.delete(endpoint);
|
||||
}
|
||||
}
|
||||
|
||||
disconnectAll(): void {
|
||||
this.connections.forEach((connection, endpoint) => {
|
||||
this.cleanupConnection(connection);
|
||||
});
|
||||
this.connections.clear();
|
||||
}
|
||||
|
||||
send(endpoint: string, data: any): void {
|
||||
const connection = this.connections.get(endpoint);
|
||||
if (connection && connection.ws.readyState === WebSocket.OPEN) {
|
||||
connection.ws.send(JSON.stringify(data));
|
||||
} else {
|
||||
console.error(`WebSocket not connected for endpoint: ${endpoint}`);
|
||||
}
|
||||
}
|
||||
|
||||
private setupWebSocketHandlers(endpoint: string, connection: WebSocketConnection): void {
|
||||
const { ws, handlers, config } = connection;
|
||||
|
||||
ws.onopen = () => {
|
||||
console.log(`WebSocket connected: ${endpoint}`);
|
||||
connection.reconnectAttempts = 0;
|
||||
|
||||
// Start heartbeat
|
||||
if (config.heartbeatInterval) {
|
||||
this.startHeartbeat(connection);
|
||||
}
|
||||
|
||||
handlers.onOpen?.();
|
||||
this.emit('connected', endpoint);
|
||||
};
|
||||
|
||||
ws.onmessage = (event: MessageEvent) => {
|
||||
try {
|
||||
const data = JSON.parse(event.data);
|
||||
|
||||
// Handle heartbeat response
|
||||
if (data.type === 'pong') {
|
||||
return;
|
||||
}
|
||||
|
||||
handlers.onMessage?.(data);
|
||||
this.emit('message', { endpoint, data });
|
||||
} catch (error) {
|
||||
console.error('Failed to parse WebSocket message:', error);
|
||||
}
|
||||
};
|
||||
|
||||
ws.onerror = (error: Event) => {
|
||||
console.error(`WebSocket error on ${endpoint}:`, error);
|
||||
handlers.onError?.(error);
|
||||
this.emit('error', { endpoint, error });
|
||||
};
|
||||
|
||||
ws.onclose = (event: CloseEvent) => {
|
||||
console.log(`WebSocket closed: ${endpoint}`, event.code, event.reason);
|
||||
|
||||
// Clear heartbeat
|
||||
if (connection.heartbeatTimer) {
|
||||
clearInterval(connection.heartbeatTimer);
|
||||
}
|
||||
|
||||
handlers.onClose?.(event);
|
||||
this.emit('disconnected', endpoint);
|
||||
|
||||
// Attempt reconnection
|
||||
if (config.reconnect && connection.reconnectAttempts < config.maxReconnectAttempts!) {
|
||||
this.scheduleReconnect(endpoint, connection);
|
||||
} else if (connection.reconnectAttempts >= config.maxReconnectAttempts!) {
|
||||
handlers.onReconnectFailed?.();
|
||||
this.emit('reconnectFailed', endpoint);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
private scheduleReconnect(endpoint: string, connection: WebSocketConnection): void {
|
||||
const { config, handlers, reconnectAttempts } = connection;
|
||||
|
||||
// Exponential backoff
|
||||
const delay = Math.min(
|
||||
config.reconnectInterval! * Math.pow(2, reconnectAttempts),
|
||||
30000 // Max 30 seconds
|
||||
);
|
||||
|
||||
console.log(`Scheduling reconnect for ${endpoint} in ${delay}ms`);
|
||||
|
||||
connection.reconnectTimer = setTimeout(async () => {
|
||||
connection.reconnectAttempts++;
|
||||
|
||||
try {
|
||||
await this.connect(endpoint, handlers, config);
|
||||
handlers.onReconnect?.();
|
||||
this.emit('reconnected', endpoint);
|
||||
} catch (error) {
|
||||
console.error(`Reconnection failed for ${endpoint}:`, error);
|
||||
}
|
||||
}, delay);
|
||||
}
|
||||
|
||||
private startHeartbeat(connection: WebSocketConnection): void {
|
||||
connection.heartbeatTimer = setInterval(() => {
|
||||
if (connection.ws.readyState === WebSocket.OPEN) {
|
||||
connection.ws.send(JSON.stringify({ type: 'ping' }));
|
||||
}
|
||||
}, connection.config.heartbeatInterval!);
|
||||
}
|
||||
|
||||
private cleanupConnection(connection: WebSocketConnection): void {
|
||||
if (connection.heartbeatTimer) {
|
||||
clearInterval(connection.heartbeatTimer);
|
||||
}
|
||||
|
||||
if (connection.reconnectTimer) {
|
||||
clearTimeout(connection.reconnectTimer);
|
||||
}
|
||||
|
||||
if (connection.ws.readyState === WebSocket.OPEN) {
|
||||
connection.ws.close();
|
||||
}
|
||||
}
|
||||
|
||||
private getWebSocketBaseUrl(): string {
|
||||
const protocol = window.location.protocol === 'https:' ? 'wss:' : 'ws:';
|
||||
const host = process.env.REACT_APP_WS_URL || window.location.host;
|
||||
return `${protocol}//${host}/ws`;
|
||||
}
|
||||
|
||||
// Get connection status
|
||||
getConnectionStatus(endpoint: string): number {
|
||||
const connection = this.connections.get(endpoint);
|
||||
return connection ? connection.ws.readyState : WebSocket.CLOSED;
|
||||
}
|
||||
|
||||
isConnected(endpoint: string): boolean {
|
||||
return this.getConnectionStatus(endpoint) === WebSocket.OPEN;
|
||||
}
|
||||
}
|
||||
|
||||
export const wsManager = WebSocketManager.getInstance();
|
||||
0
frontend/src/components/auth/ProtectedRoute.tsx
Normal file
0
frontend/src/components/auth/ProtectedRoute.tsx
Normal file
0
frontend/src/components/common/ErrorBoundary.tsx
Normal file
0
frontend/src/components/common/ErrorBoundary.tsx
Normal file
0
frontend/src/components/data/SalesUploader.tsx
Normal file
0
frontend/src/components/data/SalesUploader.tsx
Normal file
@@ -1,30 +1,20 @@
|
||||
import React, { createContext, useState, useContext, useEffect } from 'react';
|
||||
import api from '../api/api';
|
||||
import { useRouter } from 'next/router';
|
||||
import axios from 'axios';
|
||||
|
||||
interface User {
|
||||
id: string;
|
||||
email: string;
|
||||
full_name: string;
|
||||
tenant_id: string;
|
||||
}
|
||||
|
||||
interface Tenant {
|
||||
id: string;
|
||||
name: string;
|
||||
subdomain: string;
|
||||
}
|
||||
// src/contexts/AuthContext.tsx
|
||||
import React, { createContext, useContext, useEffect, useState, useCallback } from 'react';
|
||||
import { authService, UserProfile } from '../api/auth/authService';
|
||||
import { tokenManager } from '../api/auth/tokenManager';
|
||||
|
||||
interface AuthContextType {
|
||||
user: User | null;
|
||||
tenant: Tenant | null;
|
||||
user: UserProfile | null;
|
||||
isAuthenticated: boolean;
|
||||
isLoading: boolean;
|
||||
login: (email: string, password: string) => Promise<void>;
|
||||
logout: () => void;
|
||||
loading: boolean;
|
||||
register: (data: any) => Promise<void>;
|
||||
logout: () => Promise<void>;
|
||||
updateProfile: (updates: Partial<UserProfile>) => Promise<void>;
|
||||
refreshUser: () => Promise<void>;
|
||||
}
|
||||
|
||||
const AuthContext = createContext<AuthContextType | undefined>(undefined);
|
||||
const AuthContext = createContext<AuthContextType | null>(null);
|
||||
|
||||
export const useAuth = () => {
|
||||
const context = useContext(AuthContext);
|
||||
@@ -35,74 +25,86 @@ export const useAuth = () => {
|
||||
};
|
||||
|
||||
export const AuthProvider: React.FC<{ children: React.ReactNode }> = ({ children }) => {
|
||||
const [user, setUser] = useState<User | null>(null);
|
||||
const [tenant, setTenant] = useState<Tenant | null>(null);
|
||||
const [loading, setLoading] = useState(true);
|
||||
const router = useRouter();
|
||||
const [user, setUser] = useState<UserProfile | null>(null);
|
||||
const [isLoading, setIsLoading] = useState(true);
|
||||
|
||||
// Initialize auth state
|
||||
useEffect(() => {
|
||||
const token = localStorage.getItem('access_token');
|
||||
if (token) {
|
||||
loadUserData();
|
||||
} else {
|
||||
setLoading(false);
|
||||
const initAuth = async () => {
|
||||
try {
|
||||
await tokenManager.initialize();
|
||||
|
||||
if (authService.isAuthenticated()) {
|
||||
const profile = await authService.getCurrentUser();
|
||||
setUser(profile);
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Auth initialization failed:', error);
|
||||
} finally {
|
||||
setIsLoading(false);
|
||||
}
|
||||
};
|
||||
|
||||
initAuth();
|
||||
}, []);
|
||||
|
||||
const login = useCallback(async (email: string, password: string) => {
|
||||
const profile = await authService.login({ email, password });
|
||||
setUser(profile);
|
||||
}, []);
|
||||
|
||||
const register = useCallback(async (data: any) => {
|
||||
const profile = await authService.register(data);
|
||||
setUser(profile);
|
||||
}, []);
|
||||
|
||||
const logout = useCallback(async () => {
|
||||
await authService.logout();
|
||||
setUser(null);
|
||||
}, []);
|
||||
|
||||
const updateProfile = useCallback(async (updates: Partial<UserProfile>) => {
|
||||
const updated = await authService.updateProfile(updates);
|
||||
setUser(updated);
|
||||
}, [updateProfile]);
|
||||
|
||||
const refreshUser = useCallback(async () => {
|
||||
if (authService.isAuthenticated()) {
|
||||
const profile = await authService.getCurrentUser();
|
||||
setUser(profile);
|
||||
}
|
||||
}, []);
|
||||
|
||||
const loadUserData = async () => {
|
||||
// Set up token refresh interval
|
||||
useEffect(() => {
|
||||
if (!user) return;
|
||||
|
||||
// Check token expiry every minute
|
||||
const interval = setInterval(async () => {
|
||||
try {
|
||||
const response = await api.get('/auth/users/me');
|
||||
setUser(response.data.user);
|
||||
setTenant(response.data.tenant);
|
||||
await tokenManager.getAccessToken(); // This will refresh if needed
|
||||
} catch (error) {
|
||||
console.error('Failed to load user data:', error);
|
||||
localStorage.removeItem('access_token');
|
||||
localStorage.removeItem('tenant_id');
|
||||
setUser(null);
|
||||
setTenant(null);
|
||||
} finally {
|
||||
setLoading(false);
|
||||
console.error('Token refresh failed:', error);
|
||||
await logout();
|
||||
}
|
||||
};
|
||||
}, 60000); // 1 minute
|
||||
|
||||
const login = async (email: string, password: string) => {
|
||||
try {
|
||||
// Create form data for OAuth2PasswordRequestForm
|
||||
const formData = new URLSearchParams();
|
||||
formData.append('username', email);
|
||||
formData.append('password', password);
|
||||
|
||||
// Make login request with correct content type
|
||||
const response = await axios.post(
|
||||
`${process.env.NEXT_PUBLIC_API_URL || 'http://localhost:8000'}/api/v1/auth/token`,
|
||||
formData,
|
||||
{
|
||||
headers: {
|
||||
'Content-Type': 'application/x-www-form-urlencoded',
|
||||
},
|
||||
}
|
||||
);
|
||||
|
||||
localStorage.setItem('access_token', response.data.access_token);
|
||||
localStorage.setItem('tenant_id', response.data.tenant_id);
|
||||
|
||||
await loadUserData();
|
||||
} catch (error) {
|
||||
console.error('Login failed:', error);
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
const logout = () => {
|
||||
localStorage.removeItem('access_token');
|
||||
localStorage.removeItem('tenant_id');
|
||||
setUser(null);
|
||||
setTenant(null);
|
||||
router.push('/login');
|
||||
};
|
||||
return () => clearInterval(interval);
|
||||
}, [user, logout]);
|
||||
|
||||
return (
|
||||
<AuthContext.Provider value={{ user, tenant, login, logout, loading }}>
|
||||
<AuthContext.Provider
|
||||
value={{
|
||||
user,
|
||||
isAuthenticated: !!user,
|
||||
isLoading,
|
||||
login,
|
||||
register,
|
||||
logout,
|
||||
updateProfile,
|
||||
refreshUser
|
||||
}}
|
||||
>
|
||||
{children}
|
||||
</AuthContext.Provider>
|
||||
);
|
||||
|
||||
0
frontend/src/hooks/useSessionTimeout.ts
Normal file
0
frontend/src/hooks/useSessionTimeout.ts
Normal file
0
frontend/src/hooks/useTrainingProgress.ts
Normal file
0
frontend/src/hooks/useTrainingProgress.ts
Normal file
0
frontend/src/hooks/useWebSocket.ts
Normal file
0
frontend/src/hooks/useWebSocket.ts
Normal file
File diff suppressed because it is too large
Load Diff
656
frontend/src/setupTests.ts
Normal file
656
frontend/src/setupTests.ts
Normal file
@@ -0,0 +1,656 @@
|
||||
// src/setupTests.ts
|
||||
import '@testing-library/jest-dom';
|
||||
import { server } from './mocks/server';
|
||||
import { cleanup } from '@testing-library/react';
|
||||
|
||||
// Establish API mocking before all tests
|
||||
beforeAll(() => server.listen());
|
||||
|
||||
// Reset any request handlers added during tests
|
||||
afterEach(() => {
|
||||
server.resetHandlers();
|
||||
cleanup();
|
||||
});
|
||||
|
||||
// Clean up after tests
|
||||
afterAll(() => server.close());
|
||||
|
||||
// Mock WebSocket
|
||||
global.WebSocket = jest.fn().mockImplementation(() => ({
|
||||
send: jest.fn(),
|
||||
close: jest.fn(),
|
||||
addEventListener: jest.fn(),
|
||||
removeEventListener: jest.fn(),
|
||||
readyState: 1
|
||||
}));
|
||||
|
||||
// src/mocks/server.ts
|
||||
import { setupServer } from 'msw/node';
|
||||
import { handlers } from './handlers';
|
||||
|
||||
export const server = setupServer(...handlers);
|
||||
|
||||
// src/mocks/handlers/index.ts
|
||||
import { rest } from 'msw';
|
||||
import { authHandlers } from './auth';
|
||||
import { trainingHandlers } from './training';
|
||||
import { dataHandlers } from './data';
|
||||
|
||||
export const handlers = [
|
||||
...authHandlers,
|
||||
...trainingHandlers,
|
||||
...dataHandlers
|
||||
];
|
||||
|
||||
// src/mocks/handlers/auth.ts
|
||||
import { rest } from 'msw';
|
||||
|
||||
export const authHandlers = [
|
||||
rest.post('/api/auth/token', (req, res, ctx) => {
|
||||
return res(
|
||||
ctx.status(200),
|
||||
ctx.json({
|
||||
access_token: 'mock-access-token',
|
||||
refresh_token: 'mock-refresh-token',
|
||||
token_type: 'bearer',
|
||||
expires_in: 3600
|
||||
})
|
||||
);
|
||||
}),
|
||||
|
||||
rest.get('/api/auth/me', (req, res, ctx) => {
|
||||
const token = req.headers.get('Authorization');
|
||||
|
||||
if (!token || token !== 'Bearer mock-access-token') {
|
||||
return res(ctx.status(401), ctx.json({ detail: 'Unauthorized' }));
|
||||
}
|
||||
|
||||
return res(
|
||||
ctx.status(200),
|
||||
ctx.json({
|
||||
id: '123',
|
||||
email: 'test@bakery.com',
|
||||
full_name: 'Test User',
|
||||
tenant_id: 'tenant-123',
|
||||
role: 'admin',
|
||||
is_active: true,
|
||||
created_at: '2024-01-01T00:00:00Z'
|
||||
})
|
||||
);
|
||||
}),
|
||||
|
||||
rest.post('/api/auth/logout', (req, res, ctx) => {
|
||||
return res(ctx.status(204));
|
||||
})
|
||||
];
|
||||
|
||||
// src/mocks/handlers/training.ts
|
||||
import { rest } from 'msw';
|
||||
|
||||
export const trainingHandlers = [
|
||||
rest.post('/api/training/train', (req, res, ctx) => {
|
||||
return res(
|
||||
ctx.status(200),
|
||||
ctx.json({
|
||||
job_id: 'job-123',
|
||||
status: 'pending',
|
||||
progress: 0,
|
||||
current_step: 'Initializing',
|
||||
total_steps: 5,
|
||||
created_at: new Date().toISOString()
|
||||
})
|
||||
);
|
||||
}),
|
||||
|
||||
rest.get('/api/training/status/:jobId', (req, res, ctx) => {
|
||||
const { jobId } = req.params;
|
||||
|
||||
return res(
|
||||
ctx.status(200),
|
||||
ctx.json({
|
||||
job_id: jobId,
|
||||
status: 'running',
|
||||
progress: 45,
|
||||
current_step: 'Training models',
|
||||
total_steps: 5,
|
||||
estimated_time_remaining: 120
|
||||
})
|
||||
);
|
||||
})
|
||||
];
|
||||
|
||||
// src/__tests__/unit/api/tokenManager.test.ts
|
||||
import { tokenManager } from '../../../api/auth/tokenManager';
|
||||
|
||||
describe('TokenManager', () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
sessionStorage.clear();
|
||||
});
|
||||
|
||||
test('should store tokens securely', async () => {
|
||||
const tokenResponse = {
|
||||
access_token: 'test-access-token',
|
||||
refresh_token: 'test-refresh-token',
|
||||
token_type: 'bearer',
|
||||
expires_in: 3600
|
||||
};
|
||||
|
||||
await tokenManager.storeTokens(tokenResponse);
|
||||
const accessToken = await tokenManager.getAccessToken();
|
||||
|
||||
expect(accessToken).toBe('test-access-token');
|
||||
});
|
||||
|
||||
test('should refresh token when expired', async () => {
|
||||
const expiredToken = {
|
||||
access_token: 'expired-token',
|
||||
refresh_token: 'refresh-token',
|
||||
token_type: 'bearer',
|
||||
expires_in: -1 // Already expired
|
||||
};
|
||||
|
||||
await tokenManager.storeTokens(expiredToken);
|
||||
|
||||
// Mock refresh endpoint
|
||||
global.fetch = jest.fn().mockResolvedValueOnce({
|
||||
ok: true,
|
||||
json: async () => ({
|
||||
access_token: 'new-access-token',
|
||||
refresh_token: 'new-refresh-token',
|
||||
token_type: 'bearer',
|
||||
expires_in: 3600
|
||||
})
|
||||
});
|
||||
|
||||
const accessToken = await tokenManager.getAccessToken();
|
||||
expect(accessToken).toBe('new-access-token');
|
||||
});
|
||||
|
||||
test('should clear tokens on logout', () => {
|
||||
tokenManager.clearTokens();
|
||||
expect(tokenManager.isAuthenticated()).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
// src/__tests__/unit/hooks/useWebSocket.test.tsx
|
||||
import { renderHook, act } from '@testing-library/react';
|
||||
import { useWebSocket } from '../../../hooks/useWebSocket';
|
||||
|
||||
describe('useWebSocket', () => {
|
||||
test('should connect to WebSocket', async () => {
|
||||
const onMessage = jest.fn();
|
||||
const onConnect = jest.fn();
|
||||
|
||||
const { result } = renderHook(() =>
|
||||
useWebSocket({
|
||||
endpoint: '/test',
|
||||
onMessage,
|
||||
onConnect
|
||||
})
|
||||
);
|
||||
|
||||
// Wait for connection
|
||||
await act(async () => {
|
||||
await new Promise(resolve => setTimeout(resolve, 100));
|
||||
});
|
||||
|
||||
expect(result.current.isConnected).toBe(true);
|
||||
});
|
||||
|
||||
test('should handle reconnection', async () => {
|
||||
const onReconnect = jest.fn();
|
||||
|
||||
const { result } = renderHook(() =>
|
||||
useWebSocket({
|
||||
endpoint: '/test',
|
||||
onMessage: jest.fn(),
|
||||
onReconnect
|
||||
})
|
||||
);
|
||||
|
||||
// Simulate disconnect and reconnect
|
||||
act(() => {
|
||||
result.current.disconnect();
|
||||
});
|
||||
|
||||
await act(async () => {
|
||||
await result.current.connect();
|
||||
});
|
||||
|
||||
expect(onReconnect).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
// src/__tests__/integration/AuthFlow.test.tsx
|
||||
import React from 'react';
|
||||
import { render, screen, waitFor } from '@testing-library/react';
|
||||
import userEvent from '@testing-library/user-event';
|
||||
import { BrowserRouter } from 'react-router-dom';
|
||||
import { AuthProvider } from '../../../contexts/AuthContext';
|
||||
import { LoginPage } from '../../../pages/LoginPage';
|
||||
import { Dashboard } from '../../../pages/Dashboard/Dashboard';
|
||||
|
||||
const renderWithProviders = (component: React.ReactElement) => {
|
||||
return render(
|
||||
<BrowserRouter>
|
||||
<AuthProvider>{component}</AuthProvider>
|
||||
</BrowserRouter>
|
||||
);
|
||||
};
|
||||
|
||||
describe('Authentication Flow', () => {
|
||||
test('should login and redirect to dashboard', async () => {
|
||||
const user = userEvent.setup();
|
||||
renderWithProviders(<LoginPage />);
|
||||
|
||||
// Fill login form
|
||||
await user.type(screen.getByLabelText(/email/i), 'test@bakery.com');
|
||||
await user.type(screen.getByLabelText(/password/i), 'password123');
|
||||
|
||||
// Submit form
|
||||
await user.click(screen.getByRole('button', { name: /login/i }));
|
||||
|
||||
// Wait for redirect
|
||||
await waitFor(() => {
|
||||
expect(window.location.pathname).toBe('/dashboard');
|
||||
});
|
||||
});
|
||||
|
||||
test('should handle login errors', async () => {
|
||||
const user = userEvent.setup();
|
||||
|
||||
// Mock failed login
|
||||
server.use(
|
||||
rest.post('/api/auth/token', (req, res, ctx) => {
|
||||
return res(
|
||||
ctx.status(401),
|
||||
ctx.json({ detail: 'Invalid credentials' })
|
||||
);
|
||||
})
|
||||
);
|
||||
|
||||
renderWithProviders(<LoginPage />);
|
||||
|
||||
await user.type(screen.getByLabelText(/email/i), 'wrong@email.com');
|
||||
await user.type(screen.getByLabelText(/password/i), 'wrongpass');
|
||||
await user.click(screen.getByRole('button', { name: /login/i }));
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText(/invalid credentials/i)).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
// src/__tests__/integration/Dashboard.test.tsx
|
||||
import React from 'react';
|
||||
import { render, screen, waitFor, within } from '@testing-library/react';
|
||||
import userEvent from '@testing-library/user-event';
|
||||
import { Dashboard } from '../../../pages/Dashboard/Dashboard';
|
||||
import { AuthProvider } from '../../../contexts/AuthContext';
|
||||
|
||||
const renderDashboard = () => {
|
||||
return render(
|
||||
<AuthProvider>
|
||||
<Dashboard />
|
||||
</AuthProvider>
|
||||
);
|
||||
};
|
||||
|
||||
describe('Dashboard Integration', () => {
|
||||
test('should load and display dashboard data', async () => {
|
||||
renderDashboard();
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText(/bakery forecast dashboard/i)).toBeInTheDocument();
|
||||
});
|
||||
|
||||
// Check stats cards
|
||||
expect(screen.getByText(/total sales/i)).toBeInTheDocument();
|
||||
expect(screen.getByText(/total revenue/i)).toBeInTheDocument();
|
||||
expect(screen.getByText(/last training/i)).toBeInTheDocument();
|
||||
expect(screen.getByText(/forecast accuracy/i)).toBeInTheDocument();
|
||||
});
|
||||
|
||||
test('should start training process', async () => {
|
||||
const user = userEvent.setup();
|
||||
renderDashboard();
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByRole('button', { name: /start training/i })).toBeInTheDocument();
|
||||
});
|
||||
|
||||
// Click training button
|
||||
await user.click(screen.getByRole('button', { name: /start training/i }));
|
||||
|
||||
// Check progress card appears
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText(/training progress/i)).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
test('should handle file upload', async () => {
|
||||
const user = userEvent.setup();
|
||||
renderDashboard();
|
||||
|
||||
const file = new File(['sales,data'], 'sales.csv', { type: 'text/csv' });
|
||||
const input = screen.getByLabelText(/upload sales data/i);
|
||||
|
||||
await user.upload(input, file);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText(/upload successful/i)).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
// cypress/e2e/user-workflows.cy.ts
|
||||
describe('End-to-End User Workflows', () => {
|
||||
beforeEach(() => {
|
||||
cy.visit('/');
|
||||
});
|
||||
|
||||
it('should complete full forecasting workflow', () => {
|
||||
// Login
|
||||
cy.get('[data-cy=email-input]').type('test@bakery.com');
|
||||
cy.get('[data-cy=password-input]').type('password123');
|
||||
cy.get('[data-cy=login-button]').click();
|
||||
|
||||
// Wait for dashboard
|
||||
cy.url().should('include', '/dashboard');
|
||||
cy.contains('Bakery Forecast Dashboard').should('be.visible');
|
||||
|
||||
// Upload sales data
|
||||
cy.get('[data-cy=upload-button]').click();
|
||||
cy.get('input[type=file]').selectFile({
|
||||
contents: Cypress.Buffer.from('product,quantity,date\nPan,100,2024-01-01'),
|
||||
fileName: 'sales.csv',
|
||||
mimeType: 'text/csv'
|
||||
});
|
||||
|
||||
// Wait for upload confirmation
|
||||
cy.contains('Upload Successful').should('be.visible');
|
||||
|
||||
// Start training
|
||||
cy.get('[data-cy=train-button]').click();
|
||||
cy.contains('Training Progress').should('be.visible');
|
||||
|
||||
// Verify real-time updates
|
||||
cy.get('[data-cy=progress-bar]', { timeout: 10000 })
|
||||
.should('have.attr', 'aria-valuenow')
|
||||
.and('not.equal', '0');
|
||||
|
||||
// Wait for completion
|
||||
cy.contains('Training Complete', { timeout: 60000 }).should('be.visible');
|
||||
|
||||
// Verify forecasts are displayed
|
||||
cy.get('[data-cy=forecast-chart]').should('have.length.at.least', 1);
|
||||
});
|
||||
|
||||
it('should handle errors gracefully', () => {
|
||||
// Login with invalid credentials
|
||||
cy.get('[data-cy=email-input]').type('invalid@email.com');
|
||||
cy.get('[data-cy=password-input]').type('wrongpassword');
|
||||
cy.get('[data-cy=login-button]').click();
|
||||
|
||||
// Verify error message
|
||||
cy.contains('Invalid credentials').should('be.visible');
|
||||
|
||||
// Login with valid credentials
|
||||
cy.get('[data-cy=email-input]').clear().type('test@bakery.com');
|
||||
cy.get('[data-cy=password-input]').clear().type('password123');
|
||||
cy.get('[data-cy=login-button]').click();
|
||||
|
||||
// Simulate network error during training
|
||||
cy.intercept('POST', '/api/training/train', { statusCode: 500 }).as('trainingError');
|
||||
cy.get('[data-cy=train-button]').click();
|
||||
cy.wait('@trainingError');
|
||||
|
||||
// Verify error notification
|
||||
cy.contains('Failed to start training').should('be.visible');
|
||||
});
|
||||
|
||||
it('should maintain session across tabs', () => {
|
||||
// Login in first tab
|
||||
cy.get('[data-cy=email-input]').type('test@bakery.com');
|
||||
cy.get('[data-cy=password-input]').type('password123');
|
||||
cy.get('[data-cy=login-button]').click();
|
||||
|
||||
// Open new tab (simulated)
|
||||
cy.window().then((win) => {
|
||||
cy.stub(win, 'open').as('newTab');
|
||||
});
|
||||
|
||||
// Verify session persists
|
||||
cy.reload();
|
||||
cy.url().should('include', '/dashboard');
|
||||
cy.contains('Bakery Forecast Dashboard').should('be.visible');
|
||||
});
|
||||
});
|
||||
|
||||
// cypress/support/commands.ts
|
||||
Cypress.Commands.add('login', (email: string, password: string) => {
|
||||
cy.visit('/login');
|
||||
cy.get('[data-cy=email-input]').type(email);
|
||||
cy.get('[data-cy=password-input]').type(password);
|
||||
cy.get('[data-cy=login-button]').click();
|
||||
cy.url().should('include', '/dashboard');
|
||||
});
|
||||
|
||||
Cypress.Commands.add('mockWebSocket', () => {
|
||||
cy.window().then((win) => {
|
||||
win.WebSocket = class MockWebSocket {
|
||||
constructor(url: string) {
|
||||
setTimeout(() => {
|
||||
this.onopen?.({} as Event);
|
||||
}, 100);
|
||||
}
|
||||
send = cy.stub();
|
||||
close = cy.stub();
|
||||
onopen?: (event: Event) => void;
|
||||
onmessage?: (event: MessageEvent) => void;
|
||||
onerror?: (event: Event) => void;
|
||||
onclose?: (event: CloseEvent) => void;
|
||||
} as any;
|
||||
});
|
||||
});
|
||||
|
||||
// src/__tests__/performance/Dashboard.perf.test.tsx
|
||||
import { render } from '@testing-library/react';
|
||||
import { Dashboard } from '../../../pages/Dashboard/Dashboard';
|
||||
import { AuthProvider } from '../../../contexts/AuthContext';
|
||||
|
||||
describe('Dashboard Performance', () => {
|
||||
test('should render within performance budget', async () => {
|
||||
const startTime = performance.now();
|
||||
|
||||
render(
|
||||
<AuthProvider>
|
||||
<Dashboard />
|
||||
</AuthProvider>
|
||||
);
|
||||
|
||||
const endTime = performance.now();
|
||||
const renderTime = endTime - startTime;
|
||||
|
||||
// Should render within 100ms
|
||||
expect(renderTime).toBeLessThan(100);
|
||||
});
|
||||
|
||||
test('should not cause memory leaks', async () => {
|
||||
const initialMemory = (performance as any).memory?.usedJSHeapSize;
|
||||
|
||||
// Render and unmount multiple times
|
||||
for (let i = 0; i < 10; i++) {
|
||||
const { unmount } = render(
|
||||
<AuthProvider>
|
||||
<Dashboard />
|
||||
</AuthProvider>
|
||||
);
|
||||
unmount();
|
||||
}
|
||||
|
||||
// Force garbage collection if available
|
||||
if (global.gc) {
|
||||
global.gc();
|
||||
}
|
||||
|
||||
const finalMemory = (performance as any).memory?.usedJSHeapSize;
|
||||
|
||||
// Memory should not increase significantly
|
||||
if (initialMemory && finalMemory) {
|
||||
const memoryIncrease = finalMemory - initialMemory;
|
||||
expect(memoryIncrease).toBeLessThan(10 * 1024 * 1024); // 10MB threshold
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
// jest.config.js
|
||||
module.exports = {
|
||||
preset: 'ts-jest',
|
||||
testEnvironment: 'jsdom',
|
||||
setupFilesAfterEnv: ['<rootDir>/src/setupTests.ts'],
|
||||
moduleNameMapper: {
|
||||
'^@/(.*): '<rootDir>/src/$1',
|
||||
'\\.(css|less|scss|sass): 'identity-obj-proxy',
|
||||
},
|
||||
transform: {
|
||||
'^.+\\.(ts|tsx): 'ts-jest',
|
||||
},
|
||||
collectCoverageFrom: [
|
||||
'src/**/*.{ts,tsx}',
|
||||
'!src/**/*.d.ts',
|
||||
'!src/mocks/**',
|
||||
'!src/setupTests.ts',
|
||||
],
|
||||
coverageThreshold: {
|
||||
global: {
|
||||
branches: 80,
|
||||
functions: 80,
|
||||
lines: 80,
|
||||
statements: 80,
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
// cypress.config.ts
|
||||
import { defineConfig } from 'cypress';
|
||||
|
||||
export default defineConfig({
|
||||
e2e: {
|
||||
baseUrl: 'http://localhost:3000',
|
||||
viewportWidth: 1280,
|
||||
viewportHeight: 720,
|
||||
video: true,
|
||||
screenshotOnRunFailure: true,
|
||||
defaultCommandTimeout: 10000,
|
||||
requestTimeout: 10000,
|
||||
responseTimeout: 10000,
|
||||
setupNodeEvents(on, config) {
|
||||
// Performance testing
|
||||
on('task', {
|
||||
measurePerformance: () => {
|
||||
return {
|
||||
memory: process.memoryUsage(),
|
||||
cpu: process.cpuUsage(),
|
||||
};
|
||||
},
|
||||
});
|
||||
},
|
||||
},
|
||||
component: {
|
||||
devServer: {
|
||||
framework: 'react',
|
||||
bundler: 'webpack',
|
||||
},
|
||||
specPattern: 'src/**/*.cy.{ts,tsx}',
|
||||
},
|
||||
});
|
||||
|
||||
// package.json (test scripts)
|
||||
{
|
||||
"scripts": {
|
||||
"test": "jest",
|
||||
"test:watch": "jest --watch",
|
||||
"test:coverage": "jest --coverage",
|
||||
"test:e2e": "cypress run",
|
||||
"test:e2e:open": "cypress open",
|
||||
"test:integration": "jest --testMatch='**/*.integration.test.{ts,tsx}'",
|
||||
"test:unit": "jest --testMatch='**/*.unit.test.{ts,tsx}'",
|
||||
"test:perf": "jest --testMatch='**/*.perf.test.{ts,tsx}'",
|
||||
"test:all": "npm run test:unit && npm run test:integration && npm run test:e2e"
|
||||
}
|
||||
}
|
||||
|
||||
// .github/workflows/test.yml
|
||||
name: Tests
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [main, develop]
|
||||
pull_request:
|
||||
branches: [main]
|
||||
|
||||
jobs:
|
||||
unit-tests:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/setup-node@v3
|
||||
with:
|
||||
node-version: '18'
|
||||
cache: 'npm'
|
||||
|
||||
- name: Install dependencies
|
||||
run: npm ci
|
||||
|
||||
- name: Run unit tests
|
||||
run: npm run test:unit
|
||||
|
||||
- name: Upload coverage
|
||||
uses: codecov/codecov-action@v3
|
||||
with:
|
||||
file: ./coverage/lcov.info
|
||||
|
||||
integration-tests:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/setup-node@v3
|
||||
with:
|
||||
node-version: '18'
|
||||
cache: 'npm'
|
||||
|
||||
- name: Install dependencies
|
||||
run: npm ci
|
||||
|
||||
- name: Run integration tests
|
||||
run: npm run test:integration
|
||||
|
||||
e2e-tests:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/setup-node@v3
|
||||
with:
|
||||
node-version: '18'
|
||||
cache: 'npm'
|
||||
|
||||
- name: Install dependencies
|
||||
run: npm ci
|
||||
|
||||
- name: Start application
|
||||
run: |
|
||||
npm run build
|
||||
npm run start &
|
||||
npx wait-on http://localhost:3000
|
||||
|
||||
- name: Run E2E tests
|
||||
run: npm run test:e2e
|
||||
|
||||
- name: Upload test videos
|
||||
if: failure()
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: cypress-videos
|
||||
path: cypress/videos
|
||||
@@ -1,214 +0,0 @@
|
||||
#!/bin/bash
# ================================================================
# FIXED SETUP SCRIPT
# scripts/docker-setup.sh
# ================================================================
# Fixed setup script with proper error handling.
# FIX: the shebang must be the very first line of the script; it previously
# appeared after the comment header and was therefore ignored.
# Usage: docker-setup.sh [environment] [comma-separated-profiles]

# Abort on the first failing command.
set -e

# Positional arguments with sensible defaults.
ENVIRONMENT=${1:-development}
PROFILES=${2:-"development,frontend"}

# ANSI colors for log output.
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
RED='\033[0;31m'
NC='\033[0m'
|
||||
|
||||
# Logging helpers: color-coded, consistent prefixes on stdout.
# %b interprets the backslash escapes stored in the color variables,
# matching the previous `echo -e` behavior.
print_step() {
    printf '%b[STEP]%b %s\n' "$GREEN" "$NC" "$1"
}

print_warning() {
    printf '%b[WARNING]%b %s\n' "$YELLOW" "$NC" "$1"
}

print_error() {
    printf '%b[ERROR]%b %s\n' "$RED" "$NC" "$1"
}
|
||||
|
||||
print_step "Setting up Bakery Forecasting Platform"
echo "Environment: $ENVIRONMENT"
echo "Profiles: $PROFILES"

# A .env file is mandatory: everything below reads configuration from it.
if [ ! -f ".env" ]; then
    print_error ".env file not found!"
    echo "Please create .env file with the content from the artifact."
    echo "Run: cp .env.example .env"
    exit 1
fi

# Validate critical environment variables
print_step "Validating environment variables..."

# Export everything defined in .env into this shell.
set -a
source .env
set +a

# Variables without which the stack cannot start.
critical_vars=(
    "IMAGE_TAG"
    "AUTH_DB_NAME"
    "AUTH_DB_USER"
    "AUTH_DB_PASSWORD"
    "REDIS_PASSWORD"
    "RABBITMQ_USER"
    "RABBITMQ_PASSWORD"
    "GATEWAY_PORT"
    "AUTH_SERVICE_PORT"
)

missing_vars=()
for var in "${critical_vars[@]}"; do
    # ${!var} is indirect expansion: the value of the variable named by $var.
    if [ -z "${!var}" ]; then
        missing_vars+=("$var")
    fi
done

if [ ${#missing_vars[@]} -gt 0 ]; then
    print_error "Missing required environment variables:"
    printf '%s\n' "${missing_vars[@]}"
    exit 1
fi

print_step "Environment variables validated successfully"

# Create necessary directories
print_step "Creating necessary directories..."
mkdir -p infrastructure/{redis,rabbitmq,postgres/init-scripts,monitoring/{prometheus/rules,grafana/{dashboards,datasources}},pgadmin}
mkdir -p backups logs models templates/{email,whatsapp}
mkdir -p shared/{config,auth,database,messaging,monitoring,utils}

# Seed a minimal Prometheus configuration on first run only.
if [ ! -f "infrastructure/monitoring/prometheus/prometheus.yml" ]; then
    print_step "Creating basic Prometheus configuration..."
    cat > infrastructure/monitoring/prometheus/prometheus.yml << 'EOF'
global:
  scrape_interval: 15s

scrape_configs:
  - job_name: 'gateway'
    static_configs:
      - targets: ['gateway:8000']

  - job_name: 'auth-service'
    static_configs:
      - targets: ['auth-service:8000']

  - job_name: 'training-service'
    static_configs:
      - targets: ['training-service:8000']

  - job_name: 'forecasting-service'
    static_configs:
      - targets: ['forecasting-service:8000']

  - job_name: 'data-service'
    static_configs:
      - targets: ['data-service:8000']

  - job_name: 'tenant-service'
    static_configs:
      - targets: ['tenant-service:8000']

  - job_name: 'notification-service'
    static_configs:
      - targets: ['notification-service:8000']
EOF
fi

# Set proper permissions (best-effort).
chmod 644 infrastructure/monitoring/prometheus/prometheus.yml 2>/dev/null || true

# Stop any existing containers
print_step "Stopping existing containers..."
docker-compose down --remove-orphans 2>/dev/null || true

# Turn the comma-separated $PROFILES list into an argv-safe array in
# PROFILE_ARGS. FIX: the previous code spliced an unquoted string
# ($PROFILE_ARGS) into the docker-compose command line, relying on word
# splitting; an array is safe and was duplicated in two case arms.
build_profile_args() {
    local profile
    PROFILE_ARGS=()
    IFS=',' read -ra PROFILE_ARRAY <<< "$PROFILES"
    for profile in "${PROFILE_ARRAY[@]}"; do
        PROFILE_ARGS+=(--profile "$profile")
    done
}

# Build and start services based on environment
case $ENVIRONMENT in
    "development")
        print_step "Starting development environment..."
        build_profile_args

        # Build first to catch any build errors
        print_step "Building services..."
        docker-compose "${PROFILE_ARGS[@]}" build

        # Then start
        print_step "Starting services..."
        docker-compose "${PROFILE_ARGS[@]}" up -d
        ;;
    "production")
        print_step "Starting production environment..."
        docker-compose -f docker-compose.yml -f docker-compose.prod.yml --profile production --profile monitoring up -d --build
        ;;
    "testing")
        print_step "Starting testing environment..."
        docker-compose -f docker-compose.yml -f docker-compose.test.yml up -d --build
        ;;
    *)
        print_step "Starting with custom profiles: $PROFILES"
        build_profile_args
        docker-compose "${PROFILE_ARGS[@]}" build
        docker-compose "${PROFILE_ARGS[@]}" up -d
        ;;
esac

# Give containers a head start before probing.
print_step "Waiting for services to start..."
sleep 10

# Best-effort gateway health probe; failure here is informational only.
print_step "Checking service status..."
if command -v curl &> /dev/null; then
    if curl -f -s "http://localhost:${GATEWAY_PORT}/health" > /dev/null 2>&1; then
        echo "✅ Gateway is responding"
    else
        echo "⚠️ Gateway is not yet responding (this is normal during first startup)"
    fi
else
    echo "⚠️ curl not found - skipping health check"
fi

print_step "Setup completed!"
echo ""
echo "================================================================"
echo -e "${GREEN}SERVICES AVAILABLE${NC}"
echo "================================================================"
echo "- Gateway: http://localhost:${GATEWAY_PORT}"
echo "- API Docs: http://localhost:${GATEWAY_PORT}/docs"
echo "- Dashboard: http://localhost:${DASHBOARD_PORT} (if frontend profile enabled)"
echo "- Grafana: http://localhost:${GRAFANA_PORT} (${GRAFANA_ADMIN_USER}/${GRAFANA_ADMIN_PASSWORD})"
echo "- pgAdmin: http://localhost:${PGADMIN_PORT} (${PGADMIN_EMAIL}/${PGADMIN_PASSWORD})"
echo "- RabbitMQ: http://localhost:${RABBITMQ_MANAGEMENT_PORT} (${RABBITMQ_USER}/${RABBITMQ_PASSWORD})"
echo "- Redis Commander: http://localhost:${REDIS_COMMANDER_PORT} (${REDIS_COMMANDER_USER}/${REDIS_COMMANDER_PASSWORD})"
echo ""
echo "================================================================"
echo -e "${GREEN}NEXT STEPS${NC}"
echo "================================================================"
echo "1. Check service health:"
echo "   ./scripts/docker-health-check.sh"
echo ""
echo "2. View logs:"
echo "   docker-compose logs -f"
echo ""
echo "3. Check specific service:"
echo "   docker-compose logs -f auth-service"
echo ""
echo "If you see any errors, check the logs for more details."
984
scripts/setup.sh
984
scripts/setup.sh
@@ -1,984 +0,0 @@
|
||||
#!/bin/bash
# ================================================================
# UPDATED SETUP SCRIPT
# scripts/setup.sh
# ================================================================
# Bakery Forecasting Platform - Microservices Setup Script
# This script sets up the complete development environment.
# FIX: the shebang must be the very first line of the script; it previously
# appeared after the comment header and was therefore ignored.

# Abort on the first failing command.
set -e

# ANSI colors for log output.
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color
|
||||
|
||||
# Logging helpers: color-coded prefixes; %b interprets the escape sequences
# stored in the color variables (same output as the previous `echo -e`).
print_step() {
    printf '%b[STEP]%b %s\n' "$BLUE" "$NC" "$1"
}

print_success() {
    printf '%b[SUCCESS]%b %s\n' "$GREEN" "$NC" "$1"
}

print_warning() {
    printf '%b[WARNING]%b %s\n' "$YELLOW" "$NC" "$1"
}

print_error() {
    printf '%b[ERROR]%b %s\n' "$RED" "$NC" "$1"
}

# Banner: blank line, ruler, colored title, ruler.
print_header() {
    local ruler='================================================================'
    printf '\n%s\n' "$ruler"
    printf '%b%s%b\n' "$BLUE" "$1" "$NC"
    printf '%s\n' "$ruler"
}
|
||||
|
||||
# Verify Docker, Docker Compose, a running daemon and free host ports
# before doing anything else. Exits non-zero on hard failures; port
# conflicts only produce warnings.
check_prerequisites() {
    print_header "CHECKING PREREQUISITES"

    # Docker binary present?
    if ! command -v docker &> /dev/null; then
        print_error "Docker is not installed. Please install Docker first."
        exit 1
    fi
    print_success "Docker is installed"

    # docker-compose binary present?
    if ! command -v docker-compose &> /dev/null; then
        print_error "Docker Compose is not installed. Please install Docker Compose first."
        exit 1
    fi
    print_success "Docker Compose is installed"

    # Daemon reachable?
    if ! docker info &> /dev/null; then
        print_error "Docker is not running. Please start Docker first."
        exit 1
    fi
    print_success "Docker is running"

    # Every port the stack binds on the host.
    local ports=(8000 8001 8002 8003 8004 8005 8006 3000 3001 3002 5432 6379 5672 15672 9090)
    local used_ports=()
    local port

    for port in "${ports[@]}"; do
        # netstat may be missing; 2>/dev/null keeps this a best-effort check.
        if netstat -tuln 2>/dev/null | grep -q ":$port "; then
            used_ports+=("$port")
        fi
    done

    if [ ${#used_ports[@]} -gt 0 ]; then
        print_warning "The following ports are in use: ${used_ports[*]}"
        print_warning "You may need to stop other services or change port configurations"
    else
        print_success "All required ports are available"
    fi
}
|
||||
|
||||
# Create the full project directory tree. Idempotent: only missing
# directories are created (and reported).
create_directory_structure() {
    print_header "CREATING DIRECTORY STRUCTURE"

    # Core shared-library and gateway directories.
    local dirs=(
        "shared/config"
        "shared/auth"
        "shared/database"
        "shared/messaging"
        "shared/monitoring"
        "shared/utils"
        "gateway/app/core"
        "gateway/app/middleware"
        "gateway/app/routes"
        "gateway/tests"
    )

    # Per-service skeleton: the same sub-tree for every microservice.
    local services=("auth" "training" "forecasting" "data" "tenant" "notification")
    local service subdir
    for service in "${services[@]}"; do
        for subdir in "app/core" "app/models" "app/schemas" "app/services" "app/api" "migrations/versions" "tests"; do
            dirs+=("services/$service/$subdir")
        done
    done

    # Frontend, infrastructure, tests, docs and runtime directories.
    dirs+=(
        "frontend/dashboard/src/components"
        "frontend/dashboard/src/pages"
        "frontend/dashboard/src/services"
        "frontend/dashboard/src/hooks"
        "frontend/dashboard/src/utils"
        "frontend/marketing/src/components"
        "frontend/marketing/src/pages"
        "infrastructure/docker"
        "infrastructure/kubernetes/base"
        "infrastructure/terraform/modules"
        "deployment/nginx"
        "tests/integration"
        "tests/e2e"
        "tests/performance"
        "docs/architecture"
        "docs/api"
        "docs/deployment"
        "scripts"
        "logs"
        "models"
        "templates/email"
        "templates/whatsapp"
    )

    local dir
    for dir in "${dirs[@]}"; do
        if [ ! -d "$dir" ]; then
            mkdir -p "$dir"
            print_success "Created directory: $dir"
        fi
    done
}
|
||||
|
||||
# Seed shared/config with a package marker and a base-settings template.
# Idempotent: existing files are left untouched.
create_shared_config() {
    print_step "Creating shared configuration..."

    # Package marker so shared.config is importable from Python.
    [ -f "shared/config/__init__.py" ] || touch "shared/config/__init__.py"

    if [ ! -f "shared/config/base.py" ]; then
        cat > "shared/config/base.py" << 'EOF'
"""
Base configuration for all microservices
This file should contain the BaseServiceSettings class
"""

# Import the base configuration from the artifact
# The complete base.py content should be copied here from the artifact
EOF
        print_success "Created shared base configuration template"
        print_warning "Please copy the BaseServiceSettings class from the artifact to shared/config/base.py"
    fi
}
|
||||
|
||||
# Create per-service skeleton files (config.py, database.py, auth.py,
# requirements.txt, Dockerfile) for every microservice.
# Idempotent: existing files are never overwritten.
create_service_configs() {
    print_header "CREATING SERVICE CONFIGURATIONS"

    local services=("auth" "training" "forecasting" "data" "tenant" "notification")

    # Each service gets its own Redis DB number: its 0-based position in the
    # services array (auth=0 ... notification=5).
    # FIX: previously this was recomputed per service with a fragile
    # echo|tr|grep -n|cut pipeline; a plain counter yields the same values.
    local redis_db=0
    local service
    for service in "${services[@]}"; do
        print_step "Creating configuration for $service service..."

        local service_dir="services/$service"
        local config_file="$service_dir/app/core/config.py"

        if [ ! -f "$config_file" ]; then
            # Unquoted EOF: ${service...} and $redis_db expand at generation time.
            cat > "$config_file" << EOF
"""
$service service configuration
"""

from shared.config.base import BaseServiceSettings
import os

class ${service^}Settings(BaseServiceSettings):
    """$service service specific settings"""

    # Service Identity
    APP_NAME: str = "${service^} Service"
    SERVICE_NAME: str = "$service-service"
    DESCRIPTION: str = "$service microservice for bakery platform"

    # Database Configuration
    DATABASE_URL: str = os.getenv("${service^^}_DATABASE_URL",
        "postgresql+asyncpg://${service}_user:${service}_pass123@${service}-db:5432/${service}_db")

    # Redis Database (each service gets its own DB number)
    REDIS_DB: int = $redis_db

settings = ${service^}Settings()
EOF
            print_success "Created: $config_file"
        fi

        # Database configuration
        local db_config_file="$service_dir/app/core/database.py"
        if [ ! -f "$db_config_file" ]; then
            cat > "$db_config_file" << EOF
"""
Database configuration for $service service
"""

from shared.database.base import DatabaseManager
from app.core.config import settings

# Initialize database manager
database_manager = DatabaseManager(settings.DATABASE_URL)

# Alias for convenience
get_db = database_manager.get_db
EOF
            print_success "Created: $db_config_file"
        fi

        # Auth configuration
        local auth_config_file="$service_dir/app/core/auth.py"
        if [ ! -f "$auth_config_file" ]; then
            cat > "$auth_config_file" << EOF
"""
Authentication configuration for $service service
"""

from shared.auth.jwt_handler import JWTHandler
from shared.auth.decorators import require_auth, require_role
from app.core.config import settings

# Initialize JWT handler
jwt_handler = JWTHandler(
    secret_key=settings.JWT_SECRET_KEY,
    algorithm=settings.JWT_ALGORITHM,
    access_token_expire_minutes=settings.JWT_ACCESS_TOKEN_EXPIRE_MINUTES
)

# Export commonly used functions
verify_token = jwt_handler.verify_token
create_access_token = jwt_handler.create_access_token
get_current_user = jwt_handler.get_current_user

# Export decorators
__all__ = ['verify_token', 'create_access_token', 'get_current_user', 'require_auth', 'require_role']
EOF
            print_success "Created: $auth_config_file"
        fi

        # Pinned Python dependencies (quoted EOF: no expansion).
        local requirements_file="$service_dir/requirements.txt"
        if [ ! -f "$requirements_file" ]; then
            cat > "$requirements_file" << 'EOF'
# Core FastAPI dependencies
fastapi==0.104.1
uvicorn[standard]==0.24.0
pydantic==2.5.0
pydantic-settings==2.1.0

# Database
sqlalchemy==2.0.23
asyncpg==0.29.0
alembic==1.12.1

# HTTP client
httpx==0.25.2

# Caching and messaging
redis==5.0.1
aio-pika==9.3.0

# Monitoring and logging
prometheus-client==0.17.1
python-json-logger==2.0.4

# Utilities
pytz==2023.3
python-multipart==0.0.6

# Security
python-jose[cryptography]==3.3.0
passlib[bcrypt]==1.7.4
python-dateutil==2.8.2

# ML dependencies (for training and forecasting services)
pandas==2.1.3
numpy==1.25.2
scikit-learn==1.3.2
prophet==1.1.4

# Spanish localization
babel==2.13.1
EOF
            print_success "Created: $requirements_file"
        fi

        # Container image definition (quoted EOF keeps $PYTHONPATH literal).
        local dockerfile="$service_dir/Dockerfile"
        if [ ! -f "$dockerfile" ]; then
            cat > "$dockerfile" << 'EOF'
FROM python:3.11-slim

WORKDIR /app

# Install system dependencies
RUN apt-get update && apt-get install -y \
    gcc \
    g++ \
    curl \
    libpq-dev \
    && rm -rf /var/lib/apt/lists/*

# Copy requirements
COPY requirements.txt .

# Install Python dependencies
RUN pip install --no-cache-dir -r requirements.txt

# Copy shared libraries first
COPY shared/ /app/shared/

# Copy application code
COPY . .

# Add shared libraries to Python path
ENV PYTHONPATH="/app:/app/shared:$PYTHONPATH"

# Create non-root user
RUN useradd -m -u 1000 appuser && chown -R appuser:appuser /app
USER appuser

# Expose port
EXPOSE 8000

# Health check
HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
    CMD curl -f http://localhost:8000/health || exit 1

# Run application
CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"]
EOF
            print_success "Created: $dockerfile"
        fi

        redis_db=$((redis_db + 1))
    done
}
|
||||
|
||||
# Write the gateway's config.py skeleton unless one already exists.
create_gateway_config() {
    print_step "Creating gateway configuration..."

    if [ ! -f "gateway/app/core/config.py" ]; then
        # Quoted EOF: the Python file is written verbatim, no expansion.
        cat > "gateway/app/core/config.py" << 'EOF'
"""
Gateway service configuration
"""

from shared.config.base import BaseServiceSettings
import os
from typing import Dict, List

class GatewaySettings(BaseServiceSettings):
    """Gateway-specific settings"""

    # Service Identity
    APP_NAME: str = "Bakery Forecasting Gateway"
    SERVICE_NAME: str = "gateway"
    DESCRIPTION: str = "API Gateway for Bakery Forecasting Platform"

    # Gateway-specific Redis database
    REDIS_DB: int = 6

    # Gateway doesn't need a database
    DATABASE_URL: str = ""

    # Service Discovery
    CONSUL_URL: str = os.getenv("CONSUL_URL", "http://consul:8500")
    ENABLE_SERVICE_DISCOVERY: bool = os.getenv("ENABLE_SERVICE_DISCOVERY", "false").lower() == "true"

settings = GatewaySettings()
EOF
        print_success "Created gateway configuration"
    fi
}
|
||||
|
||||
# Create environment file
# Seeds a placeholder .env in the project root; an existing .env is left
# untouched so local secrets are never clobbered.
create_environment_file() {
    print_header "CREATING ENVIRONMENT CONFIGURATION"

    # Guard clause: never overwrite an existing environment file.
    if [ -f ".env" ]; then
        print_warning ".env file already exists - skipping creation"
        return 0
    fi

    print_step "Creating .env file from template..."

    # Copy the environment template from the artifact
    cat > ".env" << 'EOF'
# Copy the complete .env content from the artifact here
# This should include all the environment variables defined in the artifact
EOF
    print_success "Created .env file"
    print_warning "Please update the .env file with your actual configuration values"
    print_warning "Especially change JWT_SECRET_KEY, database passwords, and API keys"
}
|
||||
|
||||
# Create Docker Compose configuration
# Writes docker-compose.yml (databases, redis, rabbitmq, all microservices,
# monitoring stack) into the current directory if it does not already exist.
# The YAML is a verbatim heredoc ('EOF' quoted -> no shell expansion);
# changing any line below changes the generated file byte-for-byte.
create_docker_compose() {
    print_header "CREATING DOCKER COMPOSE CONFIGURATION"

    if [ ! -f "docker-compose.yml" ]; then
        print_step "Creating docker-compose.yml..."

        cat > "docker-compose.yml" << 'EOF'
version: '3.8'

services:
  # ============================================================
  # INFRASTRUCTURE SERVICES
  # ============================================================

  # PostgreSQL Databases (one per service)
  auth-db:
    image: postgres:15-alpine
    environment:
      POSTGRES_DB: auth_db
      POSTGRES_USER: auth_user
      POSTGRES_PASSWORD: auth_pass123
    volumes:
      - auth_db_data:/var/lib/postgresql/data
    networks:
      - bakery-network
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U auth_user -d auth_db"]
      interval: 10s
      timeout: 5s
      retries: 5

  training-db:
    image: postgres:15-alpine
    environment:
      POSTGRES_DB: training_db
      POSTGRES_USER: training_user
      POSTGRES_PASSWORD: training_pass123
    volumes:
      - training_db_data:/var/lib/postgresql/data
    networks:
      - bakery-network
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U training_user -d training_db"]
      interval: 10s
      timeout: 5s
      retries: 5

  forecasting-db:
    image: postgres:15-alpine
    environment:
      POSTGRES_DB: forecasting_db
      POSTGRES_USER: forecasting_user
      POSTGRES_PASSWORD: forecasting_pass123
    volumes:
      - forecasting_db_data:/var/lib/postgresql/data
    networks:
      - bakery-network
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U forecasting_user -d forecasting_db"]
      interval: 10s
      timeout: 5s
      retries: 5

  data-db:
    image: postgres:15-alpine
    environment:
      POSTGRES_DB: data_db
      POSTGRES_USER: data_user
      POSTGRES_PASSWORD: data_pass123
    volumes:
      - data_db_data:/var/lib/postgresql/data
    networks:
      - bakery-network
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U data_user -d data_db"]
      interval: 10s
      timeout: 5s
      retries: 5

  tenant-db:
    image: postgres:15-alpine
    environment:
      POSTGRES_DB: tenant_db
      POSTGRES_USER: tenant_user
      POSTGRES_PASSWORD: tenant_pass123
    volumes:
      - tenant_db_data:/var/lib/postgresql/data
    networks:
      - bakery-network
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U tenant_user -d tenant_db"]
      interval: 10s
      timeout: 5s
      retries: 5

  notification-db:
    image: postgres:15-alpine
    environment:
      POSTGRES_DB: notification_db
      POSTGRES_USER: notification_user
      POSTGRES_PASSWORD: notification_pass123
    volumes:
      - notification_db_data:/var/lib/postgresql/data
    networks:
      - bakery-network
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U notification_user -d notification_db"]
      interval: 10s
      timeout: 5s
      retries: 5

  # Redis Cache
  redis:
    image: redis:7-alpine
    command: redis-server --appendonly yes --requirepass redis_pass123
    volumes:
      - redis_data:/data
    networks:
      - bakery-network
    healthcheck:
      test: ["CMD", "redis-cli", "ping"]
      interval: 10s
      timeout: 5s
      retries: 5

  # RabbitMQ Message Broker
  rabbitmq:
    image: rabbitmq:3-management-alpine
    environment:
      RABBITMQ_DEFAULT_USER: bakery
      RABBITMQ_DEFAULT_PASS: forecast123
    ports:
      - "15672:15672"  # Management UI
    volumes:
      - rabbitmq_data:/var/lib/rabbitmq
    networks:
      - bakery-network
    healthcheck:
      test: ["CMD", "rabbitmq-diagnostics", "ping"]
      interval: 30s
      timeout: 10s
      retries: 5

  # ============================================================
  # MICROSERVICES
  # ============================================================

  # API Gateway
  gateway:
    build: ./gateway
    ports:
      - "8000:8000"
    env_file: .env
    depends_on:
      - redis
      - rabbitmq
    networks:
      - bakery-network
    volumes:
      - ./logs:/app/logs
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:8000/health"]
      interval: 30s
      timeout: 10s
      retries: 3

  # Auth Service
  auth-service:
    build: ./services/auth
    ports:
      - "8001:8000"
    env_file: .env
    depends_on:
      - auth-db
      - redis
      - rabbitmq
    networks:
      - bakery-network
    volumes:
      - ./logs:/app/logs
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:8000/health"]
      interval: 30s
      timeout: 10s
      retries: 3

  # Training Service
  training-service:
    build: ./services/training
    ports:
      - "8002:8000"
    env_file: .env
    depends_on:
      - training-db
      - redis
      - rabbitmq
    networks:
      - bakery-network
    volumes:
      - ./logs:/app/logs
      - ./models:/app/models
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:8000/health"]
      interval: 30s
      timeout: 10s
      retries: 3

  # Forecasting Service
  forecasting-service:
    build: ./services/forecasting
    ports:
      - "8003:8000"
    env_file: .env
    depends_on:
      - forecasting-db
      - redis
      - rabbitmq
    networks:
      - bakery-network
    volumes:
      - ./logs:/app/logs
      - ./models:/app/models
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:8000/health"]
      interval: 30s
      timeout: 10s
      retries: 3

  # Data Service
  data-service:
    build: ./services/data
    ports:
      - "8004:8000"
    env_file: .env
    depends_on:
      - data-db
      - redis
      - rabbitmq
    networks:
      - bakery-network
    volumes:
      - ./logs:/app/logs
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:8000/health"]
      interval: 30s
      timeout: 10s
      retries: 3

  # Tenant Service
  tenant-service:
    build: ./services/tenant
    ports:
      - "8005:8000"
    env_file: .env
    depends_on:
      - tenant-db
      - redis
      - rabbitmq
    networks:
      - bakery-network
    volumes:
      - ./logs:/app/logs
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:8000/health"]
      interval: 30s
      timeout: 10s
      retries: 3

  # Notification Service
  notification-service:
    build: ./services/notification
    ports:
      - "8006:8000"
    env_file: .env
    depends_on:
      - notification-db
      - redis
      - rabbitmq
    networks:
      - bakery-network
    volumes:
      - ./logs:/app/logs
      - ./templates:/app/templates
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:8000/health"]
      interval: 30s
      timeout: 10s
      retries: 3

  # ============================================================
  # MONITORING STACK
  # ============================================================

  # Prometheus
  prometheus:
    image: prom/prometheus:latest
    ports:
      - "9090:9090"
    volumes:
      - ./infrastructure/monitoring/prometheus.yml:/etc/prometheus/prometheus.yml
      - prometheus_data:/prometheus
    networks:
      - bakery-network
    command:
      - '--config.file=/etc/prometheus/prometheus.yml'
      - '--storage.tsdb.path=/prometheus'
      - '--web.console.libraries=/etc/prometheus/console_libraries'
      - '--web.console.templates=/etc/prometheus/consoles'

  # Grafana
  grafana:
    image: grafana/grafana:latest
    ports:
      - "3002:3000"
    environment:
      GF_SECURITY_ADMIN_PASSWORD: admin123
    volumes:
      - grafana_data:/var/lib/grafana
      - ./infrastructure/monitoring/grafana/dashboards:/etc/grafana/provisioning/dashboards
      - ./infrastructure/monitoring/grafana/datasources:/etc/grafana/provisioning/datasources
    networks:
      - bakery-network

networks:
  bakery-network:
    driver: bridge

volumes:
  # Database volumes
  auth_db_data:
  training_db_data:
  forecasting_db_data:
  data_db_data:
  tenant_db_data:
  notification_db_data:

  # Cache and messaging volumes
  redis_data:
  rabbitmq_data:

  # Monitoring volumes
  prometheus_data:
  grafana_data:
EOF
        print_success "Created docker-compose.yml"
    fi
}
|
||||
|
||||
# Create utility scripts
# Generates scripts/test.sh, scripts/deploy.sh and scripts/health-check.sh,
# each only when missing, and marks them executable. All three heredocs use a
# quoted 'EOF' delimiter, so the script bodies are written verbatim (the $vars
# inside belong to the generated scripts, not to this one).
create_utility_scripts() {
    print_header "CREATING UTILITY SCRIPTS"

    # Test script
    if [ ! -f "scripts/test.sh" ]; then
        cat > "scripts/test.sh" << 'EOF'
#!/bin/bash

# Run tests for all services

set -e

echo "Running tests for all microservices..."

services=("auth" "training" "forecasting" "data" "tenant" "notification")

for service in "${services[@]}"; do
    echo "Testing $service service..."
    docker-compose exec ${service}-service python -m pytest tests/ -v
done

echo "Running integration tests..."
docker-compose exec gateway python -m pytest ../tests/integration/ -v

echo "All tests completed!"
EOF
        chmod +x "scripts/test.sh"
        print_success "Created test script"
    fi

    # Deployment script
    if [ ! -f "scripts/deploy.sh" ]; then
        cat > "scripts/deploy.sh" << 'EOF'
#!/bin/bash

# Deploy services to production

set -e

echo "Building and deploying services..."

# Build all services
docker-compose build

# Deploy with zero downtime
docker-compose up -d --no-deps --force-recreate

# Wait for health checks
echo "Waiting for services to be healthy..."
sleep 30

# Verify deployment
./scripts/health-check.sh

echo "Deployment completed successfully!"
EOF
        chmod +x "scripts/deploy.sh"
        print_success "Created deployment script"
    fi

    # Health check script
    if [ ! -f "scripts/health-check.sh" ]; then
        cat > "scripts/health-check.sh" << 'EOF'
#!/bin/bash

# Check health of all services

services=(
    "gateway:8000"
    "auth-service:8001"
    "training-service:8002"
    "forecasting-service:8003"
    "data-service:8004"
    "tenant-service:8005"
    "notification-service:8006"
)

echo "Checking service health..."

all_healthy=true

for service_port in "${services[@]}"; do
    service=$(echo $service_port | cut -d: -f1)
    port=$(echo $service_port | cut -d: -f2)

    if curl -f -s "http://localhost:$port/health" > /dev/null; then
        echo "✅ $service is healthy"
    else
        echo "❌ $service is unhealthy"
        all_healthy=false
    fi
done

if $all_healthy; then
    echo "🎉 All services are healthy!"
    exit 0
else
    echo "⚠️  Some services are unhealthy"
    exit 1
fi
EOF
        chmod +x "scripts/health-check.sh"
        print_success "Created health check script"
    fi
}
|
||||
|
||||
# Create monitoring configuration
# Writes the Prometheus scrape config (one job per microservice, all scraping
# container port 8000 except the gateway) unless it already exists.
create_monitoring_config() {
    print_step "Creating monitoring configuration..."

    # Prometheus configuration
    if [ ! -f "infrastructure/monitoring/prometheus.yml" ]; then
        # Ensure the target directory exists before writing.
        mkdir -p infrastructure/monitoring
        cat > "infrastructure/monitoring/prometheus.yml" << 'EOF'
global:
  scrape_interval: 15s

scrape_configs:
  - job_name: 'gateway'
    static_configs:
      - targets: ['gateway:8000']

  - job_name: 'auth-service'
    static_configs:
      - targets: ['auth-service:8000']

  - job_name: 'training-service'
    static_configs:
      - targets: ['training-service:8000']

  - job_name: 'forecasting-service'
    static_configs:
      - targets: ['forecasting-service:8000']

  - job_name: 'data-service'
    static_configs:
      - targets: ['data-service:8000']

  - job_name: 'tenant-service'
    static_configs:
      - targets: ['tenant-service:8000']

  - job_name: 'notification-service'
    static_configs:
      - targets: ['notification-service:8000']
EOF
        print_success "Created Prometheus configuration"
    fi
}
|
||||
|
||||
# Final setup steps
# Fixes script permissions, creates runtime directories, and prints the
# post-install checklist. The echoed text is user-facing output; keep it
# stable.
final_setup() {
    print_header "FINAL SETUP STEPS"

    # Make scripts executable
    chmod +x scripts/*.sh

    # Create logs directory (and the model artifact directory)
    mkdir -p logs models

    print_success "Setup completed successfully!"

    echo ""
    echo "================================================================"
    # echo -e so the ANSI color codes in $GREEN/$NC are interpreted.
    echo -e "${GREEN}NEXT STEPS${NC}"
    echo "================================================================"
    echo "1. Update .env file with your configuration:"
    echo "   - Change JWT_SECRET_KEY"
    echo "   - Add AEMET and Madrid Open Data API keys"
    echo "   - Configure email settings"
    echo ""
    echo "2. Copy the configuration classes from artifacts:"
    echo "   - Copy BaseServiceSettings to shared/config/base.py"
    echo "   - Copy service-specific settings to respective config files"
    echo ""
    echo "3. Start the services:"
    echo "   docker-compose up -d"
    echo ""
    echo "4. Check service health:"
    echo "   ./scripts/health-check.sh"
    echo ""
    echo "5. Access the services:"
    echo "   - Gateway: http://localhost:8000"
    echo "   - API Docs: http://localhost:8000/docs"
    echo "   - Grafana: http://localhost:3002 (admin/admin123)"
    echo "   - RabbitMQ: http://localhost:15672 (bakery/forecast123)"
    echo ""
    echo "================================================================"
}
|
||||
|
||||
# Main execution
# Orchestrates the full project scaffold. Each step is idempotent: existing
# files/directories are skipped, so re-running is safe.
# NOTE(review): check_prerequisites, create_directory_structure,
# create_shared_config and create_service_configs are defined earlier in this
# script (outside this view) — confirm they exist before reordering.
main() {
    print_header "BAKERY FORECASTING PLATFORM - MICROSERVICES SETUP"

    check_prerequisites
    create_directory_structure
    create_shared_config
    create_service_configs
    create_gateway_config
    create_environment_file
    create_docker_compose
    create_utility_scripts
    create_monitoring_config
    final_setup
}

# Run main function, forwarding any CLI arguments.
main "$@"
|
||||
EOF
|
||||
chmod +x "scripts/setup.sh"
|
||||
print_success "Created setup script"
|
||||
fi
|
||||
}
|
||||
@@ -1,15 +0,0 @@
|
||||
#!/bin/bash
# Run the pytest suite inside every running service container.
# Best-effort: a failing suite or a stopped container is reported and the
# loop continues (no set -e on purpose).

echo "🧪 Running tests for all services..."

# Run tests for each service
for service in auth training forecasting data tenant notification; do
    echo "Testing $service service..."
    # Only exec into containers docker-compose reports as Up.
    if docker-compose ps | grep -q "${service}-service.*Up"; then
        # -T: no pseudo-TTY, so this works in non-interactive/CI runs.
        docker-compose exec -T ${service}-service python -m pytest tests/ -v || echo "Tests failed for $service"
    else
        echo "Service $service is not running, skipping tests"
    fi
done

echo "✅ Test run completed"
|
||||
@@ -1,297 +0,0 @@
|
||||
#!/bin/bash
# ================================================================
# CONFIGURATION VALIDATION SCRIPT
# scripts/validate-config.sh
# Validates env vars, service configs, docker-compose and scripts.
# ================================================================
# Fix: the shebang must be the very first line of the file; it previously
# sat below the banner comment and was therefore ignored by the kernel.

set -e

# ANSI color codes used by the print_* helpers below.
readonly GREEN='\033[0;32m'
readonly RED='\033[0;31m'
readonly YELLOW='\033[1;33m'
readonly NC='\033[0m'
|
||||
|
||||
# Print a section banner: blank line, rule, title in green, rule.
print_header() {
    local rule="================================================================"
    printf '\n%s\n' "$rule"
    echo -e "${GREEN}$1${NC}"
    printf '%s\n' "$rule"
}
|
||||
|
||||
# Green check mark followed by the message ('%b' expands the color escapes).
print_success() {
    printf '%b\n' "${GREEN}[✓]${NC} $1"
}
|
||||
|
||||
# Red cross followed by the message ('%b' expands the color escapes).
print_error() {
    printf '%b\n' "${RED}[✗]${NC} $1"
}
|
||||
|
||||
# Yellow bang followed by the message ('%b' expands the color escapes).
print_warning() {
    printf '%b\n' "${YELLOW}[!]${NC} $1"
}
|
||||
|
||||
# Validate the .env file: required variables set, no obvious placeholder
# values, and a minimally long JWT secret. Hard failures exit the whole
# script with status 1 (exit, not return — intentional for a CLI check).
validate_env_file() {
    print_header "VALIDATING ENVIRONMENT CONFIGURATION"

    if [ ! -f ".env" ]; then
        print_error ".env file not found"
        exit 1
    fi

    # Load environment variables.
    # NOTE(review): 'source' executes .env as shell code — assumes the file
    # contains only simple assignments; verify it is trusted input.
    source .env

    # Variables that must be set in every environment.
    critical_vars=(
        "JWT_SECRET_KEY"
        "AUTH_DATABASE_URL"
        "TRAINING_DATABASE_URL"
        "FORECASTING_DATABASE_URL"
        "DATA_DATABASE_URL"
        "TENANT_DATABASE_URL"
        "NOTIFICATION_DATABASE_URL"
        "REDIS_URL"
        "RABBITMQ_URL"
    )

    all_good=true

    for var in "${critical_vars[@]}"; do
        # ${!var} is indirect expansion: the value of the variable whose
        # NAME is stored in $var.
        if [ -z "${!var}" ]; then
            print_error "$var is not set"
            all_good=false
        elif [[ "${!var}" == *"change"* ]] || [[ "${!var}" == *"default"* ]]; then
            # Heuristic: values containing "change"/"default" look like
            # unedited template placeholders.
            print_warning "$var appears to use default/placeholder value"
        else
            print_success "$var is configured"
        fi
    done

    # Check JWT secret strength (length only; ${#X} is 0 when X is unset).
    if [ ${#JWT_SECRET_KEY} -lt 32 ]; then
        print_error "JWT_SECRET_KEY must be at least 32 characters long"
        all_good=false
    fi

    # Extra variables that only matter for production deployments —
    # missing ones are warnings, not failures.
    if [ "$ENVIRONMENT" = "production" ]; then
        production_vars=("AEMET_API_KEY" "MADRID_OPENDATA_API_KEY" "SMTP_USER" "SMTP_PASSWORD")
        for var in "${production_vars[@]}"; do
            if [ -z "${!var}" ]; then
                print_warning "$var should be configured for production"
            fi
        done
    fi

    if $all_good; then
        print_success "Environment configuration is valid"
    else
        print_error "Environment configuration has issues"
        exit 1
    fi
}
|
||||
|
||||
# Check every microservice's config.py: it must exist, inherit from
# BaseServiceSettings, and declare a DATABASE_URL. The gateway keeps its
# config outside services/ and is checked separately.
validate_service_configs() {
    print_header "VALIDATING SERVICE CONFIGURATIONS"

    services=("auth" "training" "forecasting" "data" "tenant" "notification")

    for service in "${services[@]}"; do
        config_file="services/$service/app/core/config.py"

        # Guard clause: nothing else to check when the file is absent.
        if [ ! -f "$config_file" ]; then
            print_error "$service configuration missing"
            continue
        fi

        print_success "$service configuration exists"

        # Convention 1: settings class inherits the shared base.
        if grep -q "BaseServiceSettings" "$config_file"; then
            print_success "$service uses BaseServiceSettings"
        else
            print_warning "$service doesn't inherit from BaseServiceSettings"
        fi

        # Convention 2: each service declares its own database URL.
        if grep -q "DATABASE_URL" "$config_file"; then
            print_success "$service has database configuration"
        else
            print_warning "$service missing database configuration"
        fi
    done

    # Check gateway configuration
    if [ -f "gateway/app/core/config.py" ]; then
        print_success "Gateway configuration exists"
    else
        print_error "Gateway configuration missing"
    fi
}
|
||||
|
||||
# Confirm the shared library layout: the base settings module plus the
# common helper packages every service imports.
validate_shared_config() {
    print_header "VALIDATING SHARED CONFIGURATION"

    local base="shared/config/base.py"
    if [ ! -f "$base" ]; then
        print_error "Base configuration missing"
    else
        print_success "Base configuration exists"
        # The base module must define the BaseServiceSettings class that
        # every service config inherits from.
        if grep -q "BaseServiceSettings" "$base"; then
            print_success "BaseServiceSettings class found"
        else
            print_error "BaseServiceSettings class missing"
        fi
    fi

    # Shared helper packages; absence is non-fatal.
    shared_modules=("auth" "database" "messaging" "monitoring" "utils")
    for module in "${shared_modules[@]}"; do
        if [ -d "shared/$module" ]; then
            print_success "Shared $module module exists"
        else
            print_warning "Shared $module module missing"
        fi
    done
}
|
||||
|
||||
# Report whether a top-level key "$1:" appears in docker-compose.yml.
# $2 selects the severity when absent: "error" or anything else -> warning.
_check_compose_entry() {
    if grep -q "$1:" docker-compose.yml; then
        print_success "$1 defined in docker-compose.yml"
    elif [ "$2" = "error" ]; then
        print_error "$1 missing from docker-compose.yml"
    else
        print_warning "$1 missing from docker-compose.yml"
    fi
}

# Validate docker-compose.yml contents and the per-service Dockerfiles.
validate_docker_config() {
    print_header "VALIDATING DOCKER CONFIGURATION"

    if [ -f "docker-compose.yml" ]; then
        print_success "Docker Compose configuration exists"

        # Application services: all required.
        services=("gateway" "auth-service" "training-service" "forecasting-service" "data-service" "tenant-service" "notification-service")
        for service in "${services[@]}"; do
            _check_compose_entry "$service" error
        done

        # Per-service databases: all required.
        databases=("auth-db" "training-db" "forecasting-db" "data-db" "tenant-db" "notification-db")
        for db in "${databases[@]}"; do
            _check_compose_entry "$db" error
        done

        # Infrastructure services: recommended, not fatal.
        infra=("redis" "rabbitmq" "prometheus" "grafana")
        for service in "${infra[@]}"; do
            _check_compose_entry "$service" warning
        done
    else
        print_error "Docker Compose configuration missing"
    fi

    # Check Dockerfiles (the gateway lives outside services/).
    services=("gateway" "auth" "training" "forecasting" "data" "tenant" "notification")
    for service in "${services[@]}"; do
        if [ "$service" = "gateway" ]; then
            dockerfile="gateway/Dockerfile"
        else
            dockerfile="services/$service/Dockerfile"
        fi

        if [ -f "$dockerfile" ]; then
            print_success "$service Dockerfile exists"
        else
            print_warning "$service Dockerfile missing"
        fi
    done
}
|
||||
|
||||
# Ensure the expected project tree exists. Two phases on purpose: first
# report the status of every directory, then create whatever was absent —
# keeping the output order stable.
validate_directory_structure() {
    print_header "VALIDATING DIRECTORY STRUCTURE"

    required_dirs=(
        "shared/config"
        "shared/auth"
        "shared/database"
        "shared/messaging"
        "gateway/app/core"
        "services/auth/app/core"
        "services/training/app/core"
        "services/forecasting/app/core"
        "services/data/app/core"
        "services/tenant/app/core"
        "services/notification/app/core"
        "scripts"
        "logs"
        "models"
        "templates"
    )

    missing_dirs=()

    # Phase 1: report without mutating anything.
    for d in "${required_dirs[@]}"; do
        if [ -d "$d" ]; then
            print_success "$d exists"
        else
            print_warning "$d missing"
            missing_dirs+=("$d")
        fi
    done

    # Phase 2: create the absent directories.
    if [ ${#missing_dirs[@]} -gt 0 ]; then
        print_warning "Creating missing directories..."
        for d in "${missing_dirs[@]}"; do
            mkdir -p "$d"
            print_success "Created $d"
        done
    fi
}
|
||||
|
||||
# Verify the helper scripts exist and are executable; missing executable
# bits are fixed in place with chmod +x.
validate_scripts() {
    print_header "VALIDATING UTILITY SCRIPTS"

    scripts=("setup.sh" "test.sh" "deploy.sh" "health-check.sh" "validate-config.sh")

    for script in "${scripts[@]}"; do
        script_path="scripts/$script"

        # Guard clause: absent scripts are only warned about.
        if [ ! -f "$script_path" ]; then
            print_warning "$script missing"
            continue
        fi

        print_success "$script exists"

        if [ -x "$script_path" ]; then
            print_success "$script is executable"
        else
            print_warning "$script is not executable - fixing..."
            chmod +x "$script_path"
        fi
    done
}
|
||||
|
||||
# Main validation function
# Runs every validator in dependency order (directories before configs, env
# before docker). validate_env_file may exit 1 and abort the remaining
# checks — intentional fail-fast behavior.
main() {
    print_header "CONFIGURATION VALIDATION"

    validate_directory_structure
    validate_shared_config
    validate_service_configs
    validate_env_file
    validate_docker_config
    validate_scripts

    print_header "VALIDATION COMPLETE"
    echo "If all validations passed, you're ready to start the services!"
    echo ""
    echo "Next steps:"
    echo "1. docker-compose up -d"
    echo "2. ./scripts/health-check.sh"
}

# Run validation, forwarding any CLI arguments.
main "$@"
|
||||
Reference in New Issue
Block a user