Improve the frontend 5

frontend/src/api/services/auditLogs.ts (new file, 267 lines)

@@ -0,0 +1,267 @@
// ================================================================
// frontend/src/api/services/auditLogs.ts
// ================================================================
/**
 * Audit Logs Aggregation Service
 *
 * Aggregates audit logs from all microservices and provides
 * unified access to system event history.
 *
 * Backend endpoints:
 * - GET /tenants/{tenant_id}/{service}/audit-logs
 * - GET /tenants/{tenant_id}/{service}/audit-logs/stats
 *
 * Last Updated: 2025-11-02
 * Status: ✅ Complete - Multi-service aggregation
 */

import { apiClient } from '../client';
import {
  AuditLogResponse,
  AuditLogFilters,
  AuditLogListResponse,
  AuditLogStatsResponse,
  AggregatedAuditLog,
  AUDIT_LOG_SERVICES,
  AuditLogServiceName,
} from '../types/auditLogs';

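// A minimal sketch of the filter shape this service relies on, inferred only
// from how the fields are used below; the authoritative AuditLogFilters
// definition lives in ../types/auditLogs and may differ.
//
//   interface AuditLogFilters {
//     start_date?: string;
//     end_date?: string;
//     user_id?: string;
//     action?: string;
//     resource_type?: string;
//     severity?: string;
//     search?: string;
//     limit?: number;
//     offset?: number;
//   }
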
export class AuditLogsService {
  private readonly baseUrl = '/tenants';

  /**
   * Get audit logs from a single service
   */
  async getServiceAuditLogs(
    tenantId: string,
    serviceName: AuditLogServiceName,
    filters?: AuditLogFilters
  ): Promise<AuditLogListResponse> {
    const queryParams = new URLSearchParams();

    if (filters?.start_date) queryParams.append('start_date', filters.start_date);
    if (filters?.end_date) queryParams.append('end_date', filters.end_date);
    if (filters?.user_id) queryParams.append('user_id', filters.user_id);
    if (filters?.action) queryParams.append('action', filters.action);
    if (filters?.resource_type) queryParams.append('resource_type', filters.resource_type);
    if (filters?.severity) queryParams.append('severity', filters.severity);
    if (filters?.search) queryParams.append('search', filters.search);
    if (filters?.limit) queryParams.append('limit', filters.limit.toString());
    if (filters?.offset) queryParams.append('offset', filters.offset.toString());

    const url = `${this.baseUrl}/${tenantId}/${serviceName}/audit-logs${queryParams.toString() ? '?' + queryParams.toString() : ''}`;

    return apiClient.get<AuditLogListResponse>(url);
  }

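  // Usage sketch (illustrative only; 'billing' is a placeholder, not a known
  // AuditLogServiceName value from AUDIT_LOG_SERVICES):
  //
  //   const page = await auditLogsService.getServiceAuditLogs('tenant-123', 'billing', {
  //     severity: 'error',
  //     limit: 50,
  //   });
  //   console.log(page.items.length, page.has_more);
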
  /**
   * Get audit log statistics from a single service
   */
  async getServiceAuditLogStats(
    tenantId: string,
    serviceName: AuditLogServiceName,
    filters?: {
      start_date?: string;
      end_date?: string;
    }
  ): Promise<AuditLogStatsResponse> {
    const queryParams = new URLSearchParams();

    if (filters?.start_date) queryParams.append('start_date', filters.start_date);
    if (filters?.end_date) queryParams.append('end_date', filters.end_date);

    const url = `${this.baseUrl}/${tenantId}/${serviceName}/audit-logs/stats${queryParams.toString() ? '?' + queryParams.toString() : ''}`;

    return apiClient.get<AuditLogStatsResponse>(url);
  }

  /**
   * Get aggregated audit logs from ALL services
   * Makes parallel requests to all services and combines results
   */
  async getAllAuditLogs(
    tenantId: string,
    filters?: AuditLogFilters
  ): Promise<AggregatedAuditLog[]> {
    // Make parallel requests to all services
    const promises = AUDIT_LOG_SERVICES.map(service =>
      this.getServiceAuditLogs(tenantId, service, {
        ...filters,
        // Fetch enough rows from each service to cover the requested page;
        // pagination (offset/limit) is applied once on the merged list below,
        // so it must not also be applied per service.
        limit: (filters?.limit || 100) + (filters?.offset || 0),
        offset: 0,
      }).catch(error => {
        // If a service fails, log the error but don't fail the entire request
        console.warn(`Failed to fetch audit logs from ${service}:`, error);
        return { items: [], total: 0, limit: 0, offset: 0, has_more: false };
      })
    );

    const results = await Promise.all(promises);

    // Combine all results
    const allLogs: AggregatedAuditLog[] = results.flatMap(result => result.items);

    // Sort by created_at descending (most recent first)
    allLogs.sort((a, b) => {
      const dateA = new Date(a.created_at).getTime();
      const dateB = new Date(b.created_at).getTime();
      return dateB - dateA;
    });

    // Apply pagination to the merged, sorted list
    const limit = filters?.limit || 100;
    const offset = filters?.offset || 0;

    return allLogs.slice(offset, offset + limit);
  }

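  // Usage sketch (illustrative values): fetch the second page of 50 merged
  // events, sorted newest-first across every service in AUDIT_LOG_SERVICES.
  //
  //   const logs = await auditLogsService.getAllAuditLogs('tenant-123', {
  //     limit: 50,
  //     offset: 50,
  //   });
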
  /**
   * Get aggregated statistics from ALL services
   */
  async getAllAuditLogStats(
    tenantId: string,
    filters?: {
      start_date?: string;
      end_date?: string;
    }
  ): Promise<AuditLogStatsResponse> {
    // Make parallel requests to all services
    const promises = AUDIT_LOG_SERVICES.map(service =>
      this.getServiceAuditLogStats(tenantId, service, filters).catch(error => {
        console.warn(`Failed to fetch audit log stats from ${service}:`, error);
        return {
          total_events: 0,
          events_by_action: {},
          events_by_severity: {},
          events_by_resource_type: {},
          date_range: { min: null, max: null },
        };
      })
    );

    const results = await Promise.all(promises);

    // Aggregate statistics
    const aggregated: AuditLogStatsResponse = {
      total_events: 0,
      events_by_action: {},
      events_by_severity: {},
      events_by_resource_type: {},
      date_range: { min: null, max: null },
    };

    for (const result of results) {
      aggregated.total_events += result.total_events;

      // Merge events_by_action
      for (const [action, count] of Object.entries(result.events_by_action)) {
        aggregated.events_by_action[action] = (aggregated.events_by_action[action] || 0) + count;
      }

      // Merge events_by_severity
      for (const [severity, count] of Object.entries(result.events_by_severity)) {
        aggregated.events_by_severity[severity] = (aggregated.events_by_severity[severity] || 0) + count;
      }

      // Merge events_by_resource_type
      for (const [resource, count] of Object.entries(result.events_by_resource_type)) {
        aggregated.events_by_resource_type[resource] = (aggregated.events_by_resource_type[resource] || 0) + count;
      }

      // Update date range
      if (result.date_range.min) {
        if (!aggregated.date_range.min || result.date_range.min < aggregated.date_range.min) {
          aggregated.date_range.min = result.date_range.min;
        }
      }
      if (result.date_range.max) {
        if (!aggregated.date_range.max || result.date_range.max > aggregated.date_range.max) {
          aggregated.date_range.max = result.date_range.max;
        }
      }
    }

    return aggregated;
  }

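  // Worked example of the merge (illustrative counts): if one service reports
  // events_by_action { create: 3 } and another reports { create: 2, delete: 1 },
  // the aggregated response contains { create: 5, delete: 1 }; total_events and
  // date_range are combined the same way.
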
  /**
   * Export audit logs to CSV format
   */
  exportToCSV(logs: AggregatedAuditLog[]): string {
    if (logs.length === 0) return '';

    const headers = [
      'Timestamp',
      'Service',
      'User ID',
      'Action',
      'Resource Type',
      'Resource ID',
      'Severity',
      'Description',
      'IP Address',
      'Endpoint',
      'Method',
    ];

    const rows = logs.map(log => [
      log.created_at,
      log.service_name,
      log.user_id || '',
      log.action,
      log.resource_type,
      log.resource_id || '',
      log.severity,
      log.description,
      log.ip_address || '',
      log.endpoint || '',
      log.method || '',
    ]);

    const csvContent = [
      headers.join(','),
      // Quote every cell and escape embedded double quotes so descriptions
      // containing quotes, commas, or newlines don't break the CSV structure.
      ...rows.map(row => row.map(cell => `"${String(cell).replace(/"/g, '""')}"`).join(',')),
    ].join('\n');

    return csvContent;
  }

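  // Escaping example (illustrative): a description of `User "admin" deleted`
  // is emitted as "User ""admin"" deleted", so embedded quotes and commas do
  // not break the row structure.
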
  /**
   * Export audit logs to JSON format
   */
  exportToJSON(logs: AggregatedAuditLog[]): string {
    return JSON.stringify(logs, null, 2);
  }

  /**
   * Download audit logs as a file
   */
  downloadAuditLogs(
    logs: AggregatedAuditLog[],
    format: 'csv' | 'json',
    filename?: string
  ): void {
    const content = format === 'csv' ? this.exportToCSV(logs) : this.exportToJSON(logs);
    const blob = new Blob([content], {
      type: format === 'csv' ? 'text/csv;charset=utf-8;' : 'application/json',
    });

    const link = document.createElement('a');
    const url = URL.createObjectURL(blob);

    link.setAttribute('href', url);
    link.setAttribute(
      'download',
      filename || `audit-logs-${new Date().toISOString().split('T')[0]}.${format}`
    );
    link.style.visibility = 'hidden';

    document.body.appendChild(link);
    link.click();
    document.body.removeChild(link);

    URL.revokeObjectURL(url);
  }
}

// Export singleton instance
export const auditLogsService = new AuditLogsService();
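
// Usage sketch (a hypothetical handler; the tenant id and function name are
// placeholders, not part of this module):
//
//   async function handleExportClick(): Promise<void> {
//     const logs = await auditLogsService.getAllAuditLogs('tenant-123', { limit: 500 });
//     auditLogsService.downloadAuditLogs(logs, 'csv');
//   }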