REFACTOR API gateway fix 5

Urtzi Alfaro
2025-07-26 21:10:54 +02:00
parent dacf114922
commit 7d5c8bc9a4
6 changed files with 91 additions and 19 deletions

View File

@@ -67,10 +67,21 @@ async def startup_event():
logger.info("Starting API Gateway") logger.info("Starting API Gateway")
# Start metrics server # Start metrics server
metrics_collector.start_metrics_server(8080) metrics_collector.register_counter(
"gateway_auth_requests_total",
"Total authentication requests through gateway"
)
metrics_collector.register_counter(
"gateway_auth_responses_total",
"Total authentication responses through gateway"
)
metrics_collector.register_histogram(
"gateway_request_duration_seconds",
"Gateway request duration"
)
# Initialize service discovery
# await service_discovery.initialize() metrics_collector.start_metrics_server(8080)
logger.info("API Gateway started successfully") logger.info("API Gateway started successfully")

View File

@@ -42,10 +42,20 @@ async def get_tenant_members(request: Request, tenant_id: str = Path(...)):
 # TENANT-SCOPED DATA SERVICE ENDPOINTS
 # ================================================================
-@router.api_route("/{tenant_id}/sales/{path:path}", methods=["GET", "POST", "PUT", "DELETE", "OPTIONS"])
-async def proxy_tenant_sales(request: Request, tenant_id: str = Path(...), path: str = ""):
-    """Proxy tenant sales requests to data service"""
-    target_path = f"/api/v1/tenants/{tenant_id}/sales/{path}".rstrip("/")
+@router.api_route("/{tenant_id}/sales{path:path}", methods=["GET", "POST", "PUT", "DELETE", "OPTIONS"])
+async def proxy_all_tenant_sales_alternative(request: Request, tenant_id: str = Path(...), path: str = ""):
+    """Proxy all tenant sales requests - handles both base and sub-paths"""
+    base_path = f"/api/v1/tenants/{tenant_id}/sales"
+    # If path is empty or just "/", use base path
+    if not path or path == "/" or path == "":
+        target_path = base_path
+    else:
+        # Ensure path starts with "/"
+        if not path.startswith("/"):
+            path = "/" + path
+        target_path = base_path + path
     return await _proxy_to_data_service(request, target_path)
 @router.api_route("/{tenant_id}/weather/{path:path}", methods=["GET", "POST", "OPTIONS"])

View File

@@ -173,7 +173,7 @@ async def import_sales_data(
     current_user: Dict[str, Any] = Depends(get_current_user_dep),
     db: AsyncSession = Depends(get_db)
 ):
-    """Import sales data from file for tenant"""
+    """Import sales data from file for tenant - FIXED VERSION"""
     try:
         logger.info("Importing sales data",
                     tenant_id=tenant_id,
@@ -185,7 +185,7 @@ async def import_sales_data(
         content = await file.read()
         file_content = content.decode('utf-8')
-        # Process import
+        # ✅ FIX: tenant_id comes from URL path, not file upload
         result = await DataImportService.process_upload(
             tenant_id,
             file_content,
@@ -198,7 +198,7 @@ async def import_sales_data(
         # Publish event
         try:
             await publish_data_imported({
-                "tenant_id": tenant_id,
+                "tenant_id": str(tenant_id),  # Ensure string conversion
                 "type": "file_import",
                 "format": file_format,
                 "filename": file.filename,

View File

@@ -6,7 +6,7 @@
 from sqlalchemy import Column, String, DateTime, Float, Integer, Text, Index
 from sqlalchemy.dialects.postgresql import UUID
 import uuid
-from datetime import datetime
+from datetime import datetime, timezone
 from app.core.database import Base
@@ -15,15 +15,17 @@ class SalesData(Base):
     id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
     tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)
-    date = Column(DateTime, nullable=False, index=True)
+    date = Column(DateTime(timezone=True), nullable=False, index=True)
     product_name = Column(String(255), nullable=False, index=True)
     quantity_sold = Column(Integer, nullable=False)
     revenue = Column(Float, nullable=False)
     location_id = Column(String(100), nullable=True, index=True)
     source = Column(String(50), nullable=False, default="manual")
     notes = Column(Text, nullable=True)
-    created_at = Column(DateTime, default=datetime.utcnow)
-    updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)
+    created_at = Column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc))
+    updated_at = Column(DateTime(timezone=True),
+                        default=lambda: datetime.now(timezone.utc),
+                        onupdate=lambda: datetime.now(timezone.utc))
     __table_args__ = (
         Index('idx_sales_tenant_date', 'tenant_id', 'date'),
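
The model now stores timezone-aware timestamps: DateTime(timezone=True) maps to TIMESTAMP WITH TIME ZONE on PostgreSQL, and the defaults switch from the naive datetime.utcnow to the aware datetime.now(timezone.utc). The practical difference:

# Naive vs. timezone-aware defaults (illustrative, not project code).
from datetime import datetime, timezone

naive = datetime.utcnow()            # tzinfo is None: the offset is lost once stored
aware = datetime.now(timezone.utc)   # carries an explicit UTC offset

print(naive.tzinfo)       # None
print(aware.tzinfo)       # UTC
print(aware.isoformat())  # e.g. 2025-07-26T19:10:54.123456+00:00

With timezone=True the values round-trip from PostgreSQL as aware datetimes, so comparisons against aware request data (such as the "2024-01-15T10:00:00Z" dates used in the schemas below) no longer mix naive and aware objects.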

View File

@@ -9,8 +9,9 @@ from typing import Optional, List, Dict, Any
 from uuid import UUID

 class SalesDataCreate(BaseModel):
-    """Schema for creating sales data"""
-    tenant_id: UUID
+    """Schema for creating sales data - FIXED to work with gateway"""
+    # ✅ FIX: Make tenant_id optional since it comes from URL path
+    tenant_id: Optional[UUID] = Field(None, description="Tenant ID (auto-injected from URL path)")
     date: datetime
     product_name: str = Field(..., min_length=1, max_length=255)
     quantity_sold: int = Field(..., gt=0)
@@ -25,6 +26,16 @@ class SalesDataCreate(BaseModel):
     class Config:
         from_attributes = True
+        json_schema_extra = {
+            "example": {
+                "date": "2024-01-15T10:00:00Z",
+                "product_name": "Pan Integral",
+                "quantity_sold": 25,
+                "revenue": 37.50,
+                "source": "manual"
+                # Note: tenant_id is automatically injected from URL path by gateway
+            }
+        }

 class SalesDataResponse(BaseModel):
     """Schema for sales data response"""
@@ -62,15 +73,23 @@ class SalesDataQuery(BaseModel):
         from_attributes = True

 class SalesDataImport(BaseModel):
-    """Schema for importing sales data"""
-    tenant_id: UUID
-    data: str  # JSON string or CSV content
+    """Schema for importing sales data - FIXED to work with gateway"""
+    # ✅ FIX: Make tenant_id optional since it comes from URL path
+    tenant_id: Optional[UUID] = Field(None, description="Tenant ID (auto-injected from URL path)")
+    data: str = Field(..., description="JSON string or CSV content")
     data_format: str = Field(..., pattern="^(csv|json|excel)$")
     source: str = Field(default="import", max_length=50)
     validate_only: bool = Field(default=False)

     class Config:
         from_attributes = True
+        json_schema_extra = {
+            "example": {
+                "data": "date,product,quantity,revenue\n2024-01-01,bread,10,25.50",
+                "data_format": "csv",
+                # Note: tenant_id is automatically injected from URL path by gateway
+            }
+        }

 class SalesDataBulkCreate(BaseModel):
     """Schema for bulk creating sales data"""

View File

@@ -181,6 +181,36 @@ if [ -n "$TENANT_ID" ]; then
echo "Validation Response: $VALIDATION_RESPONSE" echo "Validation Response: $VALIDATION_RESPONSE"
check_response "$VALIDATION_RESPONSE" "Import Validation" check_response "$VALIDATION_RESPONSE" "Import Validation"
# Step 6.5: Import Sample Sales Data
echo -e "\n6.5. Importing Sample Sales Data..."
IMPORT_RESPONSE=$(curl -s -X POST "$API_BASE/api/v1/tenants/$TENANT_ID/sales" \
-H "Content-Type: application/json" \
-H "Authorization: Bearer $ACCESS_TOKEN" \
-d '{
"product_name": "Pan Integral",
"quantity_sold": 25,
"revenue": 37.50,
"date": "2024-01-15T10:00:00Z"
}')
echo "Import Response: $IMPORT_RESPONSE"
check_response "$IMPORT_RESPONSE" "Sales Data Import"
# Now test sales endpoint again - should have data!
echo -e "\n6.6. Testing Sales Endpoint Again (Should Have Data)..."
SALES_RESPONSE_WITH_DATA=$(curl -s -X GET "$API_BASE/api/v1/tenants/$TENANT_ID/sales" \
-H "Authorization: Bearer $ACCESS_TOKEN")
echo "Sales Response with Data: $SALES_RESPONSE_WITH_DATA"
check_response "$SALES_RESPONSE_WITH_DATA" "Tenant Sales Endpoint with Data"
# Check if we actually got data
if echo "$SALES_RESPONSE_WITH_DATA" | grep -q "Pan Integral"; then
echo -e "${GREEN}✅ Successfully retrieved sales data!${NC}"
else
echo -e "${YELLOW}⚠️ No sales data returned (might need different import endpoint)${NC}"
fi
fi fi
# Step 7: Additional Debug Information # Step 7: Additional Debug Information