REFACTOR data service

Urtzi Alfaro
2025-08-12 18:17:30 +02:00
parent 7c237c0acc
commit fbe7470ad9
149 changed files with 8528 additions and 7393 deletions

View File

@@ -14,7 +14,8 @@ This directory contains the enhanced inter-service communication system that int
Each service has a specialized enhanced client:
- **EnhancedDataServiceClient** - Sales data, weather, traffic, products with optimized caching
- **SalesServiceClient** - Sales data, products, data import with optimized caching
- **ExternalServiceClient** - Weather and traffic data collection with external API integration
- **EnhancedAuthServiceClient** - Authentication, user management, permissions with security focus
- **EnhancedTrainingServiceClient** - ML training, model management, deployment with pipeline monitoring
- **EnhancedForecastingServiceClient** - Forecasting, predictions, scenarios with analytics
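For example, a calling service now picks the client that matches the data it needs. A minimal sketch, assuming the factory functions exported in the `__init__.py` diff below (the service and tenant names are placeholders):

```python
import asyncio

# Factory names follow the __init__.py diff below; "forecasting-service"
# and "tenant-123" are placeholders for the caller's identity and tenant.
from shared.clients import get_sales_client, get_external_client

async def main():
    sales = get_sales_client(service_name="forecasting-service")
    external = get_external_client(service_name="forecasting-service")

    records = await sales.get_all_sales_data(tenant_id="tenant-123")
    forecast = await external.get_weather_forecast(tenant_id="tenant-123", days=3)
    print(len(records), len(forecast or []))

asyncio.run(main())
```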

View File

@@ -6,7 +6,8 @@ Provides easy access to all service clients
from .base_service_client import BaseServiceClient, ServiceAuthenticator
from .training_client import TrainingServiceClient
from .data_client import DataServiceClient
from .sales_client import SalesServiceClient
from .external_client import ExternalServiceClient
from .forecast_client import ForecastServiceClient
# Import config
@@ -25,14 +26,24 @@ def get_training_client(config: BaseServiceSettings = None, service_name: str =
_client_cache[cache_key] = TrainingServiceClient(config, service_name)
return _client_cache[cache_key]
def get_data_client(config: BaseServiceSettings = None, service_name: str = "unknown") -> DataServiceClient:
"""Get or create a data service client"""
def get_sales_client(config: BaseServiceSettings = None, service_name: str = "unknown") -> SalesServiceClient:
"""Get or create a sales service client"""
if config is None:
from app.core.config import settings as config
cache_key = f"data_{service_name}"
cache_key = f"sales_{service_name}"
if cache_key not in _client_cache:
_client_cache[cache_key] = DataServiceClient(config, service_name)
_client_cache[cache_key] = SalesServiceClient(config, service_name)
return _client_cache[cache_key]
def get_external_client(config: BaseServiceSettings = None, service_name: str = "unknown") -> ExternalServiceClient:
"""Get or create an external service client"""
if config is None:
from app.core.config import settings as config
cache_key = f"external_{service_name}"
if cache_key not in _client_cache:
_client_cache[cache_key] = ExternalServiceClient(config, service_name)
return _client_cache[cache_key]
def get_forecast_client(config: BaseServiceSettings = None, service_name: str = "unknown") -> ForecastServiceClient:
@@ -45,6 +56,7 @@ def get_forecast_client(config: BaseServiceSettings = None, service_name: str =
_client_cache[cache_key] = ForecastServiceClient(config, service_name)
return _client_cache[cache_key]
class ServiceClients:
"""Convenient wrapper for all service clients"""
@@ -54,7 +66,8 @@ class ServiceClients:
# Initialize clients lazily
self._training_client = None
self._data_client = None
self._sales_client = None
self._external_client = None
self._forecast_client = None
def _get_default_config(self):
@@ -73,11 +86,18 @@ class ServiceClients:
return self._training_client
@property
def data(self) -> DataServiceClient:
"""Get data service client"""
if self._data_client is None:
self._data_client = get_data_client(self.config, self.service_name)
return self._data_client
def sales(self) -> SalesServiceClient:
"""Get sales service client"""
if self._sales_client is None:
self._sales_client = get_sales_client(self.config, self.service_name)
return self._sales_client
@property
def external(self) -> ExternalServiceClient:
"""Get external service client"""
if self._external_client is None:
self._external_client = get_external_client(self.config, self.service_name)
return self._external_client
@property
def forecast(self) -> ForecastServiceClient:
@@ -96,11 +116,13 @@ __all__ = [
'BaseServiceClient',
'ServiceAuthenticator',
'TrainingServiceClient',
'DataServiceClient',
'SalesServiceClient',
'ExternalServiceClient',
'ForecastServiceClient',
'ServiceClients',
'get_training_client',
'get_data_client',
'get_sales_client',
'get_external_client',
'get_forecast_client',
'get_service_clients'
]
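Because `_client_cache` keys entries by client type and calling service (`sales_{service_name}`, `external_{service_name}`), repeated factory calls hand back one shared instance per caller. A sketch of the resulting behavior, assuming the module above:

```python
from shared.clients import get_sales_client, get_external_client

a = get_sales_client(service_name="training-service")
b = get_sales_client(service_name="training-service")
c = get_sales_client(service_name="forecasting-service")

assert a is b      # same cache key: "sales_training-service"
assert a is not c  # different calling service, separate cached client

# External clients live under their own key space ("external_..."),
# so they never collide with sales clients for the same caller.
assert get_external_client(service_name="training-service") is not a
```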

View File

@@ -227,7 +227,7 @@ class BaseServiceClient(ABC):
endpoint: str,
tenant_id: Optional[str] = None,
params: Optional[Dict[str, Any]] = None,
page_size: int = 5000,
page_size: int = 1000,
max_pages: int = 100,
timeout: Optional[Union[int, httpx.Timeout]] = None
) -> List[Dict[str, Any]]:
@@ -239,7 +239,7 @@ class BaseServiceClient(ABC):
endpoint: API endpoint
tenant_id: Optional tenant ID
params: Base query parameters
page_size: Records per page (default 5000)
page_size: Records per page (default 1000)
max_pages: Maximum pages to fetch (safety limit)
timeout: Request timeout override
@@ -337,7 +337,7 @@ class BaseServiceClient(ABC):
endpoint: str,
tenant_id: Optional[str] = None,
params: Optional[Dict[str, Any]] = None,
page_size: int = 5000,
page_size: int = 1000,
max_pages: int = 100,
timeout: Optional[Union[int, httpx.Timeout]] = None
) -> List[Dict[str, Any]]:
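The default page size drops from 5000 to 1000 records, trading a few more round trips for smaller responses per page. For reference, the limit/offset pattern these parameters drive looks roughly like this (a sketch under assumed response shapes, not the actual `BaseServiceClient` implementation):

```python
from typing import Any, Dict, List

async def fetch_all_pages(client, endpoint: str, tenant_id: str,
                          page_size: int = 1000,
                          max_pages: int = 100) -> List[Dict[str, Any]]:
    """Sketch: page with limit/offset until a short or empty page appears."""
    records: List[Dict[str, Any]] = []
    for page in range(max_pages):  # max_pages is the safety limit
        batch = await client.get(endpoint, tenant_id=tenant_id,
                                 params={"limit": page_size,
                                         "offset": page * page_size})
        if not batch:               # empty page: nothing left to fetch
            break
        records.extend(batch)
        if len(batch) < page_size:  # short page: this was the last one
            break
    return records
```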

View File

@@ -1,450 +0,0 @@
# shared/clients/data_client.py
"""
Data Service Client
Handles all API calls to the data service
"""
import httpx
import structlog
from typing import Dict, Any, Optional, List, Union
from .base_service_client import BaseServiceClient
from shared.config.base import BaseServiceSettings
logger = structlog.get_logger()
class DataServiceClient(BaseServiceClient):
"""Client for communicating with the data service"""
def __init__(self, config: BaseServiceSettings, calling_service_name: str = "unknown"):
super().__init__(calling_service_name, config)
def get_service_base_path(self) -> str:
return "/api/v1"
# ================================================================
# SALES DATA (with advanced pagination support)
# ================================================================
async def get_sales_data(
self,
tenant_id: str,
start_date: Optional[str] = None,
end_date: Optional[str] = None,
product_id: Optional[str] = None,
aggregation: str = "daily"
) -> Optional[List[Dict[str, Any]]]:
"""Get sales data for a date range"""
params = {"aggregation": aggregation}
if start_date:
params["start_date"] = start_date
if end_date:
params["end_date"] = end_date
if product_id:
params["product_id"] = product_id
result = await self.get("sales", tenant_id=tenant_id, params=params)
return result.get("sales", []) if result else None
async def get_all_sales_data(
self,
tenant_id: str,
start_date: Optional[str] = None,
end_date: Optional[str] = None,
product_id: Optional[str] = None,
aggregation: str = "daily",
page_size: int = 5000,
max_pages: int = 100
) -> List[Dict[str, Any]]:
"""
Get ALL sales data via repeated paginated requests (equivalent to the original fetch_sales_data)
Pages through the endpoint until every available record has been retrieved
"""
params = {"aggregation": aggregation}
if start_date:
params["start_date"] = start_date
if end_date:
params["end_date"] = end_date
if product_id:
params["product_id"] = product_id
# Use the inherited paginated request method
try:
all_records = await self.get_paginated(
"sales",
tenant_id=tenant_id,
params=params,
page_size=page_size,
max_pages=max_pages,
timeout=2000.0 # Match original timeout
)
logger.info(f"Successfully fetched {len(all_records)} total sales records via gateway",
tenant_id=tenant_id)
return all_records
except AttributeError as e:
# Fallback: implement pagination directly if get_paginated is unavailable on the base class
logger.warning(f"Using fallback pagination due to: {e}")
return await self._fallback_paginated_sales(tenant_id, params, page_size, max_pages)
async def _fallback_paginated_sales(
self,
tenant_id: str,
base_params: Dict[str, Any],
page_size: int = 5000,
max_pages: int = 100
) -> List[Dict[str, Any]]:
"""
Fallback pagination implementation for sales data
This replicates the original pagination logic directly
"""
all_records = []
page = 0
logger.info(f"Starting fallback paginated request for sales data",
tenant_id=tenant_id, page_size=page_size)
while page < max_pages:
# Prepare pagination parameters
params = base_params.copy()
params.update({
"limit": page_size,
"offset": page * page_size
})
logger.info(f"Fetching sales data page {page + 1} (offset: {page * page_size})",
tenant_id=tenant_id)
# Make request using the base client's _make_request method
result = await self._make_request(
"GET",
"sales",
tenant_id=tenant_id,
params=params,
timeout=2000.0
)
if result is None:
logger.error(f"Failed to fetch page {page + 1}", tenant_id=tenant_id)
break
# Handle different response formats (carried over from the original code)
if isinstance(result, list):
# Direct list response (no pagination metadata)
records = result
logger.info(f"Retrieved {len(records)} records from page {page + 1} (direct list)")
if len(records) == 0:
logger.info("No records in response, pagination complete")
break
elif len(records) < page_size:
# Got fewer than requested, this is the last page
all_records.extend(records)
logger.info(f"Final page: retrieved {len(records)} records, total: {len(all_records)}")
break
else:
# Got full page, there might be more
all_records.extend(records)
logger.info(f"Full page retrieved: {len(records)} records, continuing to next page")
elif isinstance(result, dict):
# Paginated response format
records = result.get('records', result.get('data', []))
total_available = result.get('total', 0)
logger.info(f"Retrieved {len(records)} records from page {page + 1} (paginated response)")
if not records:
logger.info("No more records found in paginated response")
break
all_records.extend(records)
# Check if we've got all available records
if len(all_records) >= total_available:
logger.info(f"Retrieved all available records: {len(all_records)}/{total_available}")
break
else:
logger.warning(f"Unexpected response format: {type(result)}")
break
page += 1
logger.info(f"Fallback pagination complete: fetched {len(all_records)} total records",
tenant_id=tenant_id, pages_fetched=page)
return all_records
async def upload_sales_data(
self,
tenant_id: str,
sales_data: List[Dict[str, Any]]
) -> Optional[Dict[str, Any]]:
"""Upload sales data"""
data = {"sales": sales_data}
return await self.post("sales", data=data, tenant_id=tenant_id)
# ================================================================
# WEATHER DATA
# ================================================================
async def get_weather_historical(
self,
tenant_id: str,
start_date: str,
end_date: str,
latitude: Optional[float] = None,
longitude: Optional[float] = None
) -> Optional[List[Dict[str, Any]]]:
"""
Get weather data for a date range and location
Uses POST request as per original implementation
"""
# Prepare request payload with proper date handling
payload = {
"start_date": start_date, # Already in ISO format from calling code
"end_date": end_date, # Already in ISO format from calling code
"latitude": latitude or 40.4168, # Default Madrid coordinates
"longitude": longitude or -3.7038
}
logger.info(f"Weather request payload: {payload}", tenant_id=tenant_id)
# Use POST request with extended timeout
result = await self._make_request(
"POST",
"weather/historical",
tenant_id=tenant_id,
data=payload,
timeout=2000.0 # Match original timeout
)
if result:
logger.info(f"Successfully fetched {len(result)} weather records")
return result
else:
logger.error("Failed to fetch weather data")
return []
async def get_weather_forecast(
self,
tenant_id: str,
days: int = 1,
latitude: Optional[float] = None,
longitude: Optional[float] = None
) -> Optional[List[Dict[str, Any]]]:
"""
Get weather forecast for location
Sends a POST request with a JSON payload, as the weather API expects
"""
payload = {
"latitude": latitude or 40.4168, # Default Madrid coordinates
"longitude": longitude or -3.7038,
"days": days
}
logger.info(f"Weather forecast request params: {payload}", tenant_id=tenant_id)
result = await self._make_request(
"POST",
"weather/forecast",
tenant_id=tenant_id,
data=payload,
timeout=200.0
)
if result:
logger.info(f"Successfully fetched weather forecast for {days} days")
return result
else:
logger.error("Failed to fetch weather forecast")
return []
# ================================================================
# TRAFFIC DATA
# ================================================================
async def get_traffic_data(
self,
tenant_id: str,
start_date: str,
end_date: str,
latitude: Optional[float] = None,
longitude: Optional[float] = None
) -> Optional[List[Dict[str, Any]]]:
"""
Get traffic data for a date range and location
Uses POST request with extended timeout for Madrid traffic data processing
"""
# Prepare request payload
payload = {
"start_date": start_date, # Already in ISO format from calling code
"end_date": end_date, # Already in ISO format from calling code
"latitude": latitude or 40.4168, # Default Madrid coordinates
"longitude": longitude or -3.7038
}
logger.info(f"Traffic request payload: {payload}", tenant_id=tenant_id)
# Madrid traffic data can take 5-10 minutes to download and process
traffic_timeout = httpx.Timeout(
connect=30.0, # Connection timeout
read=600.0, # Read timeout: 10 minutes (was 30s)
write=30.0, # Write timeout
pool=30.0 # Pool timeout
)
# Use POST request with extended timeout
logger.info("Making traffic data request",
url="traffic/historical",
tenant_id=tenant_id,
timeout=traffic_timeout.read)
result = await self._make_request(
"POST",
"traffic/historical",
tenant_id=tenant_id,
data=payload,
timeout=traffic_timeout
)
if result:
logger.info(f"Successfully fetched {len(result)} traffic records")
return result
else:
logger.error("Failed to fetch traffic data - _make_request returned None")
logger.error("This could be due to: network timeout, HTTP error, authentication failure, or service unavailable")
return None
async def get_stored_traffic_data_for_training(
self,
tenant_id: str,
start_date: str,
end_date: str,
latitude: Optional[float] = None,
longitude: Optional[float] = None
) -> Optional[List[Dict[str, Any]]]:
"""
Get stored traffic data specifically for model training/re-training
This method prioritizes database-stored data over API calls
"""
# Prepare request payload
payload = {
"start_date": start_date,
"end_date": end_date,
"latitude": latitude or 40.4168, # Default Madrid coordinates
"longitude": longitude or -3.7038,
"stored_only": True # Flag to indicate we want stored data only
}
logger.info(f"Training traffic data request: {payload}", tenant_id=tenant_id)
# Standard timeout since we're only querying the database
training_timeout = httpx.Timeout(
connect=30.0,
read=120.0, # 2 minutes should be enough for a database query
write=30.0,
pool=30.0
)
result = await self._make_request(
"POST",
"traffic/stored", # New endpoint for stored traffic data
tenant_id=tenant_id,
data=payload,
timeout=training_timeout
)
if result:
logger.info(f"Successfully retrieved {len(result)} stored traffic records for training")
return result
else:
logger.warning("No stored traffic data available for training")
return None
# ================================================================
# PRODUCTS
# ================================================================
async def get_products(self, tenant_id: str) -> Optional[List[Dict[str, Any]]]:
"""Get all products for a tenant"""
result = await self.get("products", tenant_id=tenant_id)
return result.get("products", []) if result else None
async def get_product(self, tenant_id: str, product_id: str) -> Optional[Dict[str, Any]]:
"""Get a specific product"""
return await self.get(f"products/{product_id}", tenant_id=tenant_id)
async def create_product(
self,
tenant_id: str,
name: str,
category: str,
price: float,
**kwargs
) -> Optional[Dict[str, Any]]:
"""Create a new product"""
data = {
"name": name,
"category": category,
"price": price,
**kwargs
}
return await self.post("products", data=data, tenant_id=tenant_id)
async def update_product(
self,
tenant_id: str,
product_id: str,
**updates
) -> Optional[Dict[str, Any]]:
"""Update a product"""
return await self.put(f"products/{product_id}", data=updates, tenant_id=tenant_id)
# ================================================================
# STORES & LOCATIONS
# ================================================================
async def get_stores(self, tenant_id: str) -> Optional[List[Dict[str, Any]]]:
"""Get all stores for a tenant"""
result = await self.get("stores", tenant_id=tenant_id)
return result.get("stores", []) if result else None
async def get_store(self, tenant_id: str, store_id: str) -> Optional[Dict[str, Any]]:
"""Get a specific store"""
return await self.get(f"stores/{store_id}", tenant_id=tenant_id)
# ================================================================
# DATA VALIDATION & HEALTH
# ================================================================
async def validate_data_quality(
self,
tenant_id: str,
start_date: str,
end_date: str
) -> Optional[Dict[str, Any]]:
"""Validate data quality for a date range"""
params = {
"start_date": start_date,
"end_date": end_date
}
return await self.get("validation", tenant_id=tenant_id, params=params)
async def get_data_statistics(
self,
tenant_id: str,
start_date: Optional[str] = None,
end_date: Optional[str] = None
) -> Optional[Dict[str, Any]]:
"""Get data statistics for a tenant"""
params = {}
if start_date:
params["start_date"] = start_date
if end_date:
params["end_date"] = end_date
return await self.get("statistics", tenant_id=tenant_id, params=params)

View File

@@ -0,0 +1,203 @@
# shared/clients/external_client.py
"""
External Service Client
Handles all API calls to the external service (weather and traffic data)
"""
import httpx
import structlog
from typing import Dict, Any, Optional, List
from .base_service_client import BaseServiceClient
from shared.config.base import BaseServiceSettings
logger = structlog.get_logger()
class ExternalServiceClient(BaseServiceClient):
"""Client for communicating with the external service"""
def __init__(self, config: BaseServiceSettings, calling_service_name: str = "unknown"):
super().__init__(calling_service_name, config)
self.service_url = config.EXTERNAL_SERVICE_URL
def get_service_base_path(self) -> str:
return "/api/v1"
# ================================================================
# WEATHER DATA
# ================================================================
async def get_weather_historical(
self,
tenant_id: str,
start_date: str,
end_date: str,
latitude: Optional[float] = None,
longitude: Optional[float] = None
) -> Optional[List[Dict[str, Any]]]:
"""
Get weather data for a date range and location
Uses POST request as per original implementation
"""
# Prepare request payload with proper date handling
payload = {
"start_date": start_date, # Already in ISO format from calling code
"end_date": end_date, # Already in ISO format from calling code
"latitude": latitude or 40.4168, # Default Madrid coordinates
"longitude": longitude or -3.7038
}
logger.info(f"Weather request payload: {payload}", tenant_id=tenant_id)
# Use POST request with extended timeout
result = await self._make_request(
"POST",
"weather/historical",
tenant_id=tenant_id,
data=payload,
timeout=2000.0 # Match original timeout
)
if result:
logger.info(f"Successfully fetched {len(result)} weather records")
return result
else:
logger.error("Failed to fetch weather data")
return []
async def get_weather_forecast(
self,
tenant_id: str,
days: int = 1,
latitude: Optional[float] = None,
longitude: Optional[float] = None
) -> Optional[List[Dict[str, Any]]]:
"""
Get weather forecast for location
Sends a POST request with a JSON payload, as the weather API expects
"""
payload = {
"latitude": latitude or 40.4168, # Default Madrid coordinates
"longitude": longitude or -3.7038,
"days": days
}
logger.info(f"Weather forecast request params: {payload}", tenant_id=tenant_id)
result = await self._make_request(
"POST",
"weather/forecast",
tenant_id=tenant_id,
data=payload,
timeout=200.0
)
if result:
logger.info(f"Successfully fetched weather forecast for {days} days")
return result
else:
logger.error("Failed to fetch weather forecast")
return []
# ================================================================
# TRAFFIC DATA
# ================================================================
async def get_traffic_data(
self,
tenant_id: str,
start_date: str,
end_date: str,
latitude: Optional[float] = None,
longitude: Optional[float] = None
) -> Optional[List[Dict[str, Any]]]:
"""
Get traffic data for a date range and location
Uses POST request with extended timeout for Madrid traffic data processing
"""
# Prepare request payload
payload = {
"start_date": start_date, # Already in ISO format from calling code
"end_date": end_date, # Already in ISO format from calling code
"latitude": latitude or 40.4168, # Default Madrid coordinates
"longitude": longitude or -3.7038
}
logger.info(f"Traffic request payload: {payload}", tenant_id=tenant_id)
# Madrid traffic data can take 5-10 minutes to download and process
traffic_timeout = httpx.Timeout(
connect=30.0, # Connection timeout
read=600.0, # Read timeout: 10 minutes (was 30s)
write=30.0, # Write timeout
pool=30.0 # Pool timeout
)
# Use POST request with extended timeout
logger.info("Making traffic data request",
url="traffic/historical",
tenant_id=tenant_id,
timeout=traffic_timeout.read)
result = await self._make_request(
"POST",
"traffic/historical",
tenant_id=tenant_id,
data=payload,
timeout=traffic_timeout
)
if result:
logger.info(f"Successfully fetched {len(result)} traffic records")
return result
else:
logger.error("Failed to fetch traffic data - _make_request returned None")
logger.error("This could be due to: network timeout, HTTP error, authentication failure, or service unavailable")
return None
async def get_stored_traffic_data_for_training(
self,
tenant_id: str,
start_date: str,
end_date: str,
latitude: Optional[float] = None,
longitude: Optional[float] = None
) -> Optional[List[Dict[str, Any]]]:
"""
Get stored traffic data specifically for model training/re-training
This method prioritizes database-stored data over API calls
"""
# Prepare request payload
payload = {
"start_date": start_date,
"end_date": end_date,
"latitude": latitude or 40.4168, # Default Madrid coordinates
"longitude": longitude or -3.7038,
"stored_only": True # Flag to indicate we want stored data only
}
logger.info(f"Training traffic data request: {payload}", tenant_id=tenant_id)
# Standard timeout since we're only querying the database
training_timeout = httpx.Timeout(
connect=30.0,
read=120.0, # 2 minutes should be enough for a database query
write=30.0,
pool=30.0
)
result = await self._make_request(
"POST",
"traffic/stored", # New endpoint for stored traffic data
tenant_id=tenant_id,
data=payload,
timeout=training_timeout
)
if result:
logger.info(f"Successfully retrieved {len(result)} stored traffic records for training")
return result
else:
logger.warning("No stored traffic data available for training")
return None
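A usage sketch for the new client; the settings import is a placeholder for any `BaseServiceSettings` instance whose `EXTERNAL_SERVICE_URL` resolves to the external service:

```python
import asyncio

from shared.clients.external_client import ExternalServiceClient
from app.core.config import settings  # placeholder: any BaseServiceSettings

async def main():
    client = ExternalServiceClient(settings, calling_service_name="training-service")
    # Historical Madrid traffic can take minutes to assemble; the client
    # already raises its read timeout to 10 minutes for this call.
    traffic = await client.get_traffic_data(
        "tenant-123", "2025-01-01T00:00:00", "2025-01-31T23:59:59"
    )
    print(f"{len(traffic or [])} traffic records")

asyncio.run(main())
```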

View File

@@ -0,0 +1,156 @@
# shared/clients/sales_client.py
"""
Sales Service Client
Handles all API calls to the sales service
"""
import httpx
import structlog
from typing import Dict, Any, Optional, List, Union
from .base_service_client import BaseServiceClient
from shared.config.base import BaseServiceSettings
logger = structlog.get_logger()
class SalesServiceClient(BaseServiceClient):
"""Client for communicating with the sales service"""
def __init__(self, config: BaseServiceSettings, calling_service_name: str = "unknown"):
super().__init__(calling_service_name, config)
self.service_url = config.SALES_SERVICE_URL
def get_service_base_path(self) -> str:
return "/api/v1"
# ================================================================
# SALES DATA (with advanced pagination support)
# ================================================================
async def get_sales_data(
self,
tenant_id: str,
start_date: Optional[str] = None,
end_date: Optional[str] = None,
product_id: Optional[str] = None,
aggregation: str = "daily"
) -> Optional[List[Dict[str, Any]]]:
"""Get sales data for a date range"""
params = {"aggregation": aggregation}
if start_date:
params["start_date"] = start_date
if end_date:
params["end_date"] = end_date
if product_id:
params["product_id"] = product_id
result = await self.get("sales", tenant_id=tenant_id, params=params)
return result.get("sales", []) if result else None
async def get_all_sales_data(
self,
tenant_id: str,
start_date: Optional[str] = None,
end_date: Optional[str] = None,
product_id: Optional[str] = None,
aggregation: str = "daily",
page_size: int = 1000,
max_pages: int = 100
) -> List[Dict[str, Any]]:
"""
Get ALL sales data via repeated paginated requests (equivalent to the original fetch_sales_data)
Pages through the endpoint until every available record has been retrieved
"""
params = {"aggregation": aggregation}
if start_date:
params["start_date"] = start_date
if end_date:
params["end_date"] = end_date
if product_id:
params["product_id"] = product_id
# Use the inherited paginated request method
try:
all_records = await self.get_paginated(
"sales",
tenant_id=tenant_id,
params=params,
page_size=page_size,
max_pages=max_pages,
timeout=2000.0
)
logger.info(f"Successfully fetched {len(all_records)} total sales records via sales service",
tenant_id=tenant_id)
return all_records
except Exception as e:
logger.error(f"Failed to fetch paginated sales data: {e}")
return []
async def upload_sales_data(
self,
tenant_id: str,
sales_data: List[Dict[str, Any]]
) -> Optional[Dict[str, Any]]:
"""Upload sales data"""
data = {"sales": sales_data}
return await self.post("sales", data=data, tenant_id=tenant_id)
# ================================================================
# PRODUCTS
# ================================================================
async def get_products(self, tenant_id: str) -> Optional[List[Dict[str, Any]]]:
"""Get all products for a tenant"""
result = await self.get("products", tenant_id=tenant_id)
return result.get("products", []) if result else None
async def get_product(self, tenant_id: str, product_id: str) -> Optional[Dict[str, Any]]:
"""Get a specific product"""
return await self.get(f"products/{product_id}", tenant_id=tenant_id)
async def create_product(
self,
tenant_id: str,
name: str,
category: str,
price: float,
**kwargs
) -> Optional[Dict[str, Any]]:
"""Create a new product"""
data = {
"name": name,
"category": category,
"price": price,
**kwargs
}
return await self.post("products", data=data, tenant_id=tenant_id)
async def update_product(
self,
tenant_id: str,
product_id: str,
**updates
) -> Optional[Dict[str, Any]]:
"""Update a product"""
return await self.put(f"products/{product_id}", data=updates, tenant_id=tenant_id)
# ================================================================
# DATA IMPORT
# ================================================================
async def import_sales_data(
self,
tenant_id: str,
file_content: str,
file_format: str,
filename: Optional[str] = None
) -> Optional[Dict[str, Any]]:
"""Import sales data from CSV/Excel/JSON"""
data = {
"content": file_content,
"format": file_format,
"filename": filename
}
return await self.post("import", data=data, tenant_id=tenant_id)

View File

@@ -118,7 +118,8 @@ class BaseServiceSettings(BaseSettings):
AUTH_SERVICE_URL: str = os.getenv("AUTH_SERVICE_URL", "http://auth-service:8000")
TRAINING_SERVICE_URL: str = os.getenv("TRAINING_SERVICE_URL", "http://training-service:8000")
FORECASTING_SERVICE_URL: str = os.getenv("FORECASTING_SERVICE_URL", "http://forecasting-service:8000")
DATA_SERVICE_URL: str = os.getenv("DATA_SERVICE_URL", "http://data-service:8000")
SALES_SERVICE_URL: str = os.getenv("SALES_SERVICE_URL", "http://sales-service:8000")
EXTERNAL_SERVICE_URL: str = os.getenv("EXTERNAL_SERVICE_URL", "http://external-service:8000")
TENANT_SERVICE_URL: str = os.getenv("TENANT_SERVICE_URL", "http://tenant-service:8000")
NOTIFICATION_SERVICE_URL: str = os.getenv("NOTIFICATION_SERVICE_URL", "http://notification-service:8000")
NOMINATIM_SERVICE_URL: str = os.getenv("NOMINATIM_SERVICE_URL", "http://nominatim:8080")
@@ -327,7 +328,8 @@ class BaseServiceSettings(BaseSettings):
"auth": self.AUTH_SERVICE_URL,
"training": self.TRAINING_SERVICE_URL,
"forecasting": self.FORECASTING_SERVICE_URL,
"data": self.DATA_SERVICE_URL,
"sales": self.SALES_SERVICE_URL,
"external": self.EXTERNAL_SERVICE_URL,
"tenant": self.TENANT_SERVICE_URL,
"notification": self.NOTIFICATION_SERVICE_URL,
}
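Both new URLs follow the existing convention of an env var with an in-cluster default, so deployments override them like any other service URL. A sketch (the env vars must be set before the settings module is first imported):

```python
import os

# Override the in-cluster defaults, e.g. for local development.
os.environ["SALES_SERVICE_URL"] = "http://localhost:8010"
os.environ["EXTERNAL_SERVICE_URL"] = "http://localhost:8011"

from shared.config.base import BaseServiceSettings

settings = BaseServiceSettings()
print(settings.SALES_SERVICE_URL)     # http://localhost:8010
print(settings.EXTERNAL_SERVICE_URL)  # http://localhost:8011
```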

View File

@@ -1,78 +0,0 @@
"""
Base database configuration for all microservices
"""
import os
from sqlalchemy import create_engine
from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
from sqlalchemy.orm import sessionmaker, declarative_base
from sqlalchemy.pool import StaticPool
from contextlib import asynccontextmanager
import logging
logger = logging.getLogger(__name__)
Base = declarative_base()
class DatabaseManager:
"""Database manager for microservices"""
def __init__(self, database_url: str):
self.database_url = database_url
self.async_engine = create_async_engine(
database_url,
echo=False,
pool_pre_ping=True,
pool_recycle=300,
pool_size=20,
max_overflow=30
)
self.async_session_local = sessionmaker(
self.async_engine,
class_=AsyncSession,
expire_on_commit=False
)
async def get_db(self):
"""Get database session for request handlers"""
async with self.async_session_local() as session:
try:
yield session
except Exception as e:
logger.error(f"Database session error: {e}")
await session.rollback()
raise
finally:
await session.close()
@asynccontextmanager
async def get_background_session(self):
"""
✅ NEW: Get database session for background tasks
Usage:
async with database_manager.get_background_session() as session:
# Your background task code here
await session.commit()
"""
async with self.async_session_local() as session:
try:
yield session
await session.commit()
except Exception as e:
await session.rollback()
logger.error(f"Background task database error: {e}")
raise
finally:
await session.close()
async def create_tables(self):
"""Create database tables"""
async with self.async_engine.begin() as conn:
await conn.run_sync(Base.metadata.create_all)
async def drop_tables(self):
"""Drop database tables"""
async with self.async_engine.begin() as conn:
await conn.run_sync(Base.metadata.drop_all)
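For context on the removed file: `get_db` is written as an async generator precisely so it can be wired in as a FastAPI dependency. An illustrative sketch (the route, DSN, and wiring are assumptions, not part of this diff):

```python
from fastapi import Depends, FastAPI
from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession

app = FastAPI()
db = DatabaseManager("postgresql+asyncpg://user:pass@db:5432/app")  # placeholder DSN

@app.get("/health/db")
async def db_health(session: AsyncSession = Depends(db.get_db)):
    # get_db yields a per-request session and rolls back on error
    await session.execute(text("SELECT 1"))
    return {"db": "ok"}
```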