Add more services
221
services/production/app/repositories/base.py
Normal file
@@ -0,0 +1,221 @@
"""
Base Repository for Production Service

Service-specific repository base class with production utilities
"""

import uuid
from datetime import datetime, date
from typing import Optional, List, Dict, Any, Type

import structlog
from sqlalchemy.ext.asyncio import AsyncSession

from shared.database.repository import BaseRepository
from shared.database.exceptions import DatabaseError
from shared.database.transactions import transactional

logger = structlog.get_logger()

class ProductionBaseRepository(BaseRepository):
    """Base repository for production service with common production operations"""

    def __init__(self, model: Type, session: AsyncSession, cache_ttl: Optional[int] = 300):
        # Production data is more dynamic, so use a shorter cache time (5 minutes)
        super().__init__(model, session, cache_ttl)

    @transactional
    async def get_by_tenant_id(self, tenant_id: str, skip: int = 0, limit: int = 100) -> List:
        """Get records by tenant ID"""
        if hasattr(self.model, 'tenant_id'):
            return await self.get_multi(
                skip=skip,
                limit=limit,
                filters={"tenant_id": tenant_id},
                order_by="created_at",
                order_desc=True
            )
        return await self.get_multi(skip=skip, limit=limit)

    @transactional
    async def get_by_status(
        self,
        tenant_id: str,
        status: str,
        skip: int = 0,
        limit: int = 100
    ) -> List:
        """Get records by tenant and status"""
        if hasattr(self.model, 'status'):
            return await self.get_multi(
                skip=skip,
                limit=limit,
                filters={
                    "tenant_id": tenant_id,
                    "status": status
                },
                order_by="created_at",
                order_desc=True
            )
        return await self.get_by_tenant_id(tenant_id, skip, limit)

    @transactional
    async def get_by_date_range(
        self,
        tenant_id: str,
        start_date: date,
        end_date: date,
        date_field: str = "created_at",
        skip: int = 0,
        limit: int = 100
    ) -> List:
        """Get records by tenant and date range"""
        try:
            start_datetime = datetime.combine(start_date, datetime.min.time())
            end_datetime = datetime.combine(end_date, datetime.max.time())

            filters = {
                "tenant_id": tenant_id,
                f"{date_field}__gte": start_datetime,
                f"{date_field}__lte": end_datetime
            }

            return await self.get_multi(
                skip=skip,
                limit=limit,
                filters=filters,
                order_by=date_field,
                order_desc=True
            )
        except Exception as e:
            logger.error("Error fetching records by date range",
                         error=str(e), tenant_id=tenant_id)
            raise DatabaseError(f"Failed to fetch records by date range: {str(e)}")

    @transactional
    async def get_active_records(
        self,
        tenant_id: str,
        active_field: str = "is_active",
        skip: int = 0,
        limit: int = 100
    ) -> List:
        """Get active records for a tenant"""
        if hasattr(self.model, active_field):
            return await self.get_multi(
                skip=skip,
                limit=limit,
                filters={
                    "tenant_id": tenant_id,
                    active_field: True
                },
                order_by="created_at",
                order_desc=True
            )
        return await self.get_by_tenant_id(tenant_id, skip, limit)
    def _validate_production_data(
        self,
        data: Dict[str, Any],
        required_fields: List[str]
    ) -> Dict[str, Any]:
        """Validate production data with required fields"""
        errors = []

        # Check required fields
        for field in required_fields:
            if field not in data or data[field] is None:
                errors.append(f"Missing required field: {field}")

        # Validate tenant_id format (must be a UUID)
        if "tenant_id" in data:
            try:
                uuid.UUID(str(data["tenant_id"]))
            except (ValueError, TypeError):
                errors.append("Invalid tenant_id format")

        # Validate datetime fields
        datetime_fields = ["planned_start_time", "planned_end_time", "actual_start_time", "actual_end_time"]
        for field in datetime_fields:
            if field in data and data[field] is not None:
                if not isinstance(data[field], (datetime, str)):
                    errors.append(f"Invalid datetime format for {field}")

        # Validate numeric fields (compare the converted value so string inputs don't raise)
        numeric_fields = ["planned_quantity", "actual_quantity", "quality_score", "yield_percentage"]
        for field in numeric_fields:
            if field in data and data[field] is not None:
                try:
                    value = float(data[field])
                    if value < 0:
                        errors.append(f"{field} cannot be negative")
                except (ValueError, TypeError):
                    errors.append(f"Invalid numeric value for {field}")

        # Validate percentage fields (0-100)
        percentage_fields = ["yield_percentage", "efficiency_percentage", "utilization_percentage"]
        for field in percentage_fields:
            if field in data and data[field] is not None:
                try:
                    value = float(data[field])
                    if value < 0 or value > 100:
                        errors.append(f"{field} must be between 0 and 100")
                except (ValueError, TypeError):
                    pass  # Already reported by the numeric validation above

        return {
            "is_valid": len(errors) == 0,
            "errors": errors
        }
    async def get_production_statistics(
        self,
        tenant_id: str,
        start_date: date,
        end_date: date
    ) -> Dict[str, Any]:
        """Get production statistics for a tenant and date range"""
        try:
            # Model-specific aggregations would be implemented in subclasses;
            # for now, return a basic count over the date range.
            records = await self.get_by_date_range(
                tenant_id, start_date, end_date, limit=1000
            )

            return {
                "total_records": len(records),
                "period_start": start_date.isoformat(),
                "period_end": end_date.isoformat(),
                "tenant_id": tenant_id
            }

        except Exception as e:
            logger.error("Error calculating production statistics",
                         error=str(e), tenant_id=tenant_id)
            raise DatabaseError(f"Failed to calculate statistics: {str(e)}")

    async def check_duplicate(
        self,
        tenant_id: str,
        unique_fields: Dict[str, Any]
    ) -> bool:
        """Check if a record with the same unique fields exists"""
        try:
            filters = {"tenant_id": tenant_id}
            filters.update(unique_fields)

            existing = await self.get_multi(
                filters=filters,
                limit=1
            )

            return len(existing) > 0

        except Exception as e:
            logger.error("Error checking for duplicates",
                         error=str(e), tenant_id=tenant_id)
            return False
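A minimal usage sketch (not part of the commit): assuming a hypothetical ProductionOrder model with tenant_id, order_number, and status columns, a concrete service repository would build on ProductionBaseRepository roughly like this.

# Hypothetical example only -- the ProductionOrder model, its import path, and the
# field names below are assumptions, not part of this diff.
from sqlalchemy.ext.asyncio import AsyncSession

from app.models import ProductionOrder  # assumed model location
from app.repositories.base import ProductionBaseRepository


class ProductionOrderRepository(ProductionBaseRepository):
    """Repository for the assumed ProductionOrder model."""

    def __init__(self, session: AsyncSession):
        super().__init__(ProductionOrder, session)

    async def validate_new_order(self, data: dict) -> None:
        """Raise if the payload is invalid or duplicates an existing order."""
        result = self._validate_production_data(
            data, required_fields=["tenant_id", "order_number", "planned_quantity"]
        )
        if not result["is_valid"]:
            raise ValueError("; ".join(result["errors"]))
        if await self.check_duplicate(
            data["tenant_id"], {"order_number": data["order_number"]}
        ):
            raise ValueError(f"Order {data['order_number']} already exists")


# Usage inside an async handler that already has an AsyncSession:
#   repo = ProductionOrderRepository(session)
#   await repo.validate_new_order(payload)
#   open_orders = await repo.get_by_status(tenant_id, status="open", limit=50)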