Add POI feature and improve the overall backend implementation
services/external/app/models/poi_context.py
@@ -0,0 +1,123 @@
"""
POI Context Model

Stores Point of Interest detection results and ML features for bakery locations.
Used for location-based demand forecasting with contextual features.
"""

from sqlalchemy import Column, String, DateTime, Float, Index, Integer
from sqlalchemy.dialects.postgresql import UUID, JSONB
from datetime import datetime, timezone, timedelta
import uuid

from app.core.database import Base


class TenantPOIContext(Base):
    """
    POI (Point of Interest) context for bakery location.

    Stores detected POIs around bakery and calculated ML features
    for demand forecasting with location-specific context.
    """

    __tablename__ = "tenant_poi_contexts"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    tenant_id = Column(UUID(as_uuid=True), nullable=False, unique=True, index=True)

    # Location (denormalized for quick reference and spatial queries)
    latitude = Column(Float, nullable=False)
    longitude = Column(Float, nullable=False)

    # POI Detection Results (full raw data)
    # Structure: {
    #     "schools": {
    #         "pois": [{"osm_id": "...", "name": "...", "lat": ..., "lon": ...}],
    #         "features": {"proximity_score": 3.45, "count_0_100m": 2, ...},
    #         "count": 5
    #     },
    #     "offices": {...},
    #     ...
    # }
    poi_detection_results = Column(JSONB, nullable=False, default=dict)

    # ML Features (flat structure for easy model ingestion)
    # Structure: {
    #     "poi_schools_proximity_score": 3.45,
    #     "poi_schools_weighted_proximity_score": 5.18,
    #     "poi_schools_count_0_100m": 2,
    #     "poi_offices_proximity_score": 1.23,
    #     ...
    # }
    ml_features = Column(JSONB, nullable=False, default=dict)

    # Summary Statistics
    total_pois_detected = Column(Integer, default=0)
    high_impact_categories = Column(JSONB, default=list)  # Categories with significant POI presence
    relevant_categories = Column(JSONB, default=list)  # Categories that passed relevance thresholds

    # Detection Metadata
    detection_timestamp = Column(DateTime(timezone=True), nullable=False)
    detection_source = Column(String(50), default="overpass_api")
    detection_status = Column(String(20), default="completed")  # completed, failed, partial
    detection_error = Column(String(500), nullable=True)  # Error message if detection failed

    # Data Freshness Strategy
    # POIs don't change frequently, refresh every 6 months
    next_refresh_date = Column(DateTime(timezone=True), nullable=True)
    refresh_interval_days = Column(Integer, default=180)  # 6 months default
    last_refreshed_at = Column(DateTime(timezone=True), nullable=True)

    # Timestamps
    created_at = Column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc))
    updated_at = Column(
        DateTime(timezone=True),
        default=lambda: datetime.now(timezone.utc),
        onupdate=lambda: datetime.now(timezone.utc)
    )

    __table_args__ = (
        Index('idx_tenant_poi_location', 'latitude', 'longitude'),
        Index('idx_tenant_poi_refresh', 'next_refresh_date'),
        Index('idx_tenant_poi_status', 'detection_status'),
    )

    def to_dict(self):
        """Convert to dictionary for API responses"""
        return {
            "id": str(self.id),
            "tenant_id": str(self.tenant_id),
            "location": {
                "latitude": self.latitude,
                "longitude": self.longitude
            },
            "poi_detection_results": self.poi_detection_results,
            "ml_features": self.ml_features,
            "total_pois_detected": self.total_pois_detected,
            "high_impact_categories": self.high_impact_categories,
            "relevant_categories": self.relevant_categories,
            "detection_timestamp": self.detection_timestamp.isoformat() if self.detection_timestamp else None,
            "detection_source": self.detection_source,
            "detection_status": self.detection_status,
            "detection_error": self.detection_error,
            "next_refresh_date": self.next_refresh_date.isoformat() if self.next_refresh_date else None,
            "last_refreshed_at": self.last_refreshed_at.isoformat() if self.last_refreshed_at else None,
            "created_at": self.created_at.isoformat() if self.created_at else None,
            "updated_at": self.updated_at.isoformat() if self.updated_at else None
        }

    def is_stale(self) -> bool:
        """Check if POI data needs refresh"""
        if not self.next_refresh_date:
            return True
        return datetime.now(timezone.utc) > self.next_refresh_date

    def calculate_next_refresh(self) -> datetime:
        """Calculate next refresh date based on interval"""
        return datetime.now(timezone.utc) + timedelta(days=self.refresh_interval_days)

    def mark_refreshed(self):
        """Mark as refreshed and calculate next refresh date"""
        self.last_refreshed_at = datetime.now(timezone.utc)
        self.next_refresh_date = self.calculate_next_refresh()
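For context, a minimal usage sketch (not part of this commit) showing how the model might be populated after a detection run. The synchronous Session, the `store_poi_context` helper, the import path, and the detection payload are assumptions; the payload shape follows the inline comments in the model above.

# Hypothetical helper, assuming a synchronous SQLAlchemy Session bound to the
# service's database and the payload structure documented in the model.
import uuid
from datetime import datetime, timezone

from sqlalchemy.orm import Session

from app.models.poi_context import TenantPOIContext


def store_poi_context(session: Session, tenant_id: uuid.UUID, lat: float, lon: float,
                      detection_results: dict, ml_features: dict) -> dict:
    context = TenantPOIContext(
        tenant_id=tenant_id,
        latitude=lat,
        longitude=lon,
        poi_detection_results=detection_results,
        ml_features=ml_features,
        total_pois_detected=sum(c.get("count", 0) for c in detection_results.values()),
        detection_timestamp=datetime.now(timezone.utc),
        # Column defaults apply only at INSERT time, so set the interval explicitly;
        # mark_refreshed() reads it before the row is flushed.
        refresh_interval_days=180,
    )
    context.mark_refreshed()  # sets last_refreshed_at and next_refresh_date
    session.add(context)
    session.commit()
    return context.to_dict()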
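Likewise, a sketch of how a scheduled sweep could use `next_refresh_date` (backed by `idx_tenant_poi_refresh`) to find and refresh stale rows. `refresh_stale_contexts` and `run_poi_detection` are hypothetical names, not part of the service; the detector is assumed to return the detection results and ML features as two dicts.

# Hypothetical scheduler-side sweep; run_poi_detection stands in for the actual
# Overpass-based detector and is assumed to return (detection_results, ml_features).
from datetime import datetime, timezone

from sqlalchemy import or_, select
from sqlalchemy.orm import Session

from app.models.poi_context import TenantPOIContext


def refresh_stale_contexts(session: Session, run_poi_detection) -> int:
    now = datetime.now(timezone.utc)
    # Mirror is_stale(): rows with no next_refresh_date are also due.
    stale = session.execute(
        select(TenantPOIContext).where(
            or_(TenantPOIContext.next_refresh_date.is_(None),
                TenantPOIContext.next_refresh_date <= now)
        )
    ).scalars().all()

    for context in stale:
        results, features = run_poi_detection(context.latitude, context.longitude)
        context.poi_detection_results = results
        context.ml_features = features
        context.detection_timestamp = now
        context.detection_status = "completed"
        context.mark_refreshed()

    session.commit()
    return len(stale)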