Add POI feature and improve the overall backend implementation
This commit is contained in:
123
services/external/app/models/poi_context.py
vendored
Normal file
123
services/external/app/models/poi_context.py
vendored
Normal file
@@ -0,0 +1,123 @@
|
||||
"""
|
||||
POI Context Model
|
||||
|
||||
Stores Point of Interest detection results and ML features for bakery locations.
|
||||
Used for location-based demand forecasting with contextual features.
|
||||
"""
|
||||
|
||||
from sqlalchemy import Column, String, DateTime, Float, Index, Integer
|
||||
from sqlalchemy.dialects.postgresql import UUID, JSONB
|
||||
from datetime import datetime, timezone, timedelta
|
||||
import uuid
|
||||
|
||||
from app.core.database import Base
|
||||
|
||||
|
||||
class TenantPOIContext(Base):
    """
    POI (Point of Interest) context for a bakery location.

    Stores detected POIs around the bakery and the calculated ML features
    for demand forecasting with location-specific context.  One row per
    tenant (``tenant_id`` is unique).
    """

    __tablename__ = "tenant_poi_contexts"

    # Fallback refresh interval; must stay in sync with the
    # refresh_interval_days column default below.
    DEFAULT_REFRESH_INTERVAL_DAYS = 180

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    tenant_id = Column(UUID(as_uuid=True), nullable=False, unique=True, index=True)

    # Location (denormalized for quick reference and spatial queries)
    latitude = Column(Float, nullable=False)
    longitude = Column(Float, nullable=False)

    # POI Detection Results (full raw data)
    # Structure: {
    #   "schools": {
    #     "pois": [{"osm_id": "...", "name": "...", "lat": ..., "lon": ...}],
    #     "features": {"proximity_score": 3.45, "count_0_100m": 2, ...},
    #     "count": 5
    #   },
    #   "offices": {...},
    #   ...
    # }
    poi_detection_results = Column(JSONB, nullable=False, default=dict)

    # ML Features (flat structure for easy model ingestion)
    # Structure: {
    #   "poi_schools_proximity_score": 3.45,
    #   "poi_schools_weighted_proximity_score": 5.18,
    #   "poi_schools_count_0_100m": 2,
    #   "poi_offices_proximity_score": 1.23,
    #   ...
    # }
    ml_features = Column(JSONB, nullable=False, default=dict)

    # Summary Statistics
    total_pois_detected = Column(Integer, default=0)
    high_impact_categories = Column(JSONB, default=list)  # Categories with significant POI presence
    relevant_categories = Column(JSONB, default=list)  # Categories that passed relevance thresholds

    # Detection Metadata
    detection_timestamp = Column(DateTime(timezone=True), nullable=False)
    detection_source = Column(String(50), default="overpass_api")
    detection_status = Column(String(20), default="completed")  # completed, failed, partial
    detection_error = Column(String(500), nullable=True)  # Error message if detection failed

    # Data Freshness Strategy
    # POIs don't change frequently, refresh every 6 months
    next_refresh_date = Column(DateTime(timezone=True), nullable=True)
    refresh_interval_days = Column(Integer, default=180)  # 6 months default
    last_refreshed_at = Column(DateTime(timezone=True), nullable=True)

    # Timestamps
    created_at = Column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc))
    updated_at = Column(
        DateTime(timezone=True),
        default=lambda: datetime.now(timezone.utc),
        onupdate=lambda: datetime.now(timezone.utc)
    )

    __table_args__ = (
        Index('idx_tenant_poi_location', 'latitude', 'longitude'),
        Index('idx_tenant_poi_refresh', 'next_refresh_date'),
        Index('idx_tenant_poi_status', 'detection_status'),
    )

    def to_dict(self) -> dict:
        """Serialize the record to a JSON-safe dictionary for API responses.

        UUIDs are stringified and datetimes rendered as ISO-8601 (or None
        when unset); JSONB columns are passed through unchanged.
        """
        return {
            "id": str(self.id),
            "tenant_id": str(self.tenant_id),
            "location": {
                "latitude": self.latitude,
                "longitude": self.longitude
            },
            "poi_detection_results": self.poi_detection_results,
            "ml_features": self.ml_features,
            "total_pois_detected": self.total_pois_detected,
            "high_impact_categories": self.high_impact_categories,
            "relevant_categories": self.relevant_categories,
            "detection_timestamp": self.detection_timestamp.isoformat() if self.detection_timestamp else None,
            "detection_source": self.detection_source,
            "detection_status": self.detection_status,
            "detection_error": self.detection_error,
            "next_refresh_date": self.next_refresh_date.isoformat() if self.next_refresh_date else None,
            "last_refreshed_at": self.last_refreshed_at.isoformat() if self.last_refreshed_at else None,
            "created_at": self.created_at.isoformat() if self.created_at else None,
            "updated_at": self.updated_at.isoformat() if self.updated_at else None
        }

    def is_stale(self) -> bool:
        """Return True when the POI data is due for a refresh.

        A missing ``next_refresh_date`` is treated as stale so freshly
        created rows are picked up by the refresh scheduler.
        """
        if not self.next_refresh_date:
            return True
        # NOTE(review): assumes next_refresh_date is timezone-aware
        # (DateTime(timezone=True)); a naive value here would raise TypeError.
        return datetime.now(timezone.utc) > self.next_refresh_date

    def calculate_next_refresh(self) -> datetime:
        """Return the next refresh date based on the configured interval.

        Falls back to DEFAULT_REFRESH_INTERVAL_DAYS when
        ``refresh_interval_days`` is still None: SQLAlchemy column defaults
        are only applied at INSERT time, so in-memory instances that have
        not been flushed would otherwise raise TypeError here.
        """
        interval = self.refresh_interval_days or self.DEFAULT_REFRESH_INTERVAL_DAYS
        return datetime.now(timezone.utc) + timedelta(days=interval)

    def mark_refreshed(self) -> None:
        """Record a successful refresh and schedule the next one."""
        self.last_refreshed_at = datetime.now(timezone.utc)
        self.next_refresh_date = self.calculate_next_refresh()
|
||||
154
services/external/app/models/poi_refresh_job.py
vendored
Normal file
154
services/external/app/models/poi_refresh_job.py
vendored
Normal file
@@ -0,0 +1,154 @@
|
||||
"""
|
||||
POI Refresh Job Model
|
||||
|
||||
Tracks background jobs for periodic POI context refresh.
|
||||
"""
|
||||
|
||||
from sqlalchemy import Column, String, DateTime, Integer, Boolean, Text, Float
|
||||
from sqlalchemy.dialects.postgresql import UUID, JSONB
|
||||
from datetime import datetime, timezone
|
||||
import uuid
|
||||
|
||||
from app.core.database import Base
|
||||
|
||||
|
||||
class POIRefreshJob(Base):
    """
    POI Refresh Background Job Model.

    Tracks periodic POI context refresh jobs for all tenants.
    Jobs run on a configurable schedule (default: 180 days).
    """

    __tablename__ = "poi_refresh_jobs"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)

    # Job scheduling
    scheduled_at = Column(
        DateTime(timezone=True),
        nullable=False,
        index=True,
        comment="When this job was scheduled"
    )
    started_at = Column(
        DateTime(timezone=True),
        nullable=True,
        comment="When job execution started"
    )
    completed_at = Column(
        DateTime(timezone=True),
        nullable=True,
        comment="When job execution completed"
    )

    # Job status
    status = Column(
        String(50),
        nullable=False,
        default="pending",
        index=True,
        comment="Job status: pending, running, completed, failed"
    )

    # Job execution details
    attempt_count = Column(
        Integer,
        nullable=False,
        default=0,
        comment="Number of execution attempts"
    )
    max_attempts = Column(
        Integer,
        nullable=False,
        default=3,
        comment="Maximum number of retry attempts"
    )

    # Location data (cached for job execution)
    latitude = Column(
        Float,
        nullable=False,
        comment="Bakery latitude for POI detection"
    )
    longitude = Column(
        Float,
        nullable=False,
        comment="Bakery longitude for POI detection"
    )

    # Results
    pois_detected = Column(
        Integer,
        nullable=True,
        comment="Number of POIs detected in this refresh"
    )
    changes_detected = Column(
        Boolean,
        default=False,
        comment="Whether significant changes were detected"
    )
    change_summary = Column(
        JSONB,
        nullable=True,
        comment="Summary of changes detected"
    )

    # Error handling
    error_message = Column(
        Text,
        nullable=True,
        comment="Error message if job failed"
    )
    error_details = Column(
        JSONB,
        nullable=True,
        comment="Detailed error information"
    )

    # Next execution
    next_scheduled_at = Column(
        DateTime(timezone=True),
        nullable=True,
        index=True,
        comment="When next refresh should be scheduled"
    )

    # Metadata
    created_at = Column(
        DateTime(timezone=True),
        nullable=False,
        default=lambda: datetime.now(timezone.utc)
    )
    updated_at = Column(
        DateTime(timezone=True),
        nullable=False,
        default=lambda: datetime.now(timezone.utc),
        onupdate=lambda: datetime.now(timezone.utc)
    )

    def __repr__(self):
        return (
            f"<POIRefreshJob(id={self.id}, tenant_id={self.tenant_id}, "
            f"status={self.status}, scheduled_at={self.scheduled_at})>"
        )

    @property
    def is_overdue(self) -> bool:
        """Return True when a pending/failed job is past its scheduled time.

        Jobs that are running or already completed are never overdue.  A
        missing ``scheduled_at`` (instance not yet flushed) is treated as
        not overdue instead of raising TypeError on the comparison.
        """
        if self.status in ("completed", "running"):
            return False
        if self.scheduled_at is None:
            return False
        # NOTE(review): assumes scheduled_at is timezone-aware
        # (DateTime(timezone=True)); a naive value would raise TypeError.
        return datetime.now(timezone.utc) > self.scheduled_at

    @property
    def can_retry(self) -> bool:
        """Return True while the attempt budget is not exhausted.

        Column defaults (attempt_count=0, max_attempts=3) are only applied
        at INSERT, so None values on unflushed instances are substituted
        here to avoid a ``None < None`` TypeError.
        """
        attempts = self.attempt_count if self.attempt_count is not None else 0
        limit = self.max_attempts if self.max_attempts is not None else 3
        return attempts < limit

    @property
    def duration_seconds(self) -> float | None:
        """Return the job's wall-clock duration, or None if not finished."""
        if self.started_at and self.completed_at:
            return (self.completed_at - self.started_at).total_seconds()
        return None
|
||||
Reference in New Issue
Block a user