Fix new Frontend 12

Urtzi Alfaro
2025-08-04 18:21:42 +02:00
parent d4c276c888
commit 35b02ca364
6 changed files with 528 additions and 174 deletions

View File

@@ -75,19 +75,13 @@ class TrainingDataOrchestrator:
        try:
            await publish_job_progress(job_id, tenant_id, 5, "Extrayendo datos de ventas",
                                       step_details="Conectando con servicio de datos")
            sales_data = await self.data_client.fetch_sales_data(tenant_id)
            # Step 1: Extract and validate sales data date range
            await publish_job_progress(job_id, tenant_id, 10, "Validando fechas de datos de venta",
                                       step_details="Aplicando restricciones de fuentes de datos")
            sales_date_range = self._extract_sales_date_range(sales_data)
            logger.info(f"Sales data range detected: {sales_date_range.start} to {sales_date_range.end}")
            # Step 2: Apply date alignment across all data sources
            await publish_job_progress(job_id, tenant_id, 15, "Alineando el rango de fechas",
                                       step_details="Aplicando la alineación de fechas en todas las fuentes de datos")
            aligned_range = self.date_alignment_service.validate_and_align_dates(
                user_sales_range=sales_date_range,
                requested_start=requested_start,
@@ -99,21 +93,15 @@ class TrainingDataOrchestrator:
logger.info(f"Applied constraints: {aligned_range.constraints}")
# Step 3: Filter sales data to aligned date range
await publish_job_progress(job_id, tenant_id, 20, "Alinear el rango de las ventas",
step_details="Aplicar la alineación de fechas de las ventas")
filtered_sales = self._filter_sales_data(sales_data, aligned_range)
# Step 4: Collect external data sources concurrently
logger.info("Collecting external data sources...")
await publish_job_progress(job_id, tenant_id, 25, "Recopilación de fuentes de datos externas",
step_details="Recopilación de fuentes de datos externas")
weather_data, traffic_data = await self._collect_external_data(
aligned_range, bakery_location, tenant_id
)
# Step 5: Validate data quality
await publish_job_progress(job_id, tenant_id, 30, "Validando la calidad de los datos",
step_details="Validando la calidad de los datos")
data_quality_results = self._validate_data_sources(
filtered_sales, weather_data, traffic_data, aligned_range
)
@@ -140,8 +128,6 @@ class TrainingDataOrchestrator:
            )
            # Step 7: Final validation
            await publish_job_progress(job_id, tenant_id, 35, "Validación final de los datos",
                                       step_details="Validación final de los datos")
            final_validation = self.validate_training_data_quality(training_dataset)
            training_dataset.metadata["final_validation"] = final_validation

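Note: every status update in the orchestrator above goes through publish_job_progress with a percentage, a user-facing message, and an optional step_details string. A minimal sketch of what such a publisher could look like is shown below; the Redis pub/sub transport, the channel name, and the payload fields are assumptions for illustration only and are not taken from this commit.

    import json
    from datetime import datetime, timezone
    from typing import Optional

    import redis.asyncio as aioredis  # assumed transport; not part of this diff

    _redis = aioredis.Redis()

    async def publish_job_progress(job_id: str, tenant_id: str, progress: int,
                                   message: str, step_details: Optional[str] = None,
                                   estimated_time_remaining_minutes: Optional[int] = None) -> None:
        # Build the event payload the frontend can render as a progress bar plus status text.
        payload = {
            "job_id": job_id,
            "tenant_id": tenant_id,
            "progress": progress,  # 0-100
            "message": message,  # user-facing status text (Spanish in the calls above)
            "step_details": step_details,
            "estimated_time_remaining_minutes": estimated_time_remaining_minutes,
            "timestamp": datetime.now(timezone.utc).isoformat(),
        }
        # Hypothetical channel naming; the real service may use a queue, WebSocket, or DB instead.
        await _redis.publish(f"training_jobs:{tenant_id}:{job_id}", json.dumps(payload))
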
View File

@@ -78,7 +78,6 @@ class TrainingService:
            # Step 1: Prepare training dataset with date alignment and orchestration
            logger.info("Step 1: Preparing and aligning training data")
            await publish_job_progress(job_id, tenant_id, 0, "Extrayendo datos de ventas")
            training_dataset = await self.orchestrator.prepare_training_data(
                tenant_id=tenant_id,
                bakery_location=bakery_location,
@@ -86,10 +85,10 @@ class TrainingService:
                requested_end=requested_end,
                job_id=job_id
            )
            await publish_job_progress(job_id, tenant_id, 10, "data_validation", estimated_time_remaining_minutes=8)
            # Step 2: Execute ML training pipeline
            logger.info("Step 2: Starting ML training pipeline")
            await publish_job_progress(job_id, tenant_id, 35, "Starting ML training pipeline")
            training_results = await self.trainer.train_tenant_models(
                tenant_id=tenant_id,
                training_dataset=training_dataset,
@@ -117,7 +116,7 @@ class TrainingService:
            }
            logger.info(f"Training job {job_id} completed successfully")
            await publish_job_completed(job_id, tenant_id, final_result);
            await publish_job_completed(job_id, tenant_id, final_result)
            return TrainingService.create_detailed_training_response(final_result)
        except Exception as e:
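            # Editor's sketch: the rest of this handler is not shown in the diff. A plausible
            # shape, mirroring publish_job_completed above; publish_job_failed and its
            # arguments are assumptions, not part of this commit.
            logger.error(f"Training job {job_id} failed: {e}")
            await publish_job_failed(job_id, tenant_id, error=str(e))
            raise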