This commit is contained in the following branches and tags:
2026-05-10 22:49:07 +03:30
parent 2d1f7da89e
commit 2a6321a263
15 changed files with 2667 additions and 162 deletions
+204
View File
@@ -201,6 +201,210 @@ class RemoteSensingApiTests(TestCase):
self.assertEqual(payload["run"]["stage"], "completed")
self.assertEqual(payload["run"]["selected_features"], ["ndvi"])
@patch("location_data.views._get_remote_sensing_async_result")
def test_run_status_endpoint_returns_detailed_task_progress(self, mock_async_result):
    """The status endpoint surfaces per-metric progress for a running fetch stage."""
    task_id = "e723ba3e-c53c-401b-b3a0-5f7013c7b401"
    # Celery reports the task as STARTED, mid-way through fetching metrics.
    mock_async_result.return_value = SimpleNamespace(
        state="STARTED",
        result=None,
        info={"message": "fetching_remote_metrics"},
        ready=lambda: False,
        successful=lambda: False,
        failed=lambda: False,
    )
    # One of two metrics is done; ndwi is still in flight.
    metric_progress = {
        "total_metrics": 2,
        "completed_metric_count": 1,
        "active_metric": "ndwi",
        "completed_metrics": ["ndvi"],
        "failed_metrics": [],
        "states": [
            {"metric": "ndvi", "status": "completed"},
            {"metric": "ndwi", "status": "running"},
        ],
    }
    run_metadata = {
        "task_id": task_id,
        "stage": "fetching_remote_metrics",
        "selected_features": ["ndvi", "ndwi"],
        "timestamps": {
            "queued_at": "2026-05-10T08:00:00Z",
            "started_at": "2026-05-10T08:00:03Z",
            "fetching_remote_metrics_at": "2026-05-10T08:00:12Z",
        },
        "stage_details": {
            "fetching_remote_metrics": {
                "requested_cell_count": 2,
                "metric_progress": metric_progress,
            }
        },
    }
    run = RemoteSensingRun.objects.create(
        soil_location=self.location,
        block_subdivision=self.subdivision,
        block_code="",
        chunk_size_sqm=900,
        temporal_start=self.temporal_start,
        temporal_end=self.temporal_end,
        status=RemoteSensingRun.STATUS_RUNNING,
        metadata=run_metadata,
    )

    response = self.client.get(f"/remote-sensing/runs/{task_id}/status/")

    self.assertEqual(response.status_code, 200)
    payload = response.json()["data"]
    self.assertEqual(payload["status"], "running")
    task_payload = payload["task"]
    self.assertEqual(task_payload["current_stage"], "fetching_remote_metrics")
    self.assertEqual(task_payload["metric_progress"]["active_metric"], "ndwi")
    self.assertEqual(task_payload["stages"][-1]["status"], "running")
    self.assertEqual(task_payload["celery"]["state"], "STARTED")
    self.assertEqual(task_payload["celery"]["info"]["message"], "fetching_remote_metrics")
    self.assertEqual(payload["run"]["id"], run.id)
@patch("location_data.views._get_remote_sensing_async_result")
def test_run_status_endpoint_returns_retrying_status_when_celery_is_retrying(self, mock_async_result):
    """The status endpoint reports a retrying pipeline with retry details, not a failure.

    Celery's RETRY state carries the triggering error in both ``result`` and
    ``info``; the endpoint should expose it as ``last_error`` while omitting
    ``failure_reason`` (the run has not terminally failed).
    """
    mock_async_result.return_value = SimpleNamespace(
        state="RETRY",
        result="temporary openEO timeout",
        info="temporary openEO timeout",
        ready=lambda: False,
        successful=lambda: False,
        failed=lambda: False,
    )
    task_id = "e723ba3e-c53c-401b-b3a0-5f7013c7b401"
    # The endpoint looks the run up by task_id; the created instance itself is
    # never referenced again, so the return value is deliberately discarded.
    RemoteSensingRun.objects.create(
        soil_location=self.location,
        block_subdivision=self.subdivision,
        block_code="",
        chunk_size_sqm=900,
        temporal_start=self.temporal_start,
        temporal_end=self.temporal_end,
        status=RemoteSensingRun.STATUS_RUNNING,
        metadata={
            "task_id": task_id,
            "stage": "retrying",
            "status_label": "retrying",
            "failed_stage": "observations_persisted",
            "timestamps": {
                "failed_at": "2026-05-10T08:10:00Z",
                "retrying_at": "2026-05-10T08:11:00Z",
            },
            "stage_details": {
                "retrying": {
                    "retry_count": 2,
                    "retry_delay_seconds": 120,
                    "last_error": "temporary openEO timeout",
                    "failed_stage": "observations_persisted",
                    "failed_stage_details": {"created_count": 12, "updated_count": 0},
                }
            },
        },
    )

    response = self.client.get(f"/remote-sensing/runs/{task_id}/status/")

    self.assertEqual(response.status_code, 200)
    payload = response.json()["data"]
    self.assertEqual(payload["status"], "retrying")
    self.assertEqual(payload["run"]["pipeline_status"], "retrying")
    self.assertEqual(payload["task"]["current_stage"], "retrying")
    self.assertEqual(payload["task"]["retry"]["retry_count"], 2)
    self.assertEqual(payload["task"]["last_error"], "temporary openEO timeout")
    # A retrying run must not be presented as terminally failed.
    self.assertNotIn("failure_reason", payload["task"])
    self.assertEqual(payload["task"]["celery"]["state"], "RETRY")
@patch("location_data.views._get_remote_sensing_async_result")
def test_run_status_endpoint_overrides_stale_failed_db_state_when_celery_is_retrying(self, mock_async_result):
    """A live Celery RETRY state overrides a stale FAILURE recorded in the DB.

    The raw DB ``status`` is still reported, but every derived label/stage
    field reflects the in-flight retry instead of the stale failure.
    """
    mock_async_result.return_value = SimpleNamespace(
        state="RETRY",
        result="temporary openEO timeout",
        info="temporary openEO timeout",
        ready=lambda: False,
        successful=lambda: False,
        failed=lambda: False,
    )
    task_id = "e723ba3e-c53c-401b-b3a0-5f7013c7b401"
    # DB snapshot written by the previous (failed) attempt: terminally failed.
    stale_metadata = {
        "task_id": task_id,
        "stage": "failed",
        "status_label": "failed",
        "failed_stage": "observations_persisted",
        "failure_reason": "temporary openEO timeout",
        "timestamps": {"failed_at": "2026-05-10T08:10:00Z"},
        "stage_details": {
            "failed": {
                "failed_stage": "observations_persisted",
                "error_message": "temporary openEO timeout",
                "failed_stage_details": {"created_count": 12, "updated_count": 0},
            }
        },
    }
    run = RemoteSensingRun.objects.create(
        soil_location=self.location,
        block_subdivision=self.subdivision,
        block_code="",
        chunk_size_sqm=900,
        temporal_start=self.temporal_start,
        temporal_end=self.temporal_end,
        status=RemoteSensingRun.STATUS_FAILURE,
        error_message="temporary openEO timeout",
        metadata=stale_metadata,
    )

    response = self.client.get(f"/remote-sensing/runs/{task_id}/status/")

    self.assertEqual(response.status_code, 200)
    payload = response.json()["data"]
    self.assertEqual(payload["status"], "retrying")
    run_payload = payload["run"]
    # Raw DB status is preserved, but all derived fields say "retrying".
    self.assertEqual(run_payload["status"], RemoteSensingRun.STATUS_FAILURE)
    self.assertEqual(run_payload["status_label"], "retrying")
    self.assertEqual(run_payload["pipeline_status"], "retrying")
    self.assertEqual(run_payload["stage"], "retrying")
    task_payload = payload["task"]
    self.assertEqual(task_payload["current_stage"], "retrying")
    self.assertEqual(task_payload["retry"]["failed_stage"], "observations_persisted")
    self.assertEqual(task_payload["stages"][-1]["name"], "retrying")
    self.assertEqual(task_payload["stages"][-1]["status"], "running")
    self.assertNotIn("failure_reason", task_payload)
    self.assertEqual(task_payload["celery"]["state"], "RETRY")
    self.assertEqual(run_payload["id"], run.id)
def test_run_status_endpoint_returns_failed_task_details(self):
    """A terminally failed run exposes its failed stage and failure reason."""
    task_id = "e723ba3e-c53c-401b-b3a0-5f7013c7b401"
    failed_stage_details = {"created_count": 12, "updated_count": 0}
    run = RemoteSensingRun.objects.create(
        soil_location=self.location,
        block_subdivision=self.subdivision,
        block_code="",
        chunk_size_sqm=900,
        temporal_start=self.temporal_start,
        temporal_end=self.temporal_end,
        status=RemoteSensingRun.STATUS_FAILURE,
        error_message="openEO timeout",
        metadata={
            "task_id": task_id,
            "stage": "failed",
            "failed_stage": "observations_persisted",
            "failure_reason": "openEO timeout",
            "timestamps": {"failed_at": "2026-05-10T08:10:00Z"},
            "stage_details": {
                "failed": {
                    "failed_stage": "observations_persisted",
                    "error_message": "openEO timeout",
                    "failed_stage_details": failed_stage_details,
                }
            },
        },
    )

    response = self.client.get(f"/remote-sensing/runs/{task_id}/status/")

    self.assertEqual(response.status_code, 200)
    payload = response.json()["data"]
    self.assertEqual(payload["status"], "failed")
    task_payload = payload["task"]
    self.assertEqual(task_payload["current_stage"], "failed")
    self.assertEqual(task_payload["failed_stage"], "observations_persisted")
    self.assertEqual(task_payload["failure_reason"], "openEO timeout")
    self.assertEqual(task_payload["current_stage_details"]["failed_stage"], "observations_persisted")
    self.assertEqual(task_payload["stages"][-1]["status"], "failed")
    self.assertEqual(payload["run"]["id"], run.id)
def test_run_result_endpoint_returns_paginated_assignments(self):
run = RemoteSensingRun.objects.create(
soil_location=self.location,