2026-05-10 02:02:48 +03:30
|
|
|
from datetime import date, timedelta
|
2026-05-09 16:55:06 +03:30
|
|
|
from types import SimpleNamespace
|
|
|
|
|
from unittest.mock import patch
|
|
|
|
|
|
|
|
|
|
from django.test import TestCase, override_settings
|
2026-05-10 02:02:48 +03:30
|
|
|
from django.utils import timezone
|
2026-05-09 16:55:06 +03:30
|
|
|
from rest_framework.test import APIClient
|
|
|
|
|
|
2026-05-10 02:02:48 +03:30
|
|
|
from farm_data.models import SensorData
|
|
|
|
|
from location_data.data_driven_subdivision import DEFAULT_CLUSTER_FEATURES
|
2026-05-09 16:55:06 +03:30
|
|
|
from location_data.models import (
|
|
|
|
|
AnalysisGridCell,
|
|
|
|
|
AnalysisGridObservation,
|
2026-05-11 04:38:44 +03:30
|
|
|
RemoteSensingClusterBlock,
|
2026-05-09 16:55:06 +03:30
|
|
|
BlockSubdivision,
|
|
|
|
|
RemoteSensingClusterAssignment,
|
|
|
|
|
RemoteSensingRun,
|
|
|
|
|
RemoteSensingSubdivisionResult,
|
|
|
|
|
SoilLocation,
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Route requests in these tests through the app-local URLconf so the
# relative paths ("/remote-sensing/", ...) resolve without the project root.
@override_settings(ROOT_URLCONF="location_data.urls")
class RemoteSensingApiTests(TestCase):
    """API tests for the remote-sensing endpoints (submit, poll, run status)."""

    def setUp(self):
        """Create one farm with a single-block square boundary shared by all tests."""
        self.client = APIClient()
        # Small GeoJSON square (lon/lat order) used as both the farm boundary
        # and the geometry of every grid cell / cluster block created below.
        self.boundary = {
            "type": "Polygon",
            "coordinates": [
                [
                    [51.3890, 35.6890],
                    [51.3900, 35.6890],
                    [51.3900, 35.6900],
                    [51.3890, 35.6900],
                    [51.3890, 35.6890],
                ]
            ],
        }
        self.location = SoilLocation.objects.create(
            latitude="35.689200",
            longitude="51.389000",
            farm_boundary=self.boundary,
        )
        # Lay out a single input block and persist only the fields it touched.
        self.location.set_input_block_count(1)
        self.location.save(update_fields=["input_block_count", "block_layout", "updated_at"])
        # The farm record whose UUID the API endpoints are queried with.
        self.farm = SensorData.objects.create(
            farm_uuid="11111111-1111-1111-1111-111111111111",
            center_location=self.location,
            payload={},
        )
        # 30-day analysis window ending yesterday (local date), matching the
        # temporal window the submit endpoint is expected to derive.
        self.temporal_end = timezone.localdate() - timedelta(days=1)
        self.temporal_start = self.temporal_end - timedelta(days=30)
        self.subdivision = BlockSubdivision.objects.create(
            soil_location=self.location,
            block_code="block-1",
            source_boundary=self.boundary,
            chunk_size_sqm=900,
            status="created",
        )
|
|
|
|
|
|
|
|
|
|
def test_post_remote_sensing_returns_404_when_location_missing(self):
|
|
|
|
|
response = self.client.post(
|
|
|
|
|
"/remote-sensing/",
|
|
|
|
|
data={
|
2026-05-10 02:02:48 +03:30
|
|
|
"farm_uuid": "22222222-2222-2222-2222-222222222222",
|
2026-05-09 16:55:06 +03:30
|
|
|
},
|
|
|
|
|
format="json",
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
self.assertEqual(response.status_code, 404)
|
|
|
|
|
self.assertEqual(response.json()["msg"], "location پیدا نشد.")
|
|
|
|
|
|
|
|
|
|
    @patch("location_data.views.run_remote_sensing_analysis_task.delay")
    def test_post_remote_sensing_enqueues_task_and_returns_processing(self, mock_delay):
        """A valid POST enqueues the Celery task and answers 202/processing."""
        # The view reads the task id off the AsyncResult returned by .delay().
        mock_delay.return_value = SimpleNamespace(id="e723ba3e-c53c-401b-b3a0-5f7013c7b401")

        response = self.client.post(
            "/remote-sensing/",
            data={
                "farm_uuid": str(self.farm.farm_uuid),
                "force_refresh": False,
            },
            format="json",
        )

        self.assertEqual(response.status_code, 202)
        payload = response.json()["data"]
        # Immediately after enqueue the API reports a processing placeholder
        # with the mocked task id and an empty summary.
        self.assertEqual(payload["status"], "processing")
        self.assertEqual(payload["source"], "processing")
        self.assertEqual(payload["task_id"], "e723ba3e-c53c-401b-b3a0-5f7013c7b401")
        self.assertEqual(payload["block_code"], "")
        self.assertEqual(payload["summary"]["cell_count"], 0)
        # The view must also have persisted a pending run with the derived
        # 30-day temporal window and the default feature selection.
        run = RemoteSensingRun.objects.get(id=payload["run"]["id"])
        self.assertEqual(run.block_code, "")
        self.assertEqual(run.temporal_start, self.temporal_start)
        self.assertEqual(run.temporal_end, self.temporal_end)
        self.assertEqual(run.status, RemoteSensingRun.STATUS_PENDING)
        self.assertEqual(run.metadata["stage"], "queued")
        self.assertEqual(run.metadata["selected_features"], DEFAULT_CLUSTER_FEATURES)
        mock_delay.assert_called_once()
|
|
|
|
|
|
|
|
|
|
def test_get_remote_sensing_returns_processing_when_run_exists_without_results(self):
|
|
|
|
|
RemoteSensingRun.objects.create(
|
|
|
|
|
soil_location=self.location,
|
|
|
|
|
block_subdivision=self.subdivision,
|
2026-05-10 02:02:48 +03:30
|
|
|
block_code="",
|
2026-05-09 16:55:06 +03:30
|
|
|
chunk_size_sqm=900,
|
2026-05-10 02:02:48 +03:30
|
|
|
temporal_start=self.temporal_start,
|
|
|
|
|
temporal_end=self.temporal_end,
|
2026-05-09 16:55:06 +03:30
|
|
|
status=RemoteSensingRun.STATUS_RUNNING,
|
2026-05-10 02:02:48 +03:30
|
|
|
metadata={"task_id": "e723ba3e-c53c-401b-b3a0-5f7013c7b401"},
|
2026-05-09 16:55:06 +03:30
|
|
|
)
|
|
|
|
|
|
|
|
|
|
response = self.client.get(
|
|
|
|
|
"/remote-sensing/",
|
|
|
|
|
data={
|
2026-05-10 02:02:48 +03:30
|
|
|
"farm_uuid": str(self.farm.farm_uuid),
|
2026-05-09 16:55:06 +03:30
|
|
|
},
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
self.assertEqual(response.status_code, 200)
|
|
|
|
|
payload = response.json()["data"]
|
|
|
|
|
self.assertEqual(payload["status"], "processing")
|
|
|
|
|
self.assertEqual(payload["source"], "processing")
|
|
|
|
|
self.assertEqual(payload["cells"], [])
|
|
|
|
|
self.assertEqual(payload["run"]["status"], RemoteSensingRun.STATUS_RUNNING)
|
|
|
|
|
|
|
|
|
|
    def test_get_remote_sensing_returns_cached_results(self):
        """GET serves persisted observations (source=database) for a successful run."""
        run = RemoteSensingRun.objects.create(
            soil_location=self.location,
            block_subdivision=self.subdivision,
            block_code="",
            chunk_size_sqm=900,
            temporal_start=self.temporal_start,
            temporal_end=self.temporal_end,
            status=RemoteSensingRun.STATUS_SUCCESS,
        )
        # One grid cell with one observation is enough to exercise the summary
        # aggregation (means over a single value equal that value).
        cell = AnalysisGridCell.objects.create(
            soil_location=self.location,
            block_subdivision=self.subdivision,
            block_code="",
            cell_code="cell-1",
            chunk_size_sqm=900,
            geometry=self.boundary,
            centroid_lat="35.689500",
            centroid_lon="51.389500",
        )
        AnalysisGridObservation.objects.create(
            cell=cell,
            run=run,
            temporal_start=self.temporal_start,
            temporal_end=self.temporal_end,
            ndvi=0.61,
            ndwi=0.22,
            soil_vv=0.13,
            soil_vv_db=-8.860566,
            dem_m=1550.0,
            slope_deg=4.2,
            metadata={"backend_name": "openeo"},
        )

        response = self.client.get(
            "/remote-sensing/",
            data={
                "farm_uuid": str(self.farm.farm_uuid),
            },
        )

        self.assertEqual(response.status_code, 200)
        payload = response.json()["data"]
        self.assertEqual(payload["status"], "success")
        self.assertEqual(payload["source"], "database")
        # Single-observation means mirror the stored values exactly.
        self.assertEqual(payload["summary"]["cell_count"], 1)
        self.assertEqual(payload["summary"]["ndvi_mean"], 0.61)
        self.assertEqual(payload["summary"]["soil_vv_db_mean"], -8.860566)
        self.assertEqual(len(payload["cells"]), 1)
        self.assertEqual(payload["cells"][0]["cell_code"], "cell-1")
|
|
|
|
|
|
|
|
|
|
def test_run_status_endpoint_returns_normalized_status(self):
|
|
|
|
|
run = RemoteSensingRun.objects.create(
|
|
|
|
|
soil_location=self.location,
|
|
|
|
|
block_subdivision=self.subdivision,
|
2026-05-10 02:02:48 +03:30
|
|
|
block_code="",
|
2026-05-09 16:55:06 +03:30
|
|
|
chunk_size_sqm=900,
|
2026-05-10 02:02:48 +03:30
|
|
|
temporal_start=self.temporal_start,
|
|
|
|
|
temporal_end=self.temporal_end,
|
2026-05-09 16:55:06 +03:30
|
|
|
status=RemoteSensingRun.STATUS_SUCCESS,
|
|
|
|
|
metadata={"stage": "completed", "selected_features": ["ndvi"]},
|
|
|
|
|
)
|
|
|
|
|
|
2026-05-10 02:02:48 +03:30
|
|
|
task_id = "e723ba3e-c53c-401b-b3a0-5f7013c7b401"
|
|
|
|
|
run.metadata = {**run.metadata, "task_id": task_id}
|
|
|
|
|
run.save(update_fields=["metadata", "updated_at"])
|
|
|
|
|
|
|
|
|
|
response = self.client.get(f"/remote-sensing/runs/{task_id}/status/")
|
2026-05-09 16:55:06 +03:30
|
|
|
|
|
|
|
|
self.assertEqual(response.status_code, 200)
|
|
|
|
|
payload = response.json()["data"]
|
|
|
|
|
self.assertEqual(payload["status"], "completed")
|
|
|
|
|
self.assertEqual(payload["run"]["pipeline_status"], "completed")
|
|
|
|
|
self.assertEqual(payload["run"]["stage"], "completed")
|
|
|
|
|
self.assertEqual(payload["run"]["selected_features"], ["ndvi"])
|
|
|
|
|
|
2026-05-10 22:49:07 +03:30
|
|
|
    @patch("location_data.views._get_remote_sensing_async_result")
    def test_run_status_endpoint_returns_detailed_task_progress(self, mock_async_result):
        """Status endpoint surfaces per-stage and per-metric progress for a running task."""
        # Celery reports the task as STARTED and still in flight.
        mock_async_result.return_value = SimpleNamespace(
            state="STARTED",
            result=None,
            info={"message": "fetching_remote_metrics"},
            ready=lambda: False,
            successful=lambda: False,
            failed=lambda: False,
        )
        task_id = "e723ba3e-c53c-401b-b3a0-5f7013c7b401"
        # The run's metadata carries the pipeline's self-reported progress:
        # current stage, stage timestamps, and metric-level progress detail.
        run = RemoteSensingRun.objects.create(
            soil_location=self.location,
            block_subdivision=self.subdivision,
            block_code="",
            chunk_size_sqm=900,
            temporal_start=self.temporal_start,
            temporal_end=self.temporal_end,
            status=RemoteSensingRun.STATUS_RUNNING,
            metadata={
                "task_id": task_id,
                "stage": "fetching_remote_metrics",
                "selected_features": ["ndvi", "ndwi"],
                "timestamps": {
                    "queued_at": "2026-05-10T08:00:00Z",
                    "started_at": "2026-05-10T08:00:03Z",
                    "fetching_remote_metrics_at": "2026-05-10T08:00:12Z",
                },
                "stage_details": {
                    "fetching_remote_metrics": {
                        "requested_cell_count": 2,
                        "metric_progress": {
                            "total_metrics": 2,
                            "completed_metric_count": 1,
                            "active_metric": "ndwi",
                            "completed_metrics": ["ndvi"],
                            "failed_metrics": [],
                            "states": [
                                {"metric": "ndvi", "status": "completed"},
                                {"metric": "ndwi", "status": "running"},
                            ],
                        },
                    }
                },
            },
        )

        response = self.client.get(f"/remote-sensing/runs/{task_id}/status/")

        self.assertEqual(response.status_code, 200)
        payload = response.json()["data"]
        self.assertEqual(payload["status"], "running")
        # Stage/metric detail from run metadata is echoed under "task".
        self.assertEqual(payload["task"]["current_stage"], "fetching_remote_metrics")
        self.assertEqual(payload["task"]["metric_progress"]["active_metric"], "ndwi")
        self.assertEqual(payload["task"]["stages"][-1]["status"], "running")
        # Raw Celery state/info is exposed alongside the normalized view.
        self.assertEqual(payload["task"]["celery"]["state"], "STARTED")
        self.assertEqual(payload["task"]["celery"]["info"]["message"], "fetching_remote_metrics")
        self.assertEqual(payload["run"]["id"], run.id)
|
|
|
|
|
|
|
|
|
|
    @patch("location_data.views._get_remote_sensing_async_result")
    def test_run_status_endpoint_returns_retrying_status_when_celery_is_retrying(self, mock_async_result):
        """While Celery is in RETRY, the endpoint reports 'retrying' with retry detail."""
        mock_async_result.return_value = SimpleNamespace(
            state="RETRY",
            result="temporary openEO timeout",
            info="temporary openEO timeout",
            ready=lambda: False,
            successful=lambda: False,
            failed=lambda: False,
        )
        task_id = "e723ba3e-c53c-401b-b3a0-5f7013c7b401"
        # Pipeline metadata records which stage failed and the retry bookkeeping.
        run = RemoteSensingRun.objects.create(
            soil_location=self.location,
            block_subdivision=self.subdivision,
            block_code="",
            chunk_size_sqm=900,
            temporal_start=self.temporal_start,
            temporal_end=self.temporal_end,
            status=RemoteSensingRun.STATUS_RUNNING,
            metadata={
                "task_id": task_id,
                "stage": "retrying",
                "status_label": "retrying",
                "failed_stage": "observations_persisted",
                "timestamps": {
                    "failed_at": "2026-05-10T08:10:00Z",
                    "retrying_at": "2026-05-10T08:11:00Z",
                },
                "stage_details": {
                    "retrying": {
                        "retry_count": 2,
                        "retry_delay_seconds": 120,
                        "last_error": "temporary openEO timeout",
                        "failed_stage": "observations_persisted",
                        "failed_stage_details": {"created_count": 12, "updated_count": 0},
                    }
                },
            },
        )

        response = self.client.get(f"/remote-sensing/runs/{task_id}/status/")

        self.assertEqual(response.status_code, 200)
        payload = response.json()["data"]
        self.assertEqual(payload["status"], "retrying")
        self.assertEqual(payload["run"]["pipeline_status"], "retrying")
        self.assertEqual(payload["task"]["current_stage"], "retrying")
        self.assertEqual(payload["task"]["retry"]["retry_count"], 2)
        self.assertEqual(payload["task"]["last_error"], "temporary openEO timeout")
        # A retrying task is not terminal, so no failure_reason is exposed.
        self.assertNotIn("failure_reason", payload["task"])
        self.assertEqual(payload["task"]["celery"]["state"], "RETRY")
|
|
|
|
|
|
|
|
|
|
    @patch("location_data.views._get_remote_sensing_async_result")
    def test_run_status_endpoint_overrides_stale_failed_db_state_when_celery_is_retrying(self, mock_async_result):
        """Live Celery RETRY state wins over a stale FAILURE status stored in the DB."""
        mock_async_result.return_value = SimpleNamespace(
            state="RETRY",
            result="temporary openEO timeout",
            info="temporary openEO timeout",
            ready=lambda: False,
            successful=lambda: False,
            failed=lambda: False,
        )
        task_id = "e723ba3e-c53c-401b-b3a0-5f7013c7b401"
        # DB snapshot says the run failed — written before Celery scheduled a retry.
        run = RemoteSensingRun.objects.create(
            soil_location=self.location,
            block_subdivision=self.subdivision,
            block_code="",
            chunk_size_sqm=900,
            temporal_start=self.temporal_start,
            temporal_end=self.temporal_end,
            status=RemoteSensingRun.STATUS_FAILURE,
            error_message="temporary openEO timeout",
            metadata={
                "task_id": task_id,
                "stage": "failed",
                "status_label": "failed",
                "failed_stage": "observations_persisted",
                "failure_reason": "temporary openEO timeout",
                "timestamps": {"failed_at": "2026-05-10T08:10:00Z"},
                "stage_details": {
                    "failed": {
                        "failed_stage": "observations_persisted",
                        "error_message": "temporary openEO timeout",
                        "failed_stage_details": {"created_count": 12, "updated_count": 0},
                    }
                },
            },
        )

        response = self.client.get(f"/remote-sensing/runs/{task_id}/status/")

        self.assertEqual(response.status_code, 200)
        payload = response.json()["data"]
        # Normalized status follows Celery (retrying), not the stale DB failure...
        self.assertEqual(payload["status"], "retrying")
        # ...while the raw persisted status is still reported untouched.
        self.assertEqual(payload["run"]["status"], RemoteSensingRun.STATUS_FAILURE)
        self.assertEqual(payload["run"]["status_label"], "retrying")
        self.assertEqual(payload["run"]["pipeline_status"], "retrying")
        self.assertEqual(payload["run"]["stage"], "retrying")
        self.assertEqual(payload["task"]["current_stage"], "retrying")
        self.assertEqual(payload["task"]["retry"]["failed_stage"], "observations_persisted")
        self.assertEqual(payload["task"]["stages"][-1]["name"], "retrying")
        self.assertEqual(payload["task"]["stages"][-1]["status"], "running")
        self.assertNotIn("failure_reason", payload["task"])
        self.assertEqual(payload["task"]["celery"]["state"], "RETRY")
        self.assertEqual(payload["run"]["id"], run.id)
|
|
|
|
|
|
|
|
|
|
    def test_run_status_endpoint_returns_failed_task_details(self):
        """A terminally failed run exposes failed stage, reason, and stage details."""
        task_id = "e723ba3e-c53c-401b-b3a0-5f7013c7b401"
        run = RemoteSensingRun.objects.create(
            soil_location=self.location,
            block_subdivision=self.subdivision,
            block_code="",
            chunk_size_sqm=900,
            temporal_start=self.temporal_start,
            temporal_end=self.temporal_end,
            status=RemoteSensingRun.STATUS_FAILURE,
            error_message="openEO timeout",
            metadata={
                "task_id": task_id,
                "stage": "failed",
                "failed_stage": "observations_persisted",
                "failure_reason": "openEO timeout",
                "timestamps": {"failed_at": "2026-05-10T08:10:00Z"},
                "stage_details": {
                    "failed": {
                        "failed_stage": "observations_persisted",
                        "error_message": "openEO timeout",
                        "failed_stage_details": {"created_count": 12, "updated_count": 0},
                    }
                },
            },
        )

        response = self.client.get(f"/remote-sensing/runs/{task_id}/status/")

        self.assertEqual(response.status_code, 200)
        payload = response.json()["data"]
        self.assertEqual(payload["status"], "failed")
        # Unlike the retrying case, failure_reason IS present for terminal failures.
        self.assertEqual(payload["task"]["current_stage"], "failed")
        self.assertEqual(payload["task"]["failed_stage"], "observations_persisted")
        self.assertEqual(payload["task"]["failure_reason"], "openEO timeout")
        self.assertEqual(payload["task"]["current_stage_details"]["failed_stage"], "observations_persisted")
        self.assertEqual(payload["task"]["stages"][-1]["status"], "failed")
        self.assertEqual(payload["run"]["id"], run.id)
|
|
|
|
|
|
2026-05-09 16:55:06 +03:30
|
|
|
    def test_run_result_endpoint_returns_paginated_assignments(self):
        """Status endpoint for a completed run returns subdivision result with paginated assignments."""
        run = RemoteSensingRun.objects.create(
            soil_location=self.location,
            block_subdivision=self.subdivision,
            block_code="",
            chunk_size_sqm=900,
            temporal_start=self.temporal_start,
            temporal_end=self.temporal_end,
            status=RemoteSensingRun.STATUS_SUCCESS,
            metadata={"stage": "completed"},
        )
        cell = AnalysisGridCell.objects.create(
            soil_location=self.location,
            block_subdivision=self.subdivision,
            block_code="",
            cell_code="cell-1",
            chunk_size_sqm=900,
            geometry=self.boundary,
            centroid_lat="35.689500",
            centroid_lon="51.389500",
        )
        AnalysisGridObservation.objects.create(
            cell=cell,
            run=run,
            temporal_start=self.temporal_start,
            temporal_end=self.temporal_end,
            ndvi=0.61,
            ndwi=0.22,
            soil_vv=0.13,
            soil_vv_db=-8.860566,
            dem_m=1550.0,
            slope_deg=4.2,
            metadata={"backend_name": "openeo"},
        )
        # One clustering result with a single cluster, one cell assignment,
        # and one derived cluster block covering that cell.
        result = RemoteSensingSubdivisionResult.objects.create(
            soil_location=self.location,
            run=run,
            block_subdivision=self.subdivision,
            block_code="",
            chunk_size_sqm=900,
            temporal_start=self.temporal_start,
            temporal_end=self.temporal_end,
            cluster_count=1,
            selected_features=["ndvi"],
            metadata={"used_cell_count": 1, "skipped_cell_count": 0},
        )
        RemoteSensingClusterAssignment.objects.create(
            result=result,
            cell=cell,
            cluster_label=0,
            raw_feature_values={"ndvi": 0.61},
            scaled_feature_values={"ndvi": 0.0},
        )
        cluster_block = RemoteSensingClusterBlock.objects.create(
            result=result,
            soil_location=self.location,
            block_subdivision=self.subdivision,
            block_code="",
            sub_block_code="cluster-0",
            cluster_label=0,
            chunk_size_sqm=900,
            centroid_lat="35.689500",
            centroid_lon="51.389500",
            cell_count=1,
            cell_codes=["cell-1"],
            geometry=self.boundary,
            metadata={"source": "analysis_grid_cells"},
        )

        # Attach the task id so the run is addressable by the status URL.
        task_id = "e723ba3e-c53c-401b-b3a0-5f7013c7b401"
        run.metadata = {**run.metadata, "task_id": task_id}
        run.save(update_fields=["metadata", "updated_at"])

        response = self.client.get(f"/remote-sensing/runs/{task_id}/status/", data={"page": 1, "page_size": 10})

        self.assertEqual(response.status_code, 200)
        payload = response.json()["data"]
        self.assertEqual(payload["status"], "completed")
        self.assertEqual(payload["subdivision_result"]["cluster_count"], 1)
        self.assertEqual(payload["subdivision_result"]["cluster_blocks"][0]["uuid"], str(cluster_block.uuid))
        # Pagination wrapper reports the single assignment created above.
        self.assertEqual(len(payload["subdivision_result"]["assignments"]), 1)
        self.assertEqual(payload["pagination"]["assignments"]["total_items"], 1)
|