UPDATE
This commit is contained in:
@@ -2,6 +2,8 @@ from __future__ import annotations
|
||||
|
||||
from copy import deepcopy
|
||||
from dataclasses import dataclass
|
||||
from datetime import date, datetime
|
||||
from decimal import Decimal
|
||||
from typing import Any
|
||||
|
||||
from django.db.models import Avg
|
||||
@@ -9,7 +11,7 @@ from django.db.models import Avg
|
||||
from crop_simulation.growth_simulation import GrowthSimulationContext, _run_projection_engine
|
||||
from crop_simulation.services import PcseSimulationManager, build_simulation_payload_from_farm
|
||||
from farm_data.services import get_canonical_farm_record, get_farm_plant_assignments
|
||||
from .models import AnalysisGridObservation, RemoteSensingClusterBlock
|
||||
from .models import AnalysisGridObservation, RemoteSensingClusterBlock, RemoteSensingSubdivisionResult
|
||||
from .satellite_snapshot import build_location_block_satellite_snapshots
|
||||
|
||||
|
||||
@@ -70,6 +72,23 @@ def _clamp(value: float, minimum: float, maximum: float) -> float:
|
||||
return max(minimum, min(value, maximum))
|
||||
|
||||
|
||||
def _json_safe(value: Any) -> Any:
|
||||
if isinstance(value, Decimal):
|
||||
return float(value)
|
||||
if isinstance(value, datetime):
|
||||
formatted = value.isoformat()
|
||||
if formatted.endswith("+00:00"):
|
||||
return formatted[:-6] + "Z"
|
||||
return formatted
|
||||
if isinstance(value, date):
|
||||
return value.isoformat()
|
||||
if isinstance(value, dict):
|
||||
return {str(key): _json_safe(item) for key, item in value.items()}
|
||||
if isinstance(value, (list, tuple)):
|
||||
return [_json_safe(item) for item in value]
|
||||
return value
|
||||
|
||||
|
||||
def _build_cluster_entries(
|
||||
snapshots: list[dict[str, Any]],
|
||||
*,
|
||||
@@ -353,6 +372,21 @@ def build_cluster_crop_recommendations(farm_uuid: str) -> dict[str, Any]:
|
||||
if not cluster_entries:
|
||||
raise ClusterRecommendationNotFound("برای این مزرعه هنوز کلاستر قابل استفاده پیدا نشد.")
|
||||
|
||||
recommendation_result_ids = sorted(
|
||||
{
|
||||
int(cluster_block.result_id)
|
||||
for cluster_block in cluster_blocks_by_uuid.values()
|
||||
if cluster_block.result_id
|
||||
}
|
||||
)
|
||||
cached_payload = _load_cached_cluster_recommendations(
|
||||
farm_uuid=str(farm.farm_uuid),
|
||||
result_ids=recommendation_result_ids,
|
||||
plant_assignments=plant_assignments,
|
||||
)
|
||||
if cached_payload is not None:
|
||||
return cached_payload
|
||||
|
||||
base_payloads: dict[str, dict[str, Any]] = {}
|
||||
for assignment in plant_assignments:
|
||||
plant_name = str(getattr(assignment.plant, "name", "") or "").strip()
|
||||
@@ -392,7 +426,7 @@ def build_cluster_crop_recommendations(farm_uuid: str) -> dict[str, Any]:
|
||||
}
|
||||
)
|
||||
|
||||
return {
|
||||
payload = {
|
||||
"farm_uuid": str(farm.farm_uuid),
|
||||
"location_id": location.id,
|
||||
"evaluated_plant_count": len(base_payloads),
|
||||
@@ -413,3 +447,68 @@ def build_cluster_crop_recommendations(farm_uuid: str) -> dict[str, Any]:
|
||||
"snapshot_block_count": len(snapshots),
|
||||
},
|
||||
}
|
||||
_store_cached_cluster_recommendations(
|
||||
farm_uuid=str(farm.farm_uuid),
|
||||
result_ids=recommendation_result_ids,
|
||||
plant_assignments=plant_assignments,
|
||||
payload=payload,
|
||||
)
|
||||
return payload
|
||||
|
||||
|
||||
def _build_assignment_cache_signature(plant_assignments: list[Any]) -> list[dict[str, Any]]:
|
||||
return [
|
||||
{
|
||||
"plant_id": getattr(assignment.plant, "backend_plant_id", None),
|
||||
"position": int(assignment.position or 0),
|
||||
"stage": str(assignment.stage or ""),
|
||||
}
|
||||
for assignment in plant_assignments
|
||||
]
|
||||
|
||||
|
||||
def _load_cached_cluster_recommendations(
    *,
    farm_uuid: str,
    result_ids: list[int],
    plant_assignments: list[Any],
) -> dict[str, Any] | None:
    """Return a cached recommendation payload for this farm, or ``None``.

    Scans the ``cluster_recommendations`` section of each candidate
    subdivision result's metadata and accepts an entry only when its
    stored assignment signature matches the current assignments.
    """
    if not result_ids:
        return None
    cache_key = f"farm::{farm_uuid}"
    expected_signature = _build_assignment_cache_signature(plant_assignments)
    for candidate in RemoteSensingSubdivisionResult.objects.filter(id__in=result_ids):
        recommendations = dict(dict(candidate.metadata or {}).get("cluster_recommendations") or {})
        entry = recommendations.get(cache_key)
        # Skip malformed entries and entries written for a different
        # assignment configuration.
        if not isinstance(entry, dict) or entry.get("assignment_signature") != expected_signature:
            continue
        cached_payload = entry.get("payload")
        if isinstance(cached_payload, dict):
            return cached_payload
    return None
|
||||
|
||||
|
||||
def _store_cached_cluster_recommendations(
    *,
    farm_uuid: str,
    result_ids: list[int],
    plant_assignments: list[Any],
    payload: dict[str, Any],
) -> None:
    """Persist *payload* into the metadata of every targeted result.

    Each ``RemoteSensingSubdivisionResult`` gets (or updates) a
    ``cluster_recommendations`` entry keyed by farm, tagged with the
    current assignment signature so later reads can validate it.
    """
    if not result_ids:
        return
    cache_key = f"farm::{farm_uuid}"
    # JSON-sanitize once; the same entry is written to every result.
    cache_entry = {
        "assignment_signature": _build_assignment_cache_signature(plant_assignments),
        "payload": _json_safe(payload),
    }
    for result in RemoteSensingSubdivisionResult.objects.filter(id__in=result_ids):
        metadata = dict(result.metadata or {})
        recommendations = dict(metadata.get("cluster_recommendations") or {})
        recommendations[cache_key] = cache_entry
        metadata["cluster_recommendations"] = recommendations
        result.metadata = metadata
        result.save(update_fields=["metadata", "updated_at"])
|
||||
|
||||
@@ -8,6 +8,7 @@ from rest_framework.test import APIClient
|
||||
|
||||
from location_data.models import (
|
||||
AnalysisGridCell,
|
||||
AnalysisGridObservation,
|
||||
BlockSubdivision,
|
||||
RemoteSensingClusterBlock,
|
||||
RemoteSensingRun,
|
||||
@@ -193,3 +194,61 @@ class RemoteSensingClusterBlockLiveApiTests(TestCase):
|
||||
expected_start = expected_end - timedelta(days=6)
|
||||
self.assertEqual(kwargs["temporal_start"], expected_start)
|
||||
self.assertEqual(kwargs["temporal_end"], expected_end)
|
||||
|
||||
@patch("location_data.views.compute_remote_sensing_metrics")
|
||||
def test_get_cluster_block_live_uses_database_cache_for_matching_window(self, compute_mock):
|
||||
cell_1 = AnalysisGridCell.objects.create(
|
||||
soil_location=self.location,
|
||||
block_subdivision=self.subdivision,
|
||||
block_code="block-1",
|
||||
cell_code="cell-1",
|
||||
chunk_size_sqm=900,
|
||||
geometry=self.boundary,
|
||||
centroid_lat="35.689250",
|
||||
centroid_lon="51.389250",
|
||||
)
|
||||
cell_2 = AnalysisGridCell.objects.create(
|
||||
soil_location=self.location,
|
||||
block_subdivision=self.subdivision,
|
||||
block_code="block-1",
|
||||
cell_code="cell-2",
|
||||
chunk_size_sqm=900,
|
||||
geometry=self.boundary,
|
||||
centroid_lat="35.689750",
|
||||
centroid_lon="51.389750",
|
||||
)
|
||||
AnalysisGridObservation.objects.create(
|
||||
cell=cell_1,
|
||||
run=self.run,
|
||||
temporal_start=date(2025, 1, 1),
|
||||
temporal_end=date(2025, 1, 31),
|
||||
ndvi=0.44,
|
||||
ndwi=0.12,
|
||||
soil_vv=0.09,
|
||||
soil_vv_db=-11.0,
|
||||
metadata={"backend_name": "openeo"},
|
||||
)
|
||||
AnalysisGridObservation.objects.create(
|
||||
cell=cell_2,
|
||||
run=self.run,
|
||||
temporal_start=date(2025, 1, 1),
|
||||
temporal_end=date(2025, 1, 31),
|
||||
ndvi=0.64,
|
||||
ndwi=0.22,
|
||||
soil_vv=0.19,
|
||||
soil_vv_db=-7.0,
|
||||
metadata={"backend_name": "openeo"},
|
||||
)
|
||||
|
||||
response = self.client.get(
|
||||
f"/remote-sensing/cluster-blocks/{self.cluster_block.uuid}/live/",
|
||||
data={"temporal_start": "2025-01-01", "temporal_end": "2025-01-31"},
|
||||
)
|
||||
|
||||
self.assertEqual(response.status_code, 200)
|
||||
payload = response.json()["data"]
|
||||
self.assertEqual(payload["source"], "database")
|
||||
self.assertTrue(payload["metadata"]["cache_hit"])
|
||||
self.assertEqual(payload["summary"]["ndvi_mean"], 0.54)
|
||||
self.assertEqual(payload["metrics"]["soil_vv_db"], -9.0)
|
||||
compute_mock.assert_not_called()
|
||||
|
||||
@@ -279,3 +279,37 @@ class RemoteSensingClusterRecommendationApiTests(TestCase):
|
||||
response.json()["msg"],
|
||||
"برای این مزرعه هنوز هیچ گیاهی در farm_data ثبت نشده است.",
|
||||
)
|
||||
|
||||
@patch("location_data.cluster_recommendation._simulate_candidate")
|
||||
def test_cluster_recommendations_use_cached_payload_for_same_farm_assignments(self, simulate_mock):
|
||||
simulate_mock.return_value = (
|
||||
{
|
||||
"engine": "pcse",
|
||||
"model_name": "Wofost81_NWLP_CWB_CNB",
|
||||
"metrics": {
|
||||
"yield_estimate": 100.0,
|
||||
"biomass": 200.0,
|
||||
"max_lai": 3.1,
|
||||
},
|
||||
},
|
||||
None,
|
||||
)
|
||||
|
||||
first_response = self.client.get(
|
||||
"/remote-sensing/cluster-recommendations/",
|
||||
data={"farm_uuid": str(self.farm.farm_uuid)},
|
||||
)
|
||||
self.assertEqual(first_response.status_code, 200)
|
||||
self.assertGreater(simulate_mock.call_count, 0)
|
||||
|
||||
simulate_mock.reset_mock()
|
||||
simulate_mock.side_effect = AssertionError("cached recommendations should skip simulation")
|
||||
|
||||
second_response = self.client.get(
|
||||
"/remote-sensing/cluster-recommendations/",
|
||||
data={"farm_uuid": str(self.farm.farm_uuid)},
|
||||
)
|
||||
|
||||
self.assertEqual(second_response.status_code, 200)
|
||||
self.assertEqual(first_response.json()["data"], second_response.json()["data"])
|
||||
simulate_mock.assert_not_called()
|
||||
|
||||
@@ -46,7 +46,7 @@ class RemoteSensingApiTests(TestCase):
|
||||
self.farm = SensorData.objects.create(
|
||||
farm_uuid="11111111-1111-1111-1111-111111111111",
|
||||
center_location=self.location,
|
||||
payload={},
|
||||
sensor_payload={},
|
||||
)
|
||||
self.temporal_end = timezone.localdate() - timedelta(days=1)
|
||||
self.temporal_start = self.temporal_end - timedelta(days=30)
|
||||
@@ -176,6 +176,241 @@ class RemoteSensingApiTests(TestCase):
|
||||
self.assertEqual(len(payload["cells"]), 1)
|
||||
self.assertEqual(payload["cells"][0]["cell_code"], "cell-1")
|
||||
|
||||
@patch("location_data.views.run_remote_sensing_analysis_task.delay")
|
||||
def test_post_remote_sensing_reuses_latest_completed_farm_cache_when_window_differs(self, mock_delay):
|
||||
fallback_start = self.temporal_start - timedelta(days=1)
|
||||
fallback_end = self.temporal_end - timedelta(days=1)
|
||||
run = RemoteSensingRun.objects.create(
|
||||
soil_location=self.location,
|
||||
block_subdivision=self.subdivision,
|
||||
block_code="",
|
||||
chunk_size_sqm=900,
|
||||
temporal_start=fallback_start,
|
||||
temporal_end=fallback_end,
|
||||
status=RemoteSensingRun.STATUS_SUCCESS,
|
||||
metadata={"farm_uuid": str(self.farm.farm_uuid), "stage": "completed"},
|
||||
)
|
||||
cell = AnalysisGridCell.objects.create(
|
||||
soil_location=self.location,
|
||||
block_subdivision=self.subdivision,
|
||||
block_code="",
|
||||
cell_code="cell-seeded-1",
|
||||
chunk_size_sqm=900,
|
||||
geometry=self.boundary,
|
||||
centroid_lat="35.689500",
|
||||
centroid_lon="51.389500",
|
||||
)
|
||||
AnalysisGridObservation.objects.create(
|
||||
cell=cell,
|
||||
run=run,
|
||||
temporal_start=fallback_start,
|
||||
temporal_end=fallback_end,
|
||||
ndvi=0.49,
|
||||
ndwi=0.17,
|
||||
soil_vv=0.10,
|
||||
soil_vv_db=-9.8,
|
||||
metadata={"backend_name": "openeo"},
|
||||
)
|
||||
|
||||
response = self.client.post(
|
||||
"/remote-sensing/",
|
||||
data={"farm_uuid": str(self.farm.farm_uuid), "force_refresh": False},
|
||||
format="json",
|
||||
)
|
||||
|
||||
self.assertEqual(response.status_code, 200)
|
||||
payload = response.json()["data"]
|
||||
self.assertEqual(payload["status"], "success")
|
||||
self.assertEqual(payload["source"], "database")
|
||||
self.assertEqual(payload["temporal_extent"]["start_date"], fallback_start.isoformat())
|
||||
self.assertEqual(payload["temporal_extent"]["end_date"], fallback_end.isoformat())
|
||||
self.assertEqual(payload["metadata"]["cache_match"], "latest_completed_for_farm")
|
||||
self.assertEqual(payload["cells"][0]["cell_code"], "cell-seeded-1")
|
||||
self.assertEqual(payload["run"]["id"], run.id)
|
||||
self.assertNotIn("task_id", payload)
|
||||
mock_delay.assert_not_called()
|
||||
|
||||
@patch("location_data.views.run_remote_sensing_analysis_task.delay")
|
||||
def test_post_remote_sensing_returns_cached_results_without_enqueuing(self, mock_delay):
|
||||
run = RemoteSensingRun.objects.create(
|
||||
soil_location=self.location,
|
||||
block_subdivision=self.subdivision,
|
||||
block_code="",
|
||||
chunk_size_sqm=900,
|
||||
temporal_start=self.temporal_start,
|
||||
temporal_end=self.temporal_end,
|
||||
status=RemoteSensingRun.STATUS_SUCCESS,
|
||||
metadata={"farm_uuid": str(self.farm.farm_uuid), "stage": "completed"},
|
||||
)
|
||||
cell = AnalysisGridCell.objects.create(
|
||||
soil_location=self.location,
|
||||
block_subdivision=self.subdivision,
|
||||
block_code="",
|
||||
cell_code="cell-cache-1",
|
||||
chunk_size_sqm=900,
|
||||
geometry=self.boundary,
|
||||
centroid_lat="35.689500",
|
||||
centroid_lon="51.389500",
|
||||
)
|
||||
AnalysisGridObservation.objects.create(
|
||||
cell=cell,
|
||||
run=run,
|
||||
temporal_start=self.temporal_start,
|
||||
temporal_end=self.temporal_end,
|
||||
ndvi=0.52,
|
||||
ndwi=0.18,
|
||||
soil_vv=0.11,
|
||||
soil_vv_db=-9.2,
|
||||
metadata={"backend_name": "openeo"},
|
||||
)
|
||||
|
||||
response = self.client.post(
|
||||
"/remote-sensing/",
|
||||
data={"farm_uuid": str(self.farm.farm_uuid), "force_refresh": False},
|
||||
format="json",
|
||||
)
|
||||
|
||||
self.assertEqual(response.status_code, 200)
|
||||
payload = response.json()["data"]
|
||||
self.assertEqual(payload["status"], "success")
|
||||
self.assertEqual(payload["source"], "database")
|
||||
self.assertTrue(payload["metadata"]["cache_hit"])
|
||||
self.assertEqual(payload["cells"][0]["cell_code"], "cell-cache-1")
|
||||
self.assertEqual(payload["run"]["id"], run.id)
|
||||
self.assertEqual(payload["run"]["status"], RemoteSensingRun.STATUS_SUCCESS)
|
||||
self.assertNotIn("task_id", payload)
|
||||
self.assertEqual(RemoteSensingRun.objects.count(), 1)
|
||||
mock_delay.assert_not_called()
|
||||
|
||||
@patch("location_data.views.run_remote_sensing_analysis_task.delay")
|
||||
def test_post_remote_sensing_cached_results_do_not_create_status_run(self, mock_delay):
|
||||
source_run = RemoteSensingRun.objects.create(
|
||||
soil_location=self.location,
|
||||
block_subdivision=self.subdivision,
|
||||
block_code="",
|
||||
chunk_size_sqm=900,
|
||||
temporal_start=self.temporal_start,
|
||||
temporal_end=self.temporal_end,
|
||||
status=RemoteSensingRun.STATUS_SUCCESS,
|
||||
metadata={"farm_uuid": str(self.farm.farm_uuid), "stage": "completed"},
|
||||
)
|
||||
cell = AnalysisGridCell.objects.create(
|
||||
soil_location=self.location,
|
||||
block_subdivision=self.subdivision,
|
||||
block_code="",
|
||||
cell_code="cell-status-cache-1",
|
||||
chunk_size_sqm=900,
|
||||
geometry=self.boundary,
|
||||
centroid_lat="35.689500",
|
||||
centroid_lon="51.389500",
|
||||
)
|
||||
AnalysisGridObservation.objects.create(
|
||||
cell=cell,
|
||||
run=source_run,
|
||||
temporal_start=self.temporal_start,
|
||||
temporal_end=self.temporal_end,
|
||||
ndvi=0.57,
|
||||
ndwi=0.19,
|
||||
soil_vv=0.12,
|
||||
soil_vv_db=-8.7,
|
||||
metadata={"backend_name": "openeo"},
|
||||
)
|
||||
|
||||
post_response = self.client.post(
|
||||
"/remote-sensing/",
|
||||
data={"farm_uuid": str(self.farm.farm_uuid), "force_refresh": False},
|
||||
format="json",
|
||||
)
|
||||
|
||||
self.assertEqual(post_response.status_code, 200)
|
||||
payload = post_response.json()["data"]
|
||||
self.assertEqual(payload["status"], "success")
|
||||
self.assertEqual(payload["run"]["id"], source_run.id)
|
||||
self.assertEqual(payload["summary"]["cell_count"], 1)
|
||||
self.assertEqual(payload["cells"][0]["cell_code"], "cell-status-cache-1")
|
||||
self.assertNotIn("task_id", payload)
|
||||
self.assertEqual(RemoteSensingRun.objects.count(), 1)
|
||||
mock_delay.assert_not_called()
|
||||
|
||||
@patch("location_data.views.run_remote_sensing_analysis_task.delay")
|
||||
def test_post_remote_sensing_returns_existing_processing_run_without_enqueuing(self, mock_delay):
|
||||
run = RemoteSensingRun.objects.create(
|
||||
soil_location=self.location,
|
||||
block_subdivision=self.subdivision,
|
||||
block_code="",
|
||||
chunk_size_sqm=900,
|
||||
temporal_start=self.temporal_start,
|
||||
temporal_end=self.temporal_end,
|
||||
status=RemoteSensingRun.STATUS_PENDING,
|
||||
metadata={
|
||||
"farm_uuid": str(self.farm.farm_uuid),
|
||||
"task_id": "e723ba3e-c53c-401b-b3a0-5f7013c7b401",
|
||||
"stage": "queued",
|
||||
},
|
||||
)
|
||||
|
||||
response = self.client.post(
|
||||
"/remote-sensing/",
|
||||
data={"farm_uuid": str(self.farm.farm_uuid), "force_refresh": False},
|
||||
format="json",
|
||||
)
|
||||
|
||||
self.assertEqual(response.status_code, 202)
|
||||
payload = response.json()["data"]
|
||||
self.assertEqual(payload["status"], "processing")
|
||||
self.assertEqual(payload["source"], "processing")
|
||||
self.assertEqual(payload["run"]["id"], run.id)
|
||||
mock_delay.assert_not_called()
|
||||
|
||||
@patch("location_data.views.run_remote_sensing_analysis_task.delay")
|
||||
def test_post_remote_sensing_ignores_other_farm_cache_on_same_location(self, mock_delay):
|
||||
other_farm_uuid = "33333333-3333-3333-3333-333333333333"
|
||||
mock_delay.return_value = SimpleNamespace(id="f723ba3e-c53c-401b-b3a0-5f7013c7b402")
|
||||
other_run = RemoteSensingRun.objects.create(
|
||||
soil_location=self.location,
|
||||
block_subdivision=self.subdivision,
|
||||
block_code="",
|
||||
chunk_size_sqm=900,
|
||||
temporal_start=self.temporal_start,
|
||||
temporal_end=self.temporal_end,
|
||||
status=RemoteSensingRun.STATUS_SUCCESS,
|
||||
metadata={"farm_uuid": other_farm_uuid, "stage": "completed"},
|
||||
)
|
||||
other_cell = AnalysisGridCell.objects.create(
|
||||
soil_location=self.location,
|
||||
block_subdivision=self.subdivision,
|
||||
block_code="",
|
||||
cell_code="cell-other-farm",
|
||||
chunk_size_sqm=900,
|
||||
geometry=self.boundary,
|
||||
centroid_lat="35.689510",
|
||||
centroid_lon="51.389510",
|
||||
)
|
||||
AnalysisGridObservation.objects.create(
|
||||
cell=other_cell,
|
||||
run=other_run,
|
||||
temporal_start=self.temporal_start,
|
||||
temporal_end=self.temporal_end,
|
||||
ndvi=0.66,
|
||||
ndwi=0.31,
|
||||
soil_vv=0.15,
|
||||
soil_vv_db=-8.1,
|
||||
metadata={"backend_name": "openeo"},
|
||||
)
|
||||
|
||||
response = self.client.post(
|
||||
"/remote-sensing/",
|
||||
data={"farm_uuid": str(self.farm.farm_uuid), "force_refresh": False},
|
||||
format="json",
|
||||
)
|
||||
|
||||
self.assertEqual(response.status_code, 202)
|
||||
payload = response.json()["data"]
|
||||
self.assertEqual(payload["status"], "processing")
|
||||
self.assertEqual(RemoteSensingRun.objects.count(), 2)
|
||||
self.assertNotEqual(payload["run"]["id"], other_run.id)
|
||||
mock_delay.assert_called_once()
|
||||
|
||||
def test_run_status_endpoint_returns_normalized_status(self):
|
||||
run = RemoteSensingRun.objects.create(
|
||||
soil_location=self.location,
|
||||
|
||||
+436
-100
@@ -1,6 +1,7 @@
|
||||
from datetime import timedelta
|
||||
from types import SimpleNamespace
|
||||
from typing import Any
|
||||
from uuid import uuid4
|
||||
|
||||
from django.apps import apps
|
||||
from django.core.paginator import EmptyPage, Paginator
|
||||
@@ -416,9 +417,16 @@ class RemoteSensingAnalysisView(APIView):
|
||||
@extend_schema(
|
||||
tags=["Location Data"],
|
||||
summary="اجرای async تحلیل سنجشازدور و subdivision دادهمحور",
|
||||
description="برای location موجود، pipeline کامل grid + openEO + observation persistence + KMeans clustering در Celery صف میشود و sync اجرا نمیشود.",
|
||||
description=(
|
||||
"اگر خروجی cache شده برای مزرعه موجود باشد، همان داده مستقیم برگردانده میشود. "
|
||||
"در غیر این صورت pipeline کامل grid + openEO + observation persistence + KMeans clustering در Celery صف میشود."
|
||||
),
|
||||
request=RemoteSensingFarmRequestSerializer,
|
||||
responses={
|
||||
200: build_response(
|
||||
RemoteSensingEnvelopeSerializer,
|
||||
"خروجی cache شده remote sensing بدون enqueue کردن Celery بازگردانده شد.",
|
||||
),
|
||||
202: build_response(
|
||||
RemoteSensingQueuedEnvelopeSerializer,
|
||||
"درخواست تحلیل سنجشازدور در صف قرار گرفت.",
|
||||
@@ -462,6 +470,28 @@ class RemoteSensingAnalysisView(APIView):
|
||||
|
||||
temporal_end = timezone.localdate() - timedelta(days=1)
|
||||
temporal_start = temporal_end - timedelta(days=30)
|
||||
if not payload.get("force_refresh", False):
|
||||
cached_response = _build_cached_remote_sensing_response(
|
||||
location=location,
|
||||
farm_uuid=str(payload["farm_uuid"]),
|
||||
block_code="",
|
||||
start_date=temporal_start,
|
||||
end_date=temporal_end,
|
||||
page=payload.get("page", 1),
|
||||
page_size=payload.get("page_size", 100),
|
||||
)
|
||||
if cached_response is not None:
|
||||
processing = cached_response.get("status") == "processing"
|
||||
status_code = status.HTTP_202_ACCEPTED if processing else status.HTTP_200_OK
|
||||
response_payload = cached_response
|
||||
return Response(
|
||||
{
|
||||
"code": 202 if status_code == status.HTTP_202_ACCEPTED else 200,
|
||||
"msg": "success" if processing else "داده cache شده بازگردانده شد.",
|
||||
"data": response_payload,
|
||||
},
|
||||
status=status_code,
|
||||
)
|
||||
run = RemoteSensingRun.objects.create(
|
||||
soil_location=location,
|
||||
block_code="",
|
||||
@@ -471,6 +501,7 @@ class RemoteSensingAnalysisView(APIView):
|
||||
status=RemoteSensingRun.STATUS_PENDING,
|
||||
metadata={
|
||||
"requested_via": "api",
|
||||
"stage": "queued",
|
||||
"status_label": "pending",
|
||||
"requested_cluster_count": None,
|
||||
"selected_features": list(DEFAULT_CLUSTER_FEATURES),
|
||||
@@ -585,92 +616,15 @@ class RemoteSensingAnalysisView(APIView):
|
||||
|
||||
temporal_end = timezone.localdate() - timedelta(days=1)
|
||||
temporal_start = temporal_end - timedelta(days=30)
|
||||
block_code = ""
|
||||
observations = _get_remote_sensing_observations(
|
||||
response_payload = _build_cached_remote_sensing_response(
|
||||
location=location,
|
||||
block_code=block_code,
|
||||
farm_uuid=str(payload["farm_uuid"]),
|
||||
block_code="",
|
||||
start_date=temporal_start,
|
||||
end_date=temporal_end,
|
||||
)
|
||||
run = _get_latest_remote_sensing_run(
|
||||
location=location,
|
||||
block_code=block_code,
|
||||
start_date=temporal_start,
|
||||
end_date=temporal_end,
|
||||
)
|
||||
subdivision_result = _get_remote_sensing_subdivision_result(
|
||||
location=location,
|
||||
block_code=block_code,
|
||||
start_date=temporal_start,
|
||||
end_date=temporal_end,
|
||||
)
|
||||
|
||||
if not observations.exists():
|
||||
processing = run is not None and run.status in {
|
||||
RemoteSensingRun.STATUS_PENDING,
|
||||
RemoteSensingRun.STATUS_RUNNING,
|
||||
}
|
||||
response_payload = {
|
||||
"status": "processing" if processing else "not_found",
|
||||
"source": "processing" if processing else "database",
|
||||
"location": SoilLocationResponseSerializer(location).data,
|
||||
"block_code": "",
|
||||
"chunk_size_sqm": getattr(run, "chunk_size_sqm", None),
|
||||
"temporal_extent": {
|
||||
"start_date": temporal_start.isoformat(),
|
||||
"end_date": temporal_end.isoformat(),
|
||||
},
|
||||
"summary": _empty_remote_sensing_summary(),
|
||||
"cells": [],
|
||||
"run": RemoteSensingRunSerializer(run).data if run else None,
|
||||
"subdivision_result": None,
|
||||
}
|
||||
return Response(
|
||||
{"code": 200, "msg": "success", "data": response_payload},
|
||||
status=status.HTTP_200_OK,
|
||||
)
|
||||
|
||||
paginated_observations = _paginate_observations(
|
||||
observations,
|
||||
page=payload["page"],
|
||||
page_size=payload["page_size"],
|
||||
)
|
||||
paginated_assignments = []
|
||||
pagination = {"cells": paginated_observations["pagination"]}
|
||||
if subdivision_result is not None:
|
||||
paginated = _paginate_assignments(
|
||||
subdivision_result,
|
||||
page=payload["page"],
|
||||
page_size=payload["page_size"],
|
||||
)
|
||||
paginated_assignments = paginated["items"]
|
||||
pagination["assignments"] = paginated["pagination"]
|
||||
|
||||
cells_data = RemoteSensingCellObservationSerializer(paginated_observations["items"], many=True).data
|
||||
subdivision_data = None
|
||||
if subdivision_result is not None:
|
||||
subdivision_data = RemoteSensingSubdivisionResultSerializer(
|
||||
subdivision_result,
|
||||
context={"paginated_assignments": paginated_assignments},
|
||||
).data
|
||||
|
||||
response_payload = {
|
||||
"status": "success",
|
||||
"source": "database",
|
||||
"location": SoilLocationResponseSerializer(location).data,
|
||||
"block_code": "",
|
||||
"chunk_size_sqm": observations.first().cell.chunk_size_sqm,
|
||||
"temporal_extent": {
|
||||
"start_date": temporal_start.isoformat(),
|
||||
"end_date": temporal_end.isoformat(),
|
||||
},
|
||||
"summary": _build_remote_sensing_summary(observations),
|
||||
"cells": cells_data,
|
||||
"run": RemoteSensingRunSerializer(run).data if run else None,
|
||||
"subdivision_result": subdivision_data,
|
||||
}
|
||||
if pagination is not None:
|
||||
response_payload["pagination"] = pagination
|
||||
return Response(
|
||||
{"code": 200, "msg": "success", "data": response_payload},
|
||||
status=status.HTTP_200_OK,
|
||||
@@ -805,6 +759,16 @@ class RemoteSensingClusterBlockLiveView(APIView):
|
||||
)
|
||||
|
||||
temporal_start, temporal_end = _resolve_live_remote_sensing_window(serializer.validated_data)
|
||||
cached_cluster_payload = _build_cached_cluster_block_live_payload(
|
||||
cluster_block=cluster_block,
|
||||
temporal_start=temporal_start,
|
||||
temporal_end=temporal_end,
|
||||
)
|
||||
if cached_cluster_payload is not None:
|
||||
return Response(
|
||||
{"code": 200, "msg": "success", "data": cached_cluster_payload},
|
||||
status=status.HTTP_200_OK,
|
||||
)
|
||||
virtual_cell = _build_virtual_cluster_block_cell(cluster_block=cluster_block, geometry=geometry)
|
||||
try:
|
||||
remote_payload = compute_remote_sensing_metrics(
|
||||
@@ -1055,23 +1019,25 @@ def _build_remote_sensing_run_status_payload(run: RemoteSensingRun, *, page: int
|
||||
if run.status == RemoteSensingRun.STATUS_FAILURE:
|
||||
return status_payload
|
||||
|
||||
source_run = _resolve_status_source_run(run)
|
||||
location = _get_location_by_lat_lon(run.soil_location.latitude, run.soil_location.longitude, prefetch=True)
|
||||
observations = _get_remote_sensing_observations(
|
||||
location=run.soil_location,
|
||||
block_code=run.block_code,
|
||||
start_date=run.temporal_start,
|
||||
end_date=run.temporal_end,
|
||||
location=source_run.soil_location,
|
||||
block_code=source_run.block_code,
|
||||
start_date=source_run.temporal_start,
|
||||
end_date=source_run.temporal_end,
|
||||
run=source_run,
|
||||
)
|
||||
subdivision_result = getattr(run, "subdivision_result", None)
|
||||
subdivision_result = _resolve_status_subdivision_result(run, source_run=source_run)
|
||||
|
||||
response_payload = {
|
||||
**status_payload,
|
||||
"location": SoilLocationResponseSerializer(location).data,
|
||||
"block_code": run.block_code,
|
||||
"chunk_size_sqm": run.chunk_size_sqm,
|
||||
"block_code": source_run.block_code,
|
||||
"chunk_size_sqm": source_run.chunk_size_sqm,
|
||||
"temporal_extent": {
|
||||
"start_date": run.temporal_start.isoformat() if run.temporal_start else None,
|
||||
"end_date": run.temporal_end.isoformat() if run.temporal_end else None,
|
||||
"start_date": source_run.temporal_start.isoformat() if source_run.temporal_start else None,
|
||||
"end_date": source_run.temporal_end.isoformat() if source_run.temporal_end else None,
|
||||
},
|
||||
"summary": _empty_remote_sensing_summary(),
|
||||
"cells": [],
|
||||
@@ -1287,6 +1253,73 @@ def _build_remote_sensing_celery_payload(task_id: str) -> dict | None:
|
||||
return payload
|
||||
|
||||
|
||||
def _create_cached_status_run(
    *,
    location: SoilLocation,
    farm_uuid: str,
    block_code: str,
    temporal_start,
    temporal_end,
    cached_response: dict[str, Any],
) -> RemoteSensingRun:
    """Persist a synthetic, already-completed run representing a cache hit.

    The new run points back (via ``source_run_id`` / ``source_result_id``
    in its metadata) at the run and subdivision result whose data was
    served, so status lookups can resolve the real payload later.
    """
    run_info = cached_response.get("run") or {}
    result_info = cached_response.get("subdivision_result") or {}
    # Prefer the cached chunk size; fall back to the location default.
    chunk_size = int(
        cached_response.get("chunk_size_sqm")
        or _resolve_chunk_size_for_location(location, block_code)
    )
    selected_features = list(
        result_info.get("selected_features")
        or run_info.get("selected_features")
        or DEFAULT_CLUSTER_FEATURES
    )
    metadata = {
        "requested_via": "api",
        "farm_uuid": farm_uuid,
        "task_id": str(uuid4()),
        "stage": "completed",
        "status_label": "completed",
        "selected_features": selected_features,
        "scope": "all_blocks",
        "cache_hit": True,
        "source_run_id": run_info.get("id"),
        "source_result_id": result_info.get("id"),
        "timestamps": {
            "queued_at": timezone.now().isoformat(),
            "completed_at": timezone.now().isoformat(),
        },
    }
    return RemoteSensingRun.objects.create(
        soil_location=location,
        block_subdivision=None,
        block_code=block_code or "",
        chunk_size_sqm=chunk_size,
        temporal_start=temporal_start,
        temporal_end=temporal_end,
        status=RemoteSensingRun.STATUS_SUCCESS,
        started_at=timezone.now(),
        finished_at=timezone.now(),
        metadata=metadata,
    )
|
||||
|
||||
|
||||
def _resolve_status_source_run(run: RemoteSensingRun) -> RemoteSensingRun:
    """Follow a cache-hit status run back to the run that produced its data.

    Falls back to *run* itself when no ``source_run_id`` is recorded or the
    referenced run no longer exists.
    """
    source_run_id = dict(run.metadata or {}).get("source_run_id")
    if source_run_id:
        source = (
            RemoteSensingRun.objects.filter(pk=source_run_id)
            .select_related("soil_location")
            .first()
        )
        if source is not None:
            return source
    return run
|
||||
|
||||
|
||||
def _resolve_status_subdivision_result(
    run: RemoteSensingRun,
    *,
    source_run: RemoteSensingRun,
) -> RemoteSensingSubdivisionResult | None:
    """Resolve the subdivision result a status payload should present.

    Prefers an explicit ``source_result_id`` from *run*'s metadata;
    otherwise falls back to the result attached to *source_run*, if any.
    """
    source_result_id = dict(run.metadata or {}).get("source_result_id")
    if not source_result_id:
        return getattr(source_run, "subdivision_result", None)
    queryset = RemoteSensingSubdivisionResult.objects.filter(pk=source_result_id)
    return queryset.prefetch_related("assignments__cell", "cluster_blocks").first()
|
||||
|
||||
|
||||
def _get_location_by_lat_lon(lat, lon, *, prefetch: bool = False):
|
||||
lat_rounded = round(lat, 6)
|
||||
lon_rounded = round(lon, 6)
|
||||
@@ -1428,6 +1461,210 @@ def _resolve_chunk_size_for_location(location: SoilLocation, block_code: str) ->
|
||||
return 900
|
||||
|
||||
|
||||
def _build_cached_remote_sensing_response(
    *,
    location: SoilLocation,
    farm_uuid: str,
    block_code: str,
    start_date,
    end_date,
    page: int,
    page_size: int,
) -> dict[str, Any] | None:
    """Assemble a remote-sensing API payload from previously persisted data.

    Looks up the latest farm-scoped run, subdivision result, and grid
    observations for the exact (location, block, temporal window) key.
    Returns a success payload when observations exist, a processing /
    not-found payload when only a run exists, a fallback payload built from
    the latest completed run when nothing matches this window, or ``None``
    when there is no cached state at all.

    ``start_date`` / ``end_date`` are date-like; only ``.isoformat()`` is
    required of them here.
    """
    run = _get_latest_remote_sensing_run(
        location=location,
        farm_uuid=farm_uuid,
        block_code=block_code,
        start_date=start_date,
        end_date=end_date,
    )
    subdivision_result = _get_remote_sensing_subdivision_result(
        location=location,
        farm_uuid=farm_uuid,
        block_code=block_code,
        start_date=start_date,
        end_date=end_date,
    )
    observations = _get_remote_sensing_observations(
        location=location,
        block_code=block_code,
        start_date=start_date,
        end_date=end_date,
        # Prefer scoping observations to the matched run; when no run matched,
        # fall back to the run attached to the subdivision result (if any).
        run=run if run is not None else getattr(subdivision_result, "run", None),
    )
    if run is None and subdivision_result is None:
        # Neither artifact exists for this farm/window — make sure no
        # unscoped observations leak into the payload.
        observations = observations.none()

    if not observations.exists():
        # Nothing cached for this exact window: try the most recent completed
        # run for the same farm/block instead.
        fallback_cached_response = _build_fallback_cached_remote_sensing_response(
            location=location,
            farm_uuid=farm_uuid,
            block_code=block_code,
            page=page,
            page_size=page_size,
        )
        if fallback_cached_response is not None:
            return fallback_cached_response
        if run is None:
            return None
        # A run exists but produced no observations yet: report it as either
        # still processing or not found, with an empty summary/cell list.
        processing = run.status in {
            RemoteSensingRun.STATUS_PENDING,
            RemoteSensingRun.STATUS_RUNNING,
        }
        source = "processing" if processing else "database"
        status_label = "processing" if processing else "not_found"
        # NOTE(review): this early payload omits the "pagination" key that the
        # success payload below includes — confirm consumers tolerate that.
        payload = {
            "status": status_label,
            "source": source,
            "location": SoilLocationResponseSerializer(location).data,
            "block_code": block_code or "",
            "chunk_size_sqm": getattr(run, "chunk_size_sqm", None),
            "temporal_extent": {
                "start_date": start_date.isoformat(),
                "end_date": end_date.isoformat(),
            },
            "summary": _empty_remote_sensing_summary(),
            "cells": [],
            "run": RemoteSensingRunSerializer(run).data,
            "subdivision_result": None,
            "metadata": {
                "farm_uuid": farm_uuid,
                "cache_hit": True,
            },
        }
        return payload

    # Success path: paginate cell observations (and assignments, when a
    # subdivision result is available) before serializing.
    paginated_observations = _paginate_observations(
        observations,
        page=page,
        page_size=page_size,
    )
    paginated_assignments = []
    pagination = {"cells": paginated_observations["pagination"]}
    if subdivision_result is not None:
        paginated = _paginate_assignments(
            subdivision_result,
            page=page,
            page_size=page_size,
        )
        paginated_assignments = paginated["items"]
        pagination["assignments"] = paginated["pagination"]

    subdivision_data = None
    if subdivision_result is not None:
        # The serializer receives the already-paginated assignments via
        # context so it does not re-serialize the full assignment set.
        subdivision_data = RemoteSensingSubdivisionResultSerializer(
            subdivision_result,
            context={"paginated_assignments": paginated_assignments},
        ).data

    payload = {
        "status": "success",
        "source": "database",
        "location": SoilLocationResponseSerializer(location).data,
        "block_code": block_code or "",
        # Safe: the observations.exists() guard above guarantees first() is
        # not None here (it does issue one extra query, though).
        "chunk_size_sqm": observations.first().cell.chunk_size_sqm,
        "temporal_extent": {
            "start_date": start_date.isoformat(),
            "end_date": end_date.isoformat(),
        },
        "summary": _build_remote_sensing_summary(observations),
        "cells": RemoteSensingCellObservationSerializer(
            paginated_observations["items"],
            many=True,
        ).data,
        "run": RemoteSensingRunSerializer(run).data if run else None,
        "subdivision_result": subdivision_data,
        "pagination": pagination,
        "metadata": {
            "farm_uuid": farm_uuid,
            "cache_hit": True,
        },
    }
    return payload
|
||||
|
||||
|
||||
def _build_fallback_cached_remote_sensing_response(
    *,
    location: SoilLocation,
    farm_uuid: str,
    block_code: str,
    page: int,
    page_size: int,
) -> dict[str, Any] | None:
    """Build a cached payload from the latest *completed* run for this farm.

    Used when no cached data matches the exact requested temporal window:
    the most recent successful run for (location, block, farm) supplies its
    own temporal window, observations, and subdivision result instead.
    Returns ``None`` when no completed run (or no observations for it)
    exists. The payload is tagged with
    ``metadata["cache_match"] = "latest_completed_for_farm"`` so callers can
    distinguish it from an exact-window hit.
    """
    fallback_run = _get_latest_completed_remote_sensing_run(
        location=location,
        farm_uuid=farm_uuid,
        block_code=block_code,
    )
    if fallback_run is None:
        return None

    # Scope observations to the fallback run's own temporal window.
    fallback_observations = _get_remote_sensing_observations(
        location=location,
        block_code=block_code,
        start_date=fallback_run.temporal_start,
        end_date=fallback_run.temporal_end,
        run=fallback_run,
    )
    if not fallback_observations.exists():
        return None

    fallback_result = _get_remote_sensing_subdivision_result(
        location=location,
        farm_uuid=farm_uuid,
        block_code=block_code,
        start_date=fallback_run.temporal_start,
        end_date=fallback_run.temporal_end,
    )
    paginated_observations = _paginate_observations(
        fallback_observations,
        page=page,
        page_size=page_size,
    )
    paginated_assignments = []
    pagination = {"cells": paginated_observations["pagination"]}
    if fallback_result is not None:
        paginated = _paginate_assignments(
            fallback_result,
            page=page,
            page_size=page_size,
        )
        paginated_assignments = paginated["items"]
        pagination["assignments"] = paginated["pagination"]

    subdivision_data = None
    if fallback_result is not None:
        # Pass the paginated assignments via context so the serializer emits
        # only the current page rather than the full assignment set.
        subdivision_data = RemoteSensingSubdivisionResultSerializer(
            fallback_result,
            context={"paginated_assignments": paginated_assignments},
        ).data

    return {
        "status": "success",
        "source": "database",
        "location": SoilLocationResponseSerializer(location).data,
        "block_code": block_code or "",
        "chunk_size_sqm": fallback_run.chunk_size_sqm,
        "temporal_extent": {
            # The run's temporal bounds may be unset on legacy rows; emit
            # None rather than crashing on .isoformat().
            "start_date": fallback_run.temporal_start.isoformat() if fallback_run.temporal_start else None,
            "end_date": fallback_run.temporal_end.isoformat() if fallback_run.temporal_end else None,
        },
        "summary": _build_remote_sensing_summary(fallback_observations),
        "cells": RemoteSensingCellObservationSerializer(
            paginated_observations["items"],
            many=True,
        ).data,
        "run": RemoteSensingRunSerializer(fallback_run).data,
        "subdivision_result": subdivision_data,
        "pagination": pagination,
        "metadata": {
            "farm_uuid": farm_uuid,
            "cache_hit": True,
            "cache_match": "latest_completed_for_farm",
        },
    }
|
||||
|
||||
|
||||
def _resolve_live_remote_sensing_window(payload: dict[str, Any]):
|
||||
temporal_start = payload.get("temporal_start")
|
||||
temporal_end = payload.get("temporal_end")
|
||||
@@ -1484,7 +1721,66 @@ def _build_virtual_cluster_block_cell(
|
||||
)
|
||||
|
||||
|
||||
def _get_remote_sensing_observations(*, location, block_code: str, start_date, end_date):
|
||||
def _build_cached_cluster_block_live_payload(
    *,
    cluster_block: RemoteSensingClusterBlock,
    temporal_start,
    temporal_end,
) -> dict[str, Any] | None:
    """Serve a cluster-block "live" payload from stored grid observations.

    Returns ``None`` when the block's parent result does not match the
    requested temporal window exactly, or when no observations exist for the
    block's cells in that window — the caller is then expected to compute a
    fresh payload. Otherwise the payload averages NDVI/NDWI/soil-VV metrics
    over the block's cells.

    ``temporal_start`` / ``temporal_end`` are date-like; only equality
    comparison and ``.isoformat()`` are required of them here.
    """
    result = cluster_block.result
    # Cache is only valid for the exact temporal window the result was
    # computed for.
    if result.temporal_start != temporal_start or result.temporal_end != temporal_end:
        return None

    observations = (
        AnalysisGridObservation.objects.select_related("cell")
        .filter(
            cell__soil_location=cluster_block.soil_location,
            cell__cell_code__in=list(cluster_block.cell_codes or []),
            temporal_start=temporal_start,
            temporal_end=temporal_end,
        )
        .order_by("cell__cell_code")
    )
    if not observations.exists():
        return None

    # Block-level means across all matching cell observations (SQL AVG).
    metrics = observations.aggregate(
        ndvi=Avg("ndvi"),
        ndwi=Avg("ndwi"),
        soil_vv=Avg("soil_vv"),
        soil_vv_db=Avg("soil_vv_db"),
    )
    return {
        "status": "success",
        "source": "database",
        "cluster_block": RemoteSensingClusterBlockSerializer(cluster_block).data,
        "temporal_extent": {
            "start_date": temporal_start.isoformat(),
            "end_date": temporal_end.isoformat(),
        },
        "selected_features": list(DEFAULT_CLUSTER_FEATURES),
        "summary": {
            # Prefer the stored cell count; fall back to counting the
            # observation rows when it is unset/zero.
            "cell_count": int(cluster_block.cell_count or observations.count()),
            "ndvi_mean": _round_or_none(metrics.get("ndvi")),
            "ndwi_mean": _round_or_none(metrics.get("ndwi")),
            "soil_vv_db_mean": _round_or_none(metrics.get("soil_vv_db")),
        },
        "metrics": {
            "ndvi": _round_or_none(metrics.get("ndvi")),
            "ndwi": _round_or_none(metrics.get("ndwi")),
            "soil_vv": _round_or_none(metrics.get("soil_vv")),
            "soil_vv_db": _round_or_none(metrics.get("soil_vv_db")),
        },
        "metadata": {
            "requested_cluster_uuid": str(cluster_block.uuid),
            "cache_hit": True,
            # Provenance pointers so status endpoints can resolve the origin.
            "source_run_id": result.run_id,
            "source_result_id": result.id,
        },
    }
|
||||
|
||||
|
||||
def _get_remote_sensing_observations(*, location, block_code: str, start_date, end_date, run=None):
|
||||
queryset = (
|
||||
AnalysisGridObservation.objects.select_related("cell", "run")
|
||||
.filter(
|
||||
@@ -1494,24 +1790,56 @@ def _get_remote_sensing_observations(*, location, block_code: str, start_date, e
|
||||
)
|
||||
.order_by("cell__cell_code")
|
||||
)
|
||||
return queryset.filter(cell__block_code=block_code or "")
|
||||
queryset = queryset.filter(cell__block_code=block_code or "")
|
||||
if run is not None:
|
||||
queryset = queryset.filter(run=run)
|
||||
return queryset
|
||||
|
||||
|
||||
def _get_latest_remote_sensing_run(*, location, block_code: str, start_date, end_date):
|
||||
return (
|
||||
def _select_farm_scoped_run(runs, farm_uuid: str):
|
||||
legacy_candidate = None
|
||||
for run in runs:
|
||||
metadata = dict(run.metadata or {})
|
||||
scoped_farm_uuid = metadata.get("farm_uuid")
|
||||
if scoped_farm_uuid == farm_uuid:
|
||||
return run
|
||||
if scoped_farm_uuid in (None, "") and legacy_candidate is None:
|
||||
legacy_candidate = run
|
||||
return legacy_candidate
|
||||
|
||||
|
||||
def _get_latest_remote_sensing_run(*, location, farm_uuid: str, block_code: str, start_date, end_date):
    """Most recent run for the exact (location, block, temporal window) key.

    Candidates are ordered newest-first and then narrowed to the run scoped
    to *farm_uuid* (legacy runs without a farm scope act as a fallback —
    see ``_select_farm_scoped_run``). Returns ``None`` when nothing matches.
    """
    candidates = RemoteSensingRun.objects.filter(
        soil_location=location,
        block_code=block_code or "",
        temporal_start=start_date,
        temporal_end=end_date,
    ).order_by("-created_at", "-id")
    return _select_farm_scoped_run(list(candidates), farm_uuid)
|
||||
|
||||
|
||||
def _get_remote_sensing_subdivision_result(*, location, block_code: str, start_date, end_date):
|
||||
return (
|
||||
def _get_latest_completed_remote_sensing_run(*, location, farm_uuid: str, block_code: str):
    """Latest successful run for (location, block), scoped to *farm_uuid*.

    Unlike ``_get_latest_remote_sensing_run`` this ignores the temporal
    window and only considers runs with ``STATUS_SUCCESS`` — it backs the
    fallback cache path. Farm scoping (with legacy-run fallback) is handled
    by ``_select_farm_scoped_run``; returns ``None`` when nothing matches.
    """
    runs = list(
        RemoteSensingRun.objects.filter(
            soil_location=location,
            block_code=block_code or "",
            status=RemoteSensingRun.STATUS_SUCCESS,
        ).order_by("-created_at", "-id")
    )
    return _select_farm_scoped_run(runs, farm_uuid)
|
||||
|
||||
|
||||
def _get_remote_sensing_subdivision_result(
|
||||
*,
|
||||
location,
|
||||
farm_uuid: str,
|
||||
block_code: str,
|
||||
start_date,
|
||||
end_date,
|
||||
):
|
||||
results = list(
|
||||
RemoteSensingSubdivisionResult.objects.filter(
|
||||
soil_location=location,
|
||||
block_code=block_code or "",
|
||||
@@ -1521,8 +1849,16 @@ def _get_remote_sensing_subdivision_result(*, location, block_code: str, start_d
|
||||
.select_related("run")
|
||||
.prefetch_related("assignments__cell", "cluster_blocks")
|
||||
.order_by("-created_at", "-id")
|
||||
.first()
|
||||
)
|
||||
legacy_candidate = None
|
||||
for result in results:
|
||||
run = getattr(result, "run", None)
|
||||
scoped_farm_uuid = dict(getattr(run, "metadata", {}) or {}).get("farm_uuid")
|
||||
if scoped_farm_uuid == farm_uuid:
|
||||
return result
|
||||
if scoped_farm_uuid in (None, "") and legacy_candidate is None:
|
||||
legacy_candidate = result
|
||||
return legacy_candidate
|
||||
|
||||
|
||||
def _build_remote_sensing_summary(observations):
|
||||
|
||||
Reference in New Issue
Block a user