This commit is contained in:
2026-05-13 22:28:56 +03:30
parent 46fe62fa04
commit 45fee1dfd3
26 changed files with 2329 additions and 878 deletions
+436 -100
View File
@@ -1,6 +1,7 @@
from datetime import timedelta
from types import SimpleNamespace
from typing import Any
from uuid import uuid4
from django.apps import apps
from django.core.paginator import EmptyPage, Paginator
@@ -416,9 +417,16 @@ class RemoteSensingAnalysisView(APIView):
@extend_schema(
tags=["Location Data"],
summary="اجرای async تحلیل سنجش‌ازدور و subdivision داده‌محور",
description="برای location موجود، pipeline کامل grid + openEO + observation persistence + KMeans clustering در Celery صف می‌شود و sync اجرا نمی‌شود.",
description=(
"اگر خروجی cache شده برای مزرعه موجود باشد، همان داده مستقیم برگردانده می‌شود. "
"در غیر این صورت pipeline کامل grid + openEO + observation persistence + KMeans clustering در Celery صف می‌شود."
),
request=RemoteSensingFarmRequestSerializer,
responses={
200: build_response(
RemoteSensingEnvelopeSerializer,
"خروجی cache شده remote sensing بدون enqueue کردن Celery بازگردانده شد.",
),
202: build_response(
RemoteSensingQueuedEnvelopeSerializer,
"درخواست تحلیل سنجش‌ازدور در صف قرار گرفت.",
@@ -462,6 +470,28 @@ class RemoteSensingAnalysisView(APIView):
temporal_end = timezone.localdate() - timedelta(days=1)
temporal_start = temporal_end - timedelta(days=30)
if not payload.get("force_refresh", False):
cached_response = _build_cached_remote_sensing_response(
location=location,
farm_uuid=str(payload["farm_uuid"]),
block_code="",
start_date=temporal_start,
end_date=temporal_end,
page=payload.get("page", 1),
page_size=payload.get("page_size", 100),
)
if cached_response is not None:
processing = cached_response.get("status") == "processing"
status_code = status.HTTP_202_ACCEPTED if processing else status.HTTP_200_OK
response_payload = cached_response
return Response(
{
"code": 202 if status_code == status.HTTP_202_ACCEPTED else 200,
"msg": "success" if processing else "داده cache شده بازگردانده شد.",
"data": response_payload,
},
status=status_code,
)
run = RemoteSensingRun.objects.create(
soil_location=location,
block_code="",
@@ -471,6 +501,7 @@ class RemoteSensingAnalysisView(APIView):
status=RemoteSensingRun.STATUS_PENDING,
metadata={
"requested_via": "api",
"stage": "queued",
"status_label": "pending",
"requested_cluster_count": None,
"selected_features": list(DEFAULT_CLUSTER_FEATURES),
@@ -585,92 +616,15 @@ class RemoteSensingAnalysisView(APIView):
temporal_end = timezone.localdate() - timedelta(days=1)
temporal_start = temporal_end - timedelta(days=30)
block_code = ""
observations = _get_remote_sensing_observations(
response_payload = _build_cached_remote_sensing_response(
location=location,
block_code=block_code,
farm_uuid=str(payload["farm_uuid"]),
block_code="",
start_date=temporal_start,
end_date=temporal_end,
)
run = _get_latest_remote_sensing_run(
location=location,
block_code=block_code,
start_date=temporal_start,
end_date=temporal_end,
)
subdivision_result = _get_remote_sensing_subdivision_result(
location=location,
block_code=block_code,
start_date=temporal_start,
end_date=temporal_end,
)
if not observations.exists():
processing = run is not None and run.status in {
RemoteSensingRun.STATUS_PENDING,
RemoteSensingRun.STATUS_RUNNING,
}
response_payload = {
"status": "processing" if processing else "not_found",
"source": "processing" if processing else "database",
"location": SoilLocationResponseSerializer(location).data,
"block_code": "",
"chunk_size_sqm": getattr(run, "chunk_size_sqm", None),
"temporal_extent": {
"start_date": temporal_start.isoformat(),
"end_date": temporal_end.isoformat(),
},
"summary": _empty_remote_sensing_summary(),
"cells": [],
"run": RemoteSensingRunSerializer(run).data if run else None,
"subdivision_result": None,
}
return Response(
{"code": 200, "msg": "success", "data": response_payload},
status=status.HTTP_200_OK,
)
paginated_observations = _paginate_observations(
observations,
page=payload["page"],
page_size=payload["page_size"],
)
paginated_assignments = []
pagination = {"cells": paginated_observations["pagination"]}
if subdivision_result is not None:
paginated = _paginate_assignments(
subdivision_result,
page=payload["page"],
page_size=payload["page_size"],
)
paginated_assignments = paginated["items"]
pagination["assignments"] = paginated["pagination"]
cells_data = RemoteSensingCellObservationSerializer(paginated_observations["items"], many=True).data
subdivision_data = None
if subdivision_result is not None:
subdivision_data = RemoteSensingSubdivisionResultSerializer(
subdivision_result,
context={"paginated_assignments": paginated_assignments},
).data
response_payload = {
"status": "success",
"source": "database",
"location": SoilLocationResponseSerializer(location).data,
"block_code": "",
"chunk_size_sqm": observations.first().cell.chunk_size_sqm,
"temporal_extent": {
"start_date": temporal_start.isoformat(),
"end_date": temporal_end.isoformat(),
},
"summary": _build_remote_sensing_summary(observations),
"cells": cells_data,
"run": RemoteSensingRunSerializer(run).data if run else None,
"subdivision_result": subdivision_data,
}
if pagination is not None:
response_payload["pagination"] = pagination
return Response(
{"code": 200, "msg": "success", "data": response_payload},
status=status.HTTP_200_OK,
@@ -805,6 +759,16 @@ class RemoteSensingClusterBlockLiveView(APIView):
)
temporal_start, temporal_end = _resolve_live_remote_sensing_window(serializer.validated_data)
cached_cluster_payload = _build_cached_cluster_block_live_payload(
cluster_block=cluster_block,
temporal_start=temporal_start,
temporal_end=temporal_end,
)
if cached_cluster_payload is not None:
return Response(
{"code": 200, "msg": "success", "data": cached_cluster_payload},
status=status.HTTP_200_OK,
)
virtual_cell = _build_virtual_cluster_block_cell(cluster_block=cluster_block, geometry=geometry)
try:
remote_payload = compute_remote_sensing_metrics(
@@ -1055,23 +1019,25 @@ def _build_remote_sensing_run_status_payload(run: RemoteSensingRun, *, page: int
if run.status == RemoteSensingRun.STATUS_FAILURE:
return status_payload
source_run = _resolve_status_source_run(run)
location = _get_location_by_lat_lon(run.soil_location.latitude, run.soil_location.longitude, prefetch=True)
observations = _get_remote_sensing_observations(
location=run.soil_location,
block_code=run.block_code,
start_date=run.temporal_start,
end_date=run.temporal_end,
location=source_run.soil_location,
block_code=source_run.block_code,
start_date=source_run.temporal_start,
end_date=source_run.temporal_end,
run=source_run,
)
subdivision_result = getattr(run, "subdivision_result", None)
subdivision_result = _resolve_status_subdivision_result(run, source_run=source_run)
response_payload = {
**status_payload,
"location": SoilLocationResponseSerializer(location).data,
"block_code": run.block_code,
"chunk_size_sqm": run.chunk_size_sqm,
"block_code": source_run.block_code,
"chunk_size_sqm": source_run.chunk_size_sqm,
"temporal_extent": {
"start_date": run.temporal_start.isoformat() if run.temporal_start else None,
"end_date": run.temporal_end.isoformat() if run.temporal_end else None,
"start_date": source_run.temporal_start.isoformat() if source_run.temporal_start else None,
"end_date": source_run.temporal_end.isoformat() if source_run.temporal_end else None,
},
"summary": _empty_remote_sensing_summary(),
"cells": [],
@@ -1287,6 +1253,73 @@ def _build_remote_sensing_celery_payload(task_id: str) -> dict | None:
return payload
def _create_cached_status_run(
    *,
    location: SoilLocation,
    farm_uuid: str,
    block_code: str,
    temporal_start,
    temporal_end,
    cached_response: dict[str, Any],
) -> RemoteSensingRun:
    """Persist a synthetic, already-completed run that records a cache hit.

    The run is created in SUCCESS state and its metadata points back to the
    run/result that actually produced the cached payload (``source_run_id`` /
    ``source_result_id``), so later status lookups can be redirected there.
    """
    run_payload = cached_response.get("run") or {}
    result_payload = cached_response.get("subdivision_result") or {}
    # Prefer the chunk size carried by the cached payload; otherwise fall
    # back to the location-level default.
    chunk_size = cached_response.get("chunk_size_sqm") or _resolve_chunk_size_for_location(location, block_code)
    feature_names = (
        result_payload.get("selected_features")
        or run_payload.get("selected_features")
        or DEFAULT_CLUSTER_FEATURES
    )
    run_metadata = {
        "requested_via": "api",
        "farm_uuid": farm_uuid,
        "task_id": str(uuid4()),
        "stage": "completed",
        "status_label": "completed",
        "selected_features": list(feature_names),
        "scope": "all_blocks",
        "cache_hit": True,
        "source_run_id": run_payload.get("id"),
        "source_result_id": result_payload.get("id"),
        "timestamps": {
            "queued_at": timezone.now().isoformat(),
            "completed_at": timezone.now().isoformat(),
        },
    }
    return RemoteSensingRun.objects.create(
        soil_location=location,
        block_subdivision=None,
        block_code=block_code or "",
        chunk_size_sqm=int(chunk_size),
        temporal_start=temporal_start,
        temporal_end=temporal_end,
        status=RemoteSensingRun.STATUS_SUCCESS,
        started_at=timezone.now(),
        finished_at=timezone.now(),
        metadata=run_metadata,
    )
def _resolve_status_source_run(run: RemoteSensingRun) -> RemoteSensingRun:
    """Follow a cache-hit run back to the run that produced its data.

    Returns *run* itself when no ``source_run_id`` is recorded in its
    metadata, or when the referenced run no longer exists.
    """
    referenced_pk = dict(run.metadata or {}).get("source_run_id")
    if not referenced_pk:
        return run
    source = (
        RemoteSensingRun.objects.filter(pk=referenced_pk)
        .select_related("soil_location")
        .first()
    )
    return source if source is not None else run
def _resolve_status_subdivision_result(
    run: RemoteSensingRun,
    *,
    source_run: RemoteSensingRun,
) -> RemoteSensingSubdivisionResult | None:
    """Locate the subdivision result backing *run*'s status payload.

    A cache-hit run stores ``source_result_id`` in its metadata; when that is
    absent, the result attached to *source_run* (if any) is used instead.
    """
    referenced_pk = dict(run.metadata or {}).get("source_result_id")
    if not referenced_pk:
        return getattr(source_run, "subdivision_result", None)
    return (
        RemoteSensingSubdivisionResult.objects.filter(pk=referenced_pk)
        .prefetch_related("assignments__cell", "cluster_blocks")
        .first()
    )
def _get_location_by_lat_lon(lat, lon, *, prefetch: bool = False):
lat_rounded = round(lat, 6)
lon_rounded = round(lon, 6)
@@ -1428,6 +1461,210 @@ def _resolve_chunk_size_for_location(location: SoilLocation, block_code: str) ->
return 900
def _build_cached_remote_sensing_response(
    *,
    location: SoilLocation,
    farm_uuid: str,
    block_code: str,
    start_date,
    end_date,
    page: int,
    page_size: int,
) -> dict[str, Any] | None:
    """Assemble a remote-sensing response from previously persisted data.

    Returns ``None`` when nothing usable is cached (the caller should then
    enqueue a fresh pipeline run); otherwise returns a serialized payload
    whose ``status`` is ``"success"``, ``"processing"`` or ``"not_found"``.
    """
    # Latest run / subdivision result scoped to this farm_uuid (legacy rows
    # without a farm_uuid act as a fallback inside these helpers).
    run = _get_latest_remote_sensing_run(
        location=location,
        farm_uuid=farm_uuid,
        block_code=block_code,
        start_date=start_date,
        end_date=end_date,
    )
    subdivision_result = _get_remote_sensing_subdivision_result(
        location=location,
        farm_uuid=farm_uuid,
        block_code=block_code,
        start_date=start_date,
        end_date=end_date,
    )
    observations = _get_remote_sensing_observations(
        location=location,
        block_code=block_code,
        start_date=start_date,
        end_date=end_date,
        # Restrict observations to the selected run so that farms sharing a
        # location do not read each other's cached cells.
        run=run if run is not None else getattr(subdivision_result, "run", None),
    )
    if run is None and subdivision_result is None:
        # No farm-scoped run at all: ignore any stray observations.
        observations = observations.none()
    if not observations.exists():
        # Requested window has no data; try the most recent completed run
        # for this farm regardless of the temporal window.
        fallback_cached_response = _build_fallback_cached_remote_sensing_response(
            location=location,
            farm_uuid=farm_uuid,
            block_code=block_code,
            page=page,
            page_size=page_size,
        )
        if fallback_cached_response is not None:
            return fallback_cached_response
        if run is None:
            return None
        # A run exists but produced no observations yet: report its state
        # with an empty summary/cell list.
        processing = run.status in {
            RemoteSensingRun.STATUS_PENDING,
            RemoteSensingRun.STATUS_RUNNING,
        }
        source = "processing" if processing else "database"
        status_label = "processing" if processing else "not_found"
        payload = {
            "status": status_label,
            "source": source,
            "location": SoilLocationResponseSerializer(location).data,
            "block_code": block_code or "",
            "chunk_size_sqm": getattr(run, "chunk_size_sqm", None),
            "temporal_extent": {
                "start_date": start_date.isoformat(),
                "end_date": end_date.isoformat(),
            },
            "summary": _empty_remote_sensing_summary(),
            "cells": [],
            "run": RemoteSensingRunSerializer(run).data,
            "subdivision_result": None,
            "metadata": {
                "farm_uuid": farm_uuid,
                "cache_hit": True,
            },
        }
        return payload
    # Cached observations exist: paginate cells (and assignments when a
    # subdivision result is available) and serialize the full payload.
    paginated_observations = _paginate_observations(
        observations,
        page=page,
        page_size=page_size,
    )
    paginated_assignments = []
    pagination = {"cells": paginated_observations["pagination"]}
    if subdivision_result is not None:
        paginated = _paginate_assignments(
            subdivision_result,
            page=page,
            page_size=page_size,
        )
        paginated_assignments = paginated["items"]
        pagination["assignments"] = paginated["pagination"]
    subdivision_data = None
    if subdivision_result is not None:
        subdivision_data = RemoteSensingSubdivisionResultSerializer(
            subdivision_result,
            context={"paginated_assignments": paginated_assignments},
        ).data
    payload = {
        "status": "success",
        "source": "database",
        "location": SoilLocationResponseSerializer(location).data,
        "block_code": block_code or "",
        # Chunk size is read off the first cell; presumably uniform across
        # the grid — TODO confirm.
        "chunk_size_sqm": observations.first().cell.chunk_size_sqm,
        "temporal_extent": {
            "start_date": start_date.isoformat(),
            "end_date": end_date.isoformat(),
        },
        "summary": _build_remote_sensing_summary(observations),
        "cells": RemoteSensingCellObservationSerializer(
            paginated_observations["items"],
            many=True,
        ).data,
        "run": RemoteSensingRunSerializer(run).data if run else None,
        "subdivision_result": subdivision_data,
        "pagination": pagination,
        "metadata": {
            "farm_uuid": farm_uuid,
            "cache_hit": True,
        },
    }
    return payload
def _build_fallback_cached_remote_sensing_response(
    *,
    location: SoilLocation,
    farm_uuid: str,
    block_code: str,
    page: int,
    page_size: int,
) -> dict[str, Any] | None:
    """Serve the newest completed run for this farm, ignoring the requested window.

    Used when the requested temporal window has no cached observations.
    Returns ``None`` when no completed run with observations exists either.
    """
    fallback_run = _get_latest_completed_remote_sensing_run(
        location=location,
        farm_uuid=farm_uuid,
        block_code=block_code,
    )
    if fallback_run is None:
        return None
    # Observations are looked up against the fallback run's own window and
    # restricted to that run.
    fallback_observations = _get_remote_sensing_observations(
        location=location,
        block_code=block_code,
        start_date=fallback_run.temporal_start,
        end_date=fallback_run.temporal_end,
        run=fallback_run,
    )
    if not fallback_observations.exists():
        return None
    fallback_result = _get_remote_sensing_subdivision_result(
        location=location,
        farm_uuid=farm_uuid,
        block_code=block_code,
        start_date=fallback_run.temporal_start,
        end_date=fallback_run.temporal_end,
    )
    paginated_observations = _paginate_observations(
        fallback_observations,
        page=page,
        page_size=page_size,
    )
    paginated_assignments = []
    pagination = {"cells": paginated_observations["pagination"]}
    if fallback_result is not None:
        paginated = _paginate_assignments(
            fallback_result,
            page=page,
            page_size=page_size,
        )
        paginated_assignments = paginated["items"]
        pagination["assignments"] = paginated["pagination"]
    subdivision_data = None
    if fallback_result is not None:
        subdivision_data = RemoteSensingSubdivisionResultSerializer(
            fallback_result,
            context={"paginated_assignments": paginated_assignments},
        ).data
    return {
        "status": "success",
        "source": "database",
        "location": SoilLocationResponseSerializer(location).data,
        "block_code": block_code or "",
        "chunk_size_sqm": fallback_run.chunk_size_sqm,
        "temporal_extent": {
            # Window comes from the fallback run, not from the caller.
            "start_date": fallback_run.temporal_start.isoformat() if fallback_run.temporal_start else None,
            "end_date": fallback_run.temporal_end.isoformat() if fallback_run.temporal_end else None,
        },
        "summary": _build_remote_sensing_summary(fallback_observations),
        "cells": RemoteSensingCellObservationSerializer(
            paginated_observations["items"],
            many=True,
        ).data,
        "run": RemoteSensingRunSerializer(fallback_run).data,
        "subdivision_result": subdivision_data,
        "pagination": pagination,
        "metadata": {
            "farm_uuid": farm_uuid,
            "cache_hit": True,
            # Marks that this payload did not match the requested window.
            "cache_match": "latest_completed_for_farm",
        },
    }
def _resolve_live_remote_sensing_window(payload: dict[str, Any]):
temporal_start = payload.get("temporal_start")
temporal_end = payload.get("temporal_end")
@@ -1484,7 +1721,66 @@ def _build_virtual_cluster_block_cell(
)
def _get_remote_sensing_observations(*, location, block_code: str, start_date, end_date):
def _build_cached_cluster_block_live_payload(
    *,
    cluster_block: RemoteSensingClusterBlock,
    temporal_start,
    temporal_end,
) -> dict[str, Any] | None:
    """Serve a cluster-block live payload from stored observations when possible.

    Returns ``None`` when the block's parent result covers a different
    temporal window or no matching observations exist, signalling the caller
    to fall back to a live computation.
    """
    parent_result = cluster_block.result
    window_matches = (
        parent_result.temporal_start == temporal_start
        and parent_result.temporal_end == temporal_end
    )
    if not window_matches:
        return None
    cell_observations = (
        AnalysisGridObservation.objects.select_related("cell")
        .filter(
            cell__soil_location=cluster_block.soil_location,
            cell__cell_code__in=list(cluster_block.cell_codes or []),
            temporal_start=temporal_start,
            temporal_end=temporal_end,
        )
        .order_by("cell__cell_code")
    )
    if not cell_observations.exists():
        return None
    # Average the stored per-cell metrics in the database.
    averages = cell_observations.aggregate(
        ndvi=Avg("ndvi"),
        ndwi=Avg("ndwi"),
        soil_vv=Avg("soil_vv"),
        soil_vv_db=Avg("soil_vv_db"),
    )
    ndvi_mean = _round_or_none(averages.get("ndvi"))
    ndwi_mean = _round_or_none(averages.get("ndwi"))
    soil_vv_db_mean = _round_or_none(averages.get("soil_vv_db"))
    return {
        "status": "success",
        "source": "database",
        "cluster_block": RemoteSensingClusterBlockSerializer(cluster_block).data,
        "temporal_extent": {
            "start_date": temporal_start.isoformat(),
            "end_date": temporal_end.isoformat(),
        },
        "selected_features": list(DEFAULT_CLUSTER_FEATURES),
        "summary": {
            "cell_count": int(cluster_block.cell_count or cell_observations.count()),
            "ndvi_mean": ndvi_mean,
            "ndwi_mean": ndwi_mean,
            "soil_vv_db_mean": soil_vv_db_mean,
        },
        "metrics": {
            "ndvi": ndvi_mean,
            "ndwi": ndwi_mean,
            "soil_vv": _round_or_none(averages.get("soil_vv")),
            "soil_vv_db": soil_vv_db_mean,
        },
        "metadata": {
            "requested_cluster_uuid": str(cluster_block.uuid),
            "cache_hit": True,
            "source_run_id": parent_result.run_id,
            "source_result_id": parent_result.id,
        },
    }
def _get_remote_sensing_observations(*, location, block_code: str, start_date, end_date, run=None):
queryset = (
AnalysisGridObservation.objects.select_related("cell", "run")
.filter(
@@ -1494,24 +1790,56 @@ def _get_remote_sensing_observations(*, location, block_code: str, start_date, e
)
.order_by("cell__cell_code")
)
return queryset.filter(cell__block_code=block_code or "")
queryset = queryset.filter(cell__block_code=block_code or "")
if run is not None:
queryset = queryset.filter(run=run)
return queryset
def _get_latest_remote_sensing_run(*, location, block_code: str, start_date, end_date):
return (
def _select_farm_scoped_run(runs, farm_uuid: str):
legacy_candidate = None
for run in runs:
metadata = dict(run.metadata or {})
scoped_farm_uuid = metadata.get("farm_uuid")
if scoped_farm_uuid == farm_uuid:
return run
if scoped_farm_uuid in (None, "") and legacy_candidate is None:
legacy_candidate = run
return legacy_candidate
def _get_latest_remote_sensing_run(*, location, farm_uuid: str, block_code: str, start_date, end_date):
runs = list(
RemoteSensingRun.objects.filter(
soil_location=location,
block_code=block_code or "",
temporal_start=start_date,
temporal_end=end_date,
)
.order_by("-created_at", "-id")
.first()
).order_by("-created_at", "-id")
)
return _select_farm_scoped_run(runs, farm_uuid)
def _get_remote_sensing_subdivision_result(*, location, block_code: str, start_date, end_date):
return (
def _get_latest_completed_remote_sensing_run(*, location, farm_uuid: str, block_code: str):
    """Return the newest successful run for *location*/*block_code*, scoped to *farm_uuid*.

    Farm scoping (including the legacy untagged fallback) is delegated to
    ``_select_farm_scoped_run``; returns ``None`` when nothing matches.
    """
    completed = RemoteSensingRun.objects.filter(
        soil_location=location,
        block_code=block_code or "",
        status=RemoteSensingRun.STATUS_SUCCESS,
    ).order_by("-created_at", "-id")
    return _select_farm_scoped_run(list(completed), farm_uuid)
def _get_remote_sensing_subdivision_result(
*,
location,
farm_uuid: str,
block_code: str,
start_date,
end_date,
):
results = list(
RemoteSensingSubdivisionResult.objects.filter(
soil_location=location,
block_code=block_code or "",
@@ -1521,8 +1849,16 @@ def _get_remote_sensing_subdivision_result(*, location, block_code: str, start_d
.select_related("run")
.prefetch_related("assignments__cell", "cluster_blocks")
.order_by("-created_at", "-id")
.first()
)
legacy_candidate = None
for result in results:
run = getattr(result, "run", None)
scoped_farm_uuid = dict(getattr(run, "metadata", {}) or {}).get("farm_uuid")
if scoped_farm_uuid == farm_uuid:
return result
if scoped_farm_uuid in (None, "") and legacy_candidate is None:
legacy_candidate = result
return legacy_candidate
def _build_remote_sensing_summary(observations):