Files
Ai/location_data/views.py
T

1551 lines
59 KiB
Python
Raw Normal View History

2026-05-10 02:02:48 +03:30
from datetime import timedelta
2026-05-11 04:38:44 +03:30
from types import SimpleNamespace
2026-05-10 22:49:07 +03:30
from typing import Any
2026-05-10 02:02:48 +03:30
2026-04-25 17:22:41 +03:30
from django.apps import apps
2026-05-09 16:55:06 +03:30
from django.core.paginator import EmptyPage, Paginator
from django.db.models import Avg
from django.db import transaction
2026-05-10 02:02:48 +03:30
from django.utils import timezone
from rest_framework import status
2026-03-19 22:54:29 +03:30
from drf_spectacular.utils import (
OpenApiExample,
2026-05-10 02:02:48 +03:30
OpenApiParameter,
2026-03-19 22:54:29 +03:30
OpenApiResponse,
extend_schema,
inline_serializer,
)
from rest_framework import serializers as drf_serializers
from rest_framework.response import Response
from rest_framework.views import APIView
2026-03-25 01:56:41 +03:30
from config.openapi import (
build_envelope_serializer,
build_response,
)
2026-05-09 16:55:06 +03:30
from .models import (
AnalysisGridCell,
AnalysisGridObservation,
BlockSubdivision,
2026-05-11 04:38:44 +03:30
RemoteSensingClusterBlock,
2026-05-09 16:55:06 +03:30
RemoteSensingRun,
RemoteSensingSubdivisionResult,
2026-05-11 04:38:44 +03:30
RemoteSensingSubdivisionOption,
2026-05-09 16:55:06 +03:30
SoilLocation,
)
2026-05-10 02:02:48 +03:30
from farm_data.models import SensorData
from .data_driven_subdivision import DEFAULT_CLUSTER_FEATURES
2026-05-11 04:38:44 +03:30
from .data_driven_subdivision import activate_subdivision_option
from .serializers import (
2026-05-09 16:55:06 +03:30
BlockSubdivisionSerializer,
2026-04-25 17:22:41 +03:30
NdviHealthRequestSerializer,
NdviHealthResponseSerializer,
2026-05-09 16:55:06 +03:30
RemoteSensingCellObservationSerializer,
RemoteSensingResponseSerializer,
2026-05-10 02:02:48 +03:30
RemoteSensingFarmRequestSerializer,
2026-05-09 16:55:06 +03:30
RemoteSensingRunSerializer,
RemoteSensingRunStatusResponseSerializer,
2026-05-11 04:38:44 +03:30
RemoteSensingClusterBlockLiveRequestSerializer,
RemoteSensingClusterBlockLiveResponseSerializer,
RemoteSensingClusterBlockSerializer,
RemoteSensingSubdivisionOptionActivateResponseSerializer,
RemoteSensingSubdivisionOptionActivateSerializer,
RemoteSensingSubdivisionOptionListResponseSerializer,
RemoteSensingSubdivisionOptionSerializer,
2026-05-09 16:55:06 +03:30
RemoteSensingSummarySerializer,
RemoteSensingSubdivisionResultSerializer,
SoilDataRequestSerializer,
SoilLocationResponseSerializer,
)
2026-05-09 16:55:06 +03:30
from .tasks import run_remote_sensing_analysis_task
2026-05-11 04:38:44 +03:30
from .openeo_service import (
OpenEOAuthenticationError,
OpenEOExecutionError,
OpenEOServiceError,
compute_remote_sensing_metrics,
)
2026-05-09 16:55:06 +03:30
# Hard cap applied to the `page_size` query parameter of paginated
# remote-sensing endpoints (see RemoteSensingRunStatusView.get).
MAX_REMOTE_SENSING_PAGE_SIZE = 200

# Canonical ordering of the lifecycle stages an async remote-sensing run can
# report, from enqueue through completion/failure.  Terminal/exception states
# ("failed", "retrying") come last.
REMOTE_SENSING_RUN_STAGE_ORDER = (
    "queued",
    "running",
    "preparing_analysis_grid",
    "analysis_grid_ready",
    "analysis_cells_selected",
    "using_cached_observations",
    "fetching_remote_metrics",
    "remote_metrics_fetched",
    "observations_persisted",
    "clustering_completed",
    "completed",
    "failed",
    "retrying",
)
2026-03-25 01:56:41 +03:30
# OpenAPI-only serializers: these describe the response envelopes emitted by
# the views below for drf-spectacular schema generation.  They are not used
# for runtime (de)serialization.

# Shape of the `data` payload returned by SoilDataView (GET/POST).
SoilLocationPayloadSerializer = inline_serializer(
    name="SoilLocationPayloadSerializer",
    fields={
        "source": drf_serializers.CharField(),
        "id": drf_serializers.IntegerField(),
        "lon": drf_serializers.DecimalField(max_digits=9, decimal_places=6),
        "lat": drf_serializers.DecimalField(max_digits=9, decimal_places=6),
        "input_block_count": drf_serializers.IntegerField(),
        "farm_boundary": drf_serializers.JSONField(),
        "block_layout": drf_serializers.JSONField(),
        "block_subdivisions": BlockSubdivisionSerializer(many=True),
        "satellite_snapshots": drf_serializers.JSONField(),
    },
)
SoilDataResponseSerializer = build_envelope_serializer(
    "SoilDataResponseSerializer",
    SoilLocationPayloadSerializer,
)
# Generic error envelope: `data` is optional/null for 4xx/5xx responses.
SoilErrorResponseSerializer = build_envelope_serializer(
    "SoilErrorResponseSerializer",
    data_required=False,
    allow_null=True,
)
NdviHealthEnvelopeSerializer = build_envelope_serializer(
    "NdviHealthEnvelopeSerializer",
    NdviHealthResponseSerializer,
)
RemoteSensingEnvelopeSerializer = build_envelope_serializer(
    "RemoteSensingEnvelopeSerializer",
    RemoteSensingResponseSerializer,
)
# 202 payload returned when an async remote-sensing run is queued: the
# summary/cells are empty placeholders and `task_id` identifies the Celery task.
RemoteSensingQueuedEnvelopeSerializer = build_envelope_serializer(
    "RemoteSensingQueuedEnvelopeSerializer",
    inline_serializer(
        name="RemoteSensingQueuedPayloadSerializer",
        fields={
            "status": drf_serializers.CharField(),
            "source": drf_serializers.CharField(),
            "location": drf_serializers.JSONField(),
            "block_code": drf_serializers.CharField(),
            "chunk_size_sqm": drf_serializers.IntegerField(allow_null=True),
            "temporal_extent": drf_serializers.JSONField(),
            "summary": RemoteSensingSummarySerializer(),
            "cells": drf_serializers.JSONField(),
            "run": drf_serializers.JSONField(allow_null=True),
            "task_id": drf_serializers.UUIDField(),
        },
    ),
)
RemoteSensingRunStatusEnvelopeSerializer = build_envelope_serializer(
    "RemoteSensingRunStatusEnvelopeSerializer",
    RemoteSensingRunStatusResponseSerializer,
)
RemoteSensingClusterBlockLiveEnvelopeSerializer = build_envelope_serializer(
    "RemoteSensingClusterBlockLiveEnvelopeSerializer",
    RemoteSensingClusterBlockLiveResponseSerializer,
)
RemoteSensingSubdivisionOptionListEnvelopeSerializer = build_envelope_serializer(
    "RemoteSensingSubdivisionOptionListEnvelopeSerializer",
    RemoteSensingSubdivisionOptionListResponseSerializer,
)
RemoteSensingSubdivisionOptionActivateEnvelopeSerializer = build_envelope_serializer(
    "RemoteSensingSubdivisionOptionActivateEnvelopeSerializer",
    RemoteSensingSubdivisionOptionActivateResponseSerializer,
)
class SoilDataView(APIView):
    """Register the farm's corner coordinates and the farmer-defined blocks.

    GET returns the stored farm/block structure for a (lat, lon) pair; POST
    creates or updates it.  No synchronous subdivision is run here.
    """

    @extend_schema(
        tags=["Location Data"],
        summary="خواندن ساختار مزرعه و بلوک‌ها (GET)",
        description="با ارسال lat و lon، ساختار ذخیره‌شده مزرعه، بلوک‌ها و آخرین خلاصه سنجش‌ازدور هر بلوک بازگردانده می‌شود.",
        parameters=[
            {
                "name": "lat",
                "in": "query",
                "required": True,
                "schema": {"type": "number"},
                "description": "عرض جغرافیایی",
            },
            {
                "name": "lon",
                "in": "query",
                "required": True,
                "schema": {"type": "number"},
                "description": "طول جغرافیایی",
            },
            {
                "name": "block_code",
                "in": "query",
                "required": False,
                "schema": {"type": "string", "default": "block-1"},
                "description": "در GET فقط برای فیلتر کلاینتی است و الگوریتمی اجرا نمی‌کند.",
            },
        ],
        responses={
            200: build_response(
                SoilDataResponseSerializer,
                "ساختار بلوک‌های زمین از دیتابیس بازگردانده شد.",
            ),
            404: build_response(
                SoilErrorResponseSerializer,
                "location موردنظر پیدا نشد.",
            ),
            400: build_response(
                SoilErrorResponseSerializer,
                "پارامترهای ورودی نامعتبر هستند.",
            ),
        },
    )
    def get(self, request):
        """Read-only lookup of the stored location structure by lat/lon."""
        serializer = SoilDataRequestSerializer(data=request.query_params)
        if not serializer.is_valid():
            return Response(
                {"code": 400, "msg": "داده نامعتبر.", "data": serializer.errors},
                status=status.HTTP_400_BAD_REQUEST,
            )
        lat = serializer.validated_data["lat"]
        lon = serializer.validated_data["lon"]
        # prefetch=True loads the related data the response serializer renders.
        location = _get_location_by_lat_lon(lat, lon, prefetch=True)
        if location is None:
            return Response(
                {"code": 404, "msg": "location پیدا نشد.", "data": None},
                status=status.HTTP_404_NOT_FOUND,
            )
        data_serializer = SoilLocationResponseSerializer(location)
        return Response(
            {"code": 200, "msg": "success", "data": {"source": "database", **data_serializer.data}},
            status=status.HTTP_200_OK,
        )

    @extend_schema(
        tags=["Location Data"],
        summary="ثبت مزرعه و بلوک‌های کشاورز (POST)",
        description="مختصات گوشه‌های مزرعه و boundary هر بلوک کشاورز ذخیره می‌شود. هیچ subdivision سنکرونی اجرا نمی‌شود.",
        request=SoilDataRequestSerializer,
        responses={
            200: build_response(
                SoilDataResponseSerializer,
                "اطلاعات location ذخیره یا به‌روزرسانی شد.",
            ),
            400: build_response(
                SoilErrorResponseSerializer,
                "پارامترهای ورودی نامعتبر هستند.",
            ),
        },
        examples=[
            OpenApiExample(
                "نمونه درخواست",
                value={
                    "lat": 35.6892,
                    "lon": 51.3890,
                    "farm_boundary": {
                        "type": "Polygon",
                        "coordinates": [
                            [
                                [51.3890, 35.6890],
                                [51.3902, 35.6890],
                                [51.3902, 35.6900],
                                [51.3890, 35.6900],
                                [51.3890, 35.6890],
                            ]
                        ],
                    },
                    "blocks": [
                        {
                            "block_code": "block-1",
                            "boundary": {
                                "type": "Polygon",
                                "coordinates": [
                                    [
                                        [51.3890, 35.6890],
                                        [51.3896, 35.6890],
                                        [51.3896, 35.6900],
                                        [51.3890, 35.6900],
                                        [51.3890, 35.6890],
                                    ]
                                ],
                            },
                        }
                    ],
                },
                request_only=True,
            ),
        ],
    )
    def post(self, request):
        """Create or update a SoilLocation with its farm boundary and blocks."""
        serializer = SoilDataRequestSerializer(
            data=request.data,
            context={"require_farm_boundary": True},
        )
        if not serializer.is_valid():
            return Response(
                {"code": 400, "msg": "داده نامعتبر.", "data": serializer.errors},
                status=status.HTTP_400_BAD_REQUEST,
            )
        lat = serializer.validated_data["lat"]
        lon = serializer.validated_data["lon"]
        block_count = serializer.validated_data.get("block_count", 1)
        farm_boundary = serializer.validated_data.get("farm_boundary")
        blocks = serializer.validated_data.get("blocks") or []
        farm_boundary_changed = False
        # Coordinates are rounded to 6 decimal places so the same physical
        # point always maps to the same SoilLocation row.
        lat_rounded = round(lat, 6)
        lon_rounded = round(lon, 6)
        location, created = SoilLocation.objects.get_or_create(
            latitude=lat_rounded,
            longitude=lon_rounded,
            defaults={
                "input_block_count": block_count,
                "farm_boundary": farm_boundary or {},
            },
        )
        if created:
            # Fresh row: apply block layout and boundary, then persist once.
            location.set_input_block_count(block_count, blocks=blocks or None)
            if farm_boundary is not None:
                location.farm_boundary = farm_boundary
            location.save(update_fields=["input_block_count", "farm_boundary", "block_layout", "updated_at"])
        else:
            # Existing row: only save the fields that actually changed.
            changed_fields = []
            if block_count != location.input_block_count or blocks:
                location.set_input_block_count(block_count, blocks=blocks or None)
                changed_fields.extend(["input_block_count", "block_layout"])
            if farm_boundary is not None and location.farm_boundary != farm_boundary:
                location.farm_boundary = farm_boundary
                changed_fields.append("farm_boundary")
                farm_boundary_changed = True
            if changed_fields:
                changed_fields.append("updated_at")
                location.save(update_fields=changed_fields)
            if farm_boundary_changed:
                # A new boundary invalidates previously computed analysis
                # state ("" = all blocks; see _clear_block_analysis_state).
                _clear_block_analysis_state(location, "")
        # A farm boundary must be supplied now or have been stored earlier.
        if not (farm_boundary or location.farm_boundary):
            return Response(
                {
                    "code": 400,
                    "msg": "داده نامعتبر.",
                    "data": {"farm_boundary": ["برای ثبت location باید گوشه‌های کل زمین ارسال یا قبلاً ذخیره شده باشد."]},
                },
                status=status.HTTP_400_BAD_REQUEST,
            )
        _sync_defined_blocks(location, blocks)
        # Re-fetch with prefetch so the response reflects the synced blocks.
        location = _get_location_by_lat_lon(lat, lon, prefetch=True)
        data_serializer = SoilLocationResponseSerializer(location)
        return Response(
            {
                "code": 200,
                "msg": "success",
                "data": {
                    "source": "created" if created else "database",
                    **data_serializer.data,
                },
            },
            status=status.HTTP_200_OK,
        )
2026-04-25 17:22:41 +03:30
class NdviHealthView(APIView):
    """Return the NDVI vegetation-health payload for a farm, independent of the dashboard."""

    @extend_schema(
        tags=["Location Data"],
        summary="دریافت NDVI سلامت مزرعه",
        description="با دریافت farm_uuid، داده NDVI سلامت پوشش گیاهی مزرعه را به صورت مستقل از dashboard برمی گرداند.",
        request=NdviHealthRequestSerializer,
        responses={
            200: build_response(
                NdviHealthEnvelopeSerializer,
                "داده NDVI مزرعه با موفقیت بازگردانده شد.",
            ),
            400: build_response(
                SoilErrorResponseSerializer,
                "داده ورودی نامعتبر است.",
            ),
            404: build_response(
                SoilErrorResponseSerializer,
                "مزرعه یافت نشد.",
            ),
        },
        examples=[
            OpenApiExample(
                "نمونه درخواست NDVI",
                value={"farm_uuid": "11111111-1111-1111-1111-111111111111"},
                request_only=True,
            )
        ],
    )
    def post(self, request):
        """Validate the farm UUID and delegate to the NDVI health service."""
        request_serializer = NdviHealthRequestSerializer(data=request.data)
        if not request_serializer.is_valid():
            return Response(
                {"code": 400, "msg": "داده نامعتبر.", "data": request_serializer.errors},
                status=status.HTTP_400_BAD_REQUEST,
            )
        # The service instance is resolved lazily through the app config.
        ndvi_service = apps.get_app_config("location_data").get_ndvi_health_service()
        farm_uuid = str(request_serializer.validated_data["farm_uuid"])
        try:
            ndvi_payload = ndvi_service.get_ndvi_health(farm_uuid=farm_uuid)
        except ValueError as lookup_error:
            # A ValueError from the service is surfaced as 404 (farm not
            # found, per the schema above), echoing the service's message.
            return Response(
                {"code": 404, "msg": str(lookup_error), "data": None},
                status=status.HTTP_404_NOT_FOUND,
            )
        return Response(
            {"code": 200, "msg": "success", "data": ndvi_payload},
            status=status.HTTP_200_OK,
        )
2026-05-09 16:55:06 +03:30
class RemoteSensingAnalysisView(APIView):
    """Remote-sensing analysis entry points.

    POST queues the full async pipeline (analysis grid + openEO metrics +
    observation persistence + KMeans clustering) on Celery; nothing runs
    synchronously.  GET only reads back cached observations and clustering
    results from the database.
    """

    @extend_schema(
        tags=["Location Data"],
        summary="اجرای async تحلیل سنجش‌ازدور و subdivision داده‌محور",
        description="برای location موجود، pipeline کامل grid + openEO + observation persistence + KMeans clustering در Celery صف می‌شود و sync اجرا نمی‌شود.",
        request=RemoteSensingFarmRequestSerializer,
        responses={
            202: build_response(
                RemoteSensingQueuedEnvelopeSerializer,
                "درخواست تحلیل سنجش‌ازدور در صف قرار گرفت.",
            ),
            400: build_response(
                SoilErrorResponseSerializer,
                "داده ورودی نامعتبر است.",
            ),
            404: build_response(
                SoilErrorResponseSerializer,
                "location موردنظر پیدا نشد.",
            ),
        },
        examples=[
            OpenApiExample(
                "نمونه درخواست remote sensing",
                value={
                    "farm_uuid": "11111111-1111-1111-1111-111111111111",
                    "force_refresh": False,
                },
                request_only=True,
            ),
        ],
    )
    def post(self, request):
        """Create a pending RemoteSensingRun and enqueue the Celery task."""
        serializer = RemoteSensingFarmRequestSerializer(data=request.data)
        if not serializer.is_valid():
            return Response(
                {"code": 400, "msg": "داده نامعتبر.", "data": serializer.errors},
                status=status.HTTP_400_BAD_REQUEST,
            )
        payload = serializer.validated_data
        # Resolve the SoilLocation through the farm's sensor record.
        farm = SensorData.objects.select_related("center_location").filter(farm_uuid=payload["farm_uuid"]).first()
        location = getattr(farm, "center_location", None)
        if location is None:
            return Response(
                {"code": 404, "msg": "location پیدا نشد.", "data": None},
                status=status.HTTP_404_NOT_FOUND,
            )
        # Fixed 30-day window ending yesterday (satellite data lags real time).
        temporal_end = timezone.localdate() - timedelta(days=1)
        temporal_start = temporal_end - timedelta(days=30)
        # block_code="" means the run covers all blocks of the location.
        run = RemoteSensingRun.objects.create(
            soil_location=location,
            block_code="",
            chunk_size_sqm=_resolve_chunk_size_for_location(location, ""),
            temporal_start=temporal_start,
            temporal_end=temporal_end,
            status=RemoteSensingRun.STATUS_PENDING,
            metadata={
                "requested_via": "api",
                "status_label": "pending",
                "requested_cluster_count": None,
                "selected_features": list(DEFAULT_CLUSTER_FEATURES),
                "farm_uuid": str(payload["farm_uuid"]),
                "scope": "all_blocks",
            },
        )
        task_result = run_remote_sensing_analysis_task.delay(
            soil_location_id=location.id,
            block_code="",
            temporal_start=temporal_start.isoformat(),
            temporal_end=temporal_end.isoformat(),
            force_refresh=payload.get("force_refresh", False),
            run_id=run.id,
            cluster_count=None,
            selected_features=list(DEFAULT_CLUSTER_FEATURES),
        )
        # Store the Celery task id so the status endpoint can find this run.
        run.metadata = {**(run.metadata or {}), "task_id": task_result.id}
        run.save(update_fields=["metadata", "updated_at"])
        location_data = SoilLocationResponseSerializer(location).data
        # 202 placeholder payload: summary/cells are empty until the task runs.
        response_payload = {
            "status": "processing",
            "source": "processing",
            "location": location_data,
            "block_code": "",
            "chunk_size_sqm": run.chunk_size_sqm,
            "temporal_extent": {
                "start_date": temporal_start.isoformat(),
                "end_date": temporal_end.isoformat(),
            },
            "summary": _empty_remote_sensing_summary(),
            "cells": [],
            "run": RemoteSensingRunSerializer(run).data,
            "task_id": task_result.id,
        }
        return Response(
            {"code": 202, "msg": "تحلیل سنجش‌ازدور در صف قرار گرفت.", "data": response_payload},
            status=status.HTTP_202_ACCEPTED,
        )

    @extend_schema(
        tags=["Location Data"],
        summary="خواندن نتایج cache شده سنجش‌ازدور و subdivision",
        description="فقط نتایج ذخیره‌شده remote sensing و clustering را برمی‌گرداند و هیچ پردازش sync اجرا نمی‌کند.",
        parameters=[
            OpenApiParameter(
                name="farm_uuid",
                type=str,
                location=OpenApiParameter.QUERY,
                required=True,
                description="شناسه یکتای مزرعه",
                default="11111111-1111-1111-1111-111111111111",
            ),
            OpenApiParameter(
                name="page",
                type=int,
                location=OpenApiParameter.QUERY,
                required=False,
                default=1,
            ),
            OpenApiParameter(
                name="page_size",
                type=int,
                location=OpenApiParameter.QUERY,
                required=False,
                default=100,
            ),
        ],
        responses={
            200: build_response(
                RemoteSensingEnvelopeSerializer,
                "نتایج cache شده remote sensing بازگردانده شد.",
            ),
            404: build_response(
                SoilErrorResponseSerializer,
                "location موردنظر پیدا نشد.",
            ),
            400: build_response(
                SoilErrorResponseSerializer,
                "داده ورودی نامعتبر است.",
            ),
        },
        examples=[
            OpenApiExample(
                "نمونه درخواست GET remote sensing",
                value={
                    "farm_uuid": "11111111-1111-1111-1111-111111111111",
                    "page": 1,
                    "page_size": 100,
                },
                parameter_only=("farm_uuid", "query"),
            ),
        ],
    )
    def get(self, request):
        """Return cached observations / subdivision results, or a processing stub."""
        serializer = RemoteSensingFarmRequestSerializer(data=request.query_params)
        if not serializer.is_valid():
            return Response(
                {"code": 400, "msg": "داده نامعتبر.", "data": serializer.errors},
                status=status.HTTP_400_BAD_REQUEST,
            )
        payload = serializer.validated_data
        farm = SensorData.objects.select_related("center_location").filter(farm_uuid=payload["farm_uuid"]).first()
        location = getattr(farm, "center_location", None)
        if location is None:
            return Response(
                {"code": 404, "msg": "location پیدا نشد.", "data": None},
                status=status.HTTP_404_NOT_FOUND,
            )
        # Same fixed window as POST, so the cache lookup matches queued runs.
        temporal_end = timezone.localdate() - timedelta(days=1)
        temporal_start = temporal_end - timedelta(days=30)
        block_code = ""
        observations = _get_remote_sensing_observations(
            location=location,
            block_code=block_code,
            start_date=temporal_start,
            end_date=temporal_end,
        )
        run = _get_latest_remote_sensing_run(
            location=location,
            block_code=block_code,
            start_date=temporal_start,
            end_date=temporal_end,
        )
        subdivision_result = _get_remote_sensing_subdivision_result(
            location=location,
            block_code=block_code,
            start_date=temporal_start,
            end_date=temporal_end,
        )
        if not observations.exists():
            # Nothing cached yet: report "processing" if a run is in flight,
            # otherwise "not_found", with an empty summary either way.
            processing = run is not None and run.status in {
                RemoteSensingRun.STATUS_PENDING,
                RemoteSensingRun.STATUS_RUNNING,
            }
            response_payload = {
                "status": "processing" if processing else "not_found",
                "source": "processing" if processing else "database",
                "location": SoilLocationResponseSerializer(location).data,
                "block_code": "",
                "chunk_size_sqm": getattr(run, "chunk_size_sqm", None),
                "temporal_extent": {
                    "start_date": temporal_start.isoformat(),
                    "end_date": temporal_end.isoformat(),
                },
                "summary": _empty_remote_sensing_summary(),
                "cells": [],
                "run": RemoteSensingRunSerializer(run).data if run else None,
                "subdivision_result": None,
            }
            return Response(
                {"code": 200, "msg": "success", "data": response_payload},
                status=status.HTTP_200_OK,
            )
        # Paginate cells and (if present) cluster assignments with the same
        # page/page_size so the two lists stay in step for the client.
        paginated_observations = _paginate_observations(
            observations,
            page=payload["page"],
            page_size=payload["page_size"],
        )
        paginated_assignments = []
        pagination = {"cells": paginated_observations["pagination"]}
        if subdivision_result is not None:
            paginated = _paginate_assignments(
                subdivision_result,
                page=payload["page"],
                page_size=payload["page_size"],
            )
            paginated_assignments = paginated["items"]
            pagination["assignments"] = paginated["pagination"]
        cells_data = RemoteSensingCellObservationSerializer(paginated_observations["items"], many=True).data
        subdivision_data = None
        if subdivision_result is not None:
            subdivision_data = RemoteSensingSubdivisionResultSerializer(
                subdivision_result,
                context={"paginated_assignments": paginated_assignments},
            ).data
        response_payload = {
            "status": "success",
            "source": "database",
            "location": SoilLocationResponseSerializer(location).data,
            "block_code": "",
            "chunk_size_sqm": observations.first().cell.chunk_size_sqm,
            "temporal_extent": {
                "start_date": temporal_start.isoformat(),
                "end_date": temporal_end.isoformat(),
            },
            "summary": _build_remote_sensing_summary(observations),
            "cells": cells_data,
            "run": RemoteSensingRunSerializer(run).data if run else None,
            "subdivision_result": subdivision_data,
        }
        if pagination is not None:
            response_payload["pagination"] = pagination
        return Response(
            {"code": 200, "msg": "success", "data": response_payload},
            status=status.HTTP_200_OK,
        )
class RemoteSensingRunStatusView(APIView):
    """Report the status of an async remote-sensing run, keyed by its Celery task id."""

    @extend_schema(
        tags=["Location Data"],
        summary="وضعیت run تحلیل سنجش‌ازدور",
        description="وضعیت async pipeline را با task_id از نوع UUID برمی‌گرداند. این task_id همان شناسه تسک Celery ذخیره‌شده در metadata.run است.",
        parameters=[
            OpenApiParameter(
                name="run_id",
                type={"type": "string", "format": "uuid"},
                location=OpenApiParameter.PATH,
                required=True,
                description="شناسه UUID تسک async (task_id).",
            ),
        ],
        responses={
            200: build_response(
                RemoteSensingRunStatusEnvelopeSerializer,
                "وضعیت run بازگردانده شد و بعد از اتمام، نتیجه نهایی نیز از همین route برگردانده می‌شود.",
            ),
            404: build_response(
                SoilErrorResponseSerializer,
                "run موردنظر پیدا نشد.",
            ),
        },
    )
    def get(self, request, run_id):
        """Look up the run by task id and return its status payload."""
        query = request.query_params
        requested_page = _safe_positive_int(query.get("page"), default=1)
        # Cap the page size so one status call cannot dump the whole grid.
        effective_page_size = min(
            _safe_positive_int(query.get("page_size"), default=100),
            MAX_REMOTE_SENSING_PAGE_SIZE,
        )
        matching_run = (
            RemoteSensingRun.objects.filter(metadata__task_id=str(run_id))
            .select_related("soil_location")
            .first()
        )
        if matching_run is None:
            return Response(
                {"code": 404, "msg": "run با این task_id پیدا نشد.", "data": None},
                status=status.HTTP_404_NOT_FOUND,
            )
        status_data = _build_remote_sensing_run_status_payload(
            matching_run,
            page=requested_page,
            page_size=effective_page_size,
        )
        return Response(
            {"code": 200, "msg": "success", "data": status_data},
            status=status.HTTP_200_OK,
        )
2026-05-11 04:38:44 +03:30
class RemoteSensingClusterBlockLiveView(APIView):
    """Fetch live openEO metrics for a single KMeans-generated sub-block.

    The sub-block geometry is read from the database; the satellite metrics
    are computed fresh from openEO on every call (no caching).
    """

    @extend_schema(
        tags=["Location Data"],
        summary="دریافت زنده remote sensing برای زیر‌بلاک KMeans",
        description="با دریافت UUID زیر‌بلاک ساخته‌شده توسط KMeans، هندسه همان زیر‌بلاک از دیتابیس خوانده می‌شود و داده تازه ماهواره‌ای از openEO برگردانده می‌شود.",
        parameters=[
            OpenApiParameter(
                name="cluster_uuid",
                type={"type": "string", "format": "uuid"},
                location=OpenApiParameter.PATH,
                required=True,
                description="شناسه UUID زیر‌بلاک KMeans.",
            ),
            OpenApiParameter(
                name="temporal_start",
                type={"type": "string", "format": "date"},
                location=OpenApiParameter.QUERY,
                required=False,
                description="شروع بازه سفارشی. اگر ست شود، temporal_end هم باید ارسال شود.",
            ),
            OpenApiParameter(
                name="temporal_end",
                type={"type": "string", "format": "date"},
                location=OpenApiParameter.QUERY,
                required=False,
                description="پایان بازه سفارشی. اگر ست شود، temporal_start هم باید ارسال شود.",
            ),
            OpenApiParameter(
                name="days",
                type=int,
                location=OpenApiParameter.QUERY,
                required=False,
                default=30,
                description="اگر بازه سفارشی ارسال نشود، از yesterday backfill با این تعداد روز استفاده می‌شود.",
            ),
        ],
        responses={
            200: build_response(
                RemoteSensingClusterBlockLiveEnvelopeSerializer,
                "داده زنده openEO برای زیر‌بلاک KMeans بازگردانده شد.",
            ),
            400: build_response(
                SoilErrorResponseSerializer,
                "پارامترهای ورودی یا هندسه زیر‌بلاک نامعتبر است.",
            ),
            404: build_response(
                SoilErrorResponseSerializer,
                "زیر‌بلاک KMeans پیدا نشد.",
            ),
            502: build_response(
                SoilErrorResponseSerializer,
                "خواندن داده از openEO ناموفق بود.",
            ),
        },
    )
    def get(self, request, cluster_uuid):
        """Resolve the sub-block, call openEO for its geometry, return metrics."""
        serializer = RemoteSensingClusterBlockLiveRequestSerializer(data=request.query_params)
        if not serializer.is_valid():
            return Response(
                {"code": 400, "msg": "داده نامعتبر.", "data": serializer.errors},
                status=status.HTTP_400_BAD_REQUEST,
            )
        cluster_block = (
            RemoteSensingClusterBlock.objects.select_related("soil_location", "block_subdivision", "result")
            .filter(uuid=cluster_uuid)
            .first()
        )
        if cluster_block is None:
            return Response(
                {"code": 404, "msg": "زیر‌بلاک KMeans پیدا نشد.", "data": None},
                status=status.HTTP_404_NOT_FOUND,
            )
        # A falsy geometry (helper defined elsewhere in this module) means the
        # stored sub-block cannot be turned into a usable polygon -> 400.
        geometry = _resolve_cluster_block_geometry(cluster_block)
        if not geometry:
            return Response(
                {
                    "code": 400,
                    "msg": "هندسه زیر‌بلاک KMeans نامعتبر است.",
                    "data": {"cluster_uuid": [str(cluster_block.uuid)]},
                },
                status=status.HTTP_400_BAD_REQUEST,
            )
        # Custom start/end pair if supplied, otherwise a `days`-long backfill
        # window (see _resolve_live_remote_sensing_window).
        temporal_start, temporal_end = _resolve_live_remote_sensing_window(serializer.validated_data)
        # Wrap the sub-block as a single virtual grid cell so the standard
        # compute_remote_sensing_metrics() entry point can process it.
        virtual_cell = _build_virtual_cluster_block_cell(cluster_block=cluster_block, geometry=geometry)
        try:
            remote_payload = compute_remote_sensing_metrics(
                [virtual_cell],
                temporal_start=temporal_start,
                temporal_end=temporal_end,
                selected_features=list(DEFAULT_CLUSTER_FEATURES),
            )
        except (OpenEOAuthenticationError, OpenEOExecutionError, OpenEOServiceError) as exc:
            # Any openEO-side failure is surfaced as a 502 with the detail text.
            return Response(
                {"code": 502, "msg": "خواندن داده از openEO ناموفق بود.", "data": {"detail": str(exc)}},
                status=status.HTTP_502_BAD_GATEWAY,
            )
        # Metrics for the single virtual cell; empty dict if openEO returned none.
        metrics = dict(remote_payload.get("results", {}).get(virtual_cell.cell_code, {}) or {})
        response_payload = {
            "status": "success",
            "source": "openeo",
            "cluster_block": RemoteSensingClusterBlockSerializer(cluster_block).data,
            "temporal_extent": {
                "start_date": temporal_start.isoformat(),
                "end_date": temporal_end.isoformat(),
            },
            "selected_features": list(DEFAULT_CLUSTER_FEATURES),
            "summary": {
                "cell_count": int(cluster_block.cell_count or 0),
                "ndvi_mean": _round_or_none(metrics.get("ndvi")),
                "ndwi_mean": _round_or_none(metrics.get("ndwi")),
                "soil_vv_db_mean": _round_or_none(metrics.get("soil_vv_db")),
            },
            "metrics": {
                "ndvi": _round_or_none(metrics.get("ndvi")),
                "ndwi": _round_or_none(metrics.get("ndwi")),
                "soil_vv": _round_or_none(metrics.get("soil_vv")),
                "soil_vv_db": _round_or_none(metrics.get("soil_vv_db")),
            },
            "metadata": {
                **dict(remote_payload.get("metadata") or {}),
                "requested_cluster_uuid": str(cluster_block.uuid),
                "block_code": cluster_block.block_code,
                "sub_block_code": cluster_block.sub_block_code,
            },
        }
        return Response(
            {"code": 200, "msg": "success", "data": response_payload},
            status=status.HTTP_200_OK,
        )
class RemoteSensingSubdivisionOptionListView(APIView):
    """List every stored K option for a subdivision result, flagging active/recommended."""

    @extend_schema(
        tags=["Location Data"],
        summary="فهرست همه گزینه‌های K ذخیره‌شده برای یک subdivision result",
        description="همه ترکیب‌های K که برای این run/result محاسبه و ذخیره شده‌اند را برمی‌گرداند و مشخص می‌کند کدام‌یک active و کدام‌یک recommended است.",
        responses={
            200: build_response(
                RemoteSensingSubdivisionOptionListEnvelopeSerializer,
                "فهرست گزینه‌های K بازگردانده شد.",
            ),
            404: build_response(
                SoilErrorResponseSerializer,
                "subdivision result موردنظر پیدا نشد.",
            ),
        },
    )
    def get(self, request, result_id):
        """Return the ordered K options plus which K is active / recommended."""
        result = (
            RemoteSensingSubdivisionResult.objects.filter(pk=result_id)
            .prefetch_related("options__cluster_blocks")
            .first()
        )
        if result is None:
            return Response(
                {"code": 404, "msg": "subdivision result پیدا نشد.", "data": None},
                status=status.HTTP_404_NOT_FOUND,
            )
        ordered_options = list(result.options.all().order_by("requested_k"))
        # Single pass: pick the first active and first recommended K, if any.
        active_k = None
        recommended_k = None
        for candidate in ordered_options:
            if active_k is None and candidate.is_active:
                active_k = candidate.requested_k
            if recommended_k is None and candidate.is_recommended:
                recommended_k = candidate.requested_k
        payload = {
            "result_id": result.id,
            "active_requested_k": active_k,
            "recommended_requested_k": recommended_k,
            "options": RemoteSensingSubdivisionOptionSerializer(ordered_options, many=True).data,
        }
        return Response(
            {"code": 200, "msg": "success", "data": payload},
            status=status.HTTP_200_OK,
        )
class RemoteSensingSubdivisionOptionActivateView(APIView):
    """Activate one of the pre-computed K options of a subdivision result.

    The user picks a K that was already computed and stored; activation makes
    it the basis of the main subdivision output (sync handled by
    `activate_subdivision_option`).
    """

    @extend_schema(
        tags=["Location Data"],
        summary="فعال‌سازی یک K ذخیره‌شده برای subdivision result",
        description="کاربر می‌تواند یکی از Kهای از قبل محاسبه‌شده و ذخیره‌شده را انتخاب کند تا active شود و خروجی اصلی subdivision بر همان مبنا sync شود.",
        request=RemoteSensingSubdivisionOptionActivateSerializer,
        responses={
            200: build_response(
                RemoteSensingSubdivisionOptionActivateEnvelopeSerializer,
                "K انتخابی فعال شد.",
            ),
            400: build_response(
                SoilErrorResponseSerializer,
                "درخواست نامعتبر است یا K انتخابی موجود نیست.",
            ),
            404: build_response(
                SoilErrorResponseSerializer,
                "subdivision result موردنظر پیدا نشد.",
            ),
        },
    )
    def post(self, request, result_id):
        """Validate the requested K, activate its option, return the synced result."""
        serializer = RemoteSensingSubdivisionOptionActivateSerializer(data=request.data)
        if not serializer.is_valid():
            return Response(
                {"code": 400, "msg": "داده نامعتبر.", "data": serializer.errors},
                status=status.HTTP_400_BAD_REQUEST,
            )
        result = (
            RemoteSensingSubdivisionResult.objects.filter(pk=result_id)
            .select_related("soil_location", "block_subdivision")
            .prefetch_related("options__cluster_blocks", "options__assignments__cell")
            .first()
        )
        if result is None:
            return Response(
                {"code": 404, "msg": "subdivision result پیدا نشد.", "data": None},
                status=status.HTTP_404_NOT_FOUND,
            )
        requested_k = serializer.validated_data["requested_k"]
        # Scan the prefetched options in memory rather than issuing a new query.
        option = next(
            (candidate for candidate in result.options.all() if candidate.requested_k == requested_k),
            None,
        )
        if option is None:
            return Response(
                {
                    "code": 400,
                    "msg": "K انتخابی برای این subdivision result موجود نیست.",
                    "data": {"requested_k": [requested_k]},
                },
                status=status.HTTP_400_BAD_REQUEST,
            )
        activate_subdivision_option(option=option, selection_source="user")
        # Re-fetch with the relations the serializer renders.  (The previous
        # version also called result.refresh_from_db() first, but that refreshed
        # an instance we immediately discard here — a redundant DB round-trip.)
        result = (
            RemoteSensingSubdivisionResult.objects.filter(pk=result.pk)
            .prefetch_related("assignments__cell", "cluster_blocks", "options__cluster_blocks")
            .first()
        )
        response_payload = {
            "result_id": result.id,
            "activated_requested_k": requested_k,
            "subdivision_result": RemoteSensingSubdivisionResultSerializer(result).data,
        }
        return Response(
            {"code": 200, "msg": "success", "data": response_payload},
            status=status.HTTP_200_OK,
        )
2026-05-10 02:02:48 +03:30
def _build_remote_sensing_run_status_payload(run: RemoteSensingRun, *, page: int, page_size: int) -> dict:
    """Assemble the status payload for a remote-sensing run.

    Always includes run/task state; for successfully finished runs it also
    attaches location info, paginated cell observations, an aggregate
    summary, and the subdivision result (when one exists).
    """
    run_data = RemoteSensingRunSerializer(run).data
    task_data = _build_remote_sensing_task_payload(run)
    # A live Celery RETRY state overrides whatever status the DB recorded.
    effective_status = _apply_live_retry_state_override(run_data, task_data)
    status_payload = {
        "status": effective_status or run_data["status_label"],
        "source": "database",
        "run": run_data,
        "task_id": (run.metadata or {}).get("task_id"),
        "task": task_data,
    }

    # Unfinished or failed runs carry no result data.
    terminal_without_results = {
        RemoteSensingRun.STATUS_PENDING,
        RemoteSensingRun.STATUS_RUNNING,
        RemoteSensingRun.STATUS_FAILURE,
    }
    if run.status in terminal_without_results:
        return status_payload

    location = _get_location_by_lat_lon(run.soil_location.latitude, run.soil_location.longitude, prefetch=True)
    observations = _get_remote_sensing_observations(
        location=run.soil_location,
        block_code=run.block_code,
        start_date=run.temporal_start,
        end_date=run.temporal_end,
    )
    subdivision_result = getattr(run, "subdivision_result", None)

    response_payload = dict(status_payload)
    response_payload.update(
        location=SoilLocationResponseSerializer(location).data,
        block_code=run.block_code,
        chunk_size_sqm=run.chunk_size_sqm,
        temporal_extent={
            "start_date": run.temporal_start.isoformat() if run.temporal_start else None,
            "end_date": run.temporal_end.isoformat() if run.temporal_end else None,
        },
        summary=_empty_remote_sensing_summary(),
        cells=[],
        subdivision_result=None,
    )
    if not observations.exists():
        return response_payload

    paginated_observations = _paginate_observations(observations, page=page, page_size=page_size)
    pagination = {"cells": paginated_observations["pagination"]}
    paginated_assignments = []
    if subdivision_result is not None:
        paginated = _paginate_assignments(subdivision_result, page=page, page_size=page_size)
        paginated_assignments = paginated["items"]
        pagination["assignments"] = paginated["pagination"]

    response_payload["summary"] = _build_remote_sensing_summary(observations)
    response_payload["cells"] = RemoteSensingCellObservationSerializer(
        paginated_observations["items"],
        many=True,
    ).data
    response_payload["pagination"] = pagination

    if subdivision_result is not None:
        # Pass the already-paginated assignments so the serializer does not
        # re-fetch the full assignment set.
        response_payload["subdivision_result"] = RemoteSensingSubdivisionResultSerializer(
            subdivision_result,
            context={"paginated_assignments": paginated_assignments},
        ).data
    return response_payload
2026-05-09 16:55:06 +03:30
2026-05-10 22:49:07 +03:30
def _get_remote_sensing_async_result(task_id: str):
try:
from celery.result import AsyncResult
except ImportError: # pragma: no cover - fallback when Celery is absent
return None
try:
return AsyncResult(task_id)
except Exception: # pragma: no cover - depends on Celery backend configuration
return None
def _serialize_task_value(value):
if value is None or isinstance(value, (str, int, float, bool)):
return value
if isinstance(value, dict):
return {str(key): _serialize_task_value(item) for key, item in value.items()}
if isinstance(value, (list, tuple)):
return [_serialize_task_value(item) for item in value]
return str(value)
def _build_remote_sensing_task_payload(run: RemoteSensingRun) -> dict:
    """Derive the task-progress payload from a run's stored metadata.

    Optional sections (failed_stage, metric_progress, retry info,
    failure_reason, live celery state) are attached only when present.
    """
    metadata = dict(run.metadata or {})
    timestamps = dict(metadata.get("timestamps") or {})
    stage_details = dict(metadata.get("stage_details") or {})
    current_stage = metadata.get("stage")

    payload = {
        "current_stage": current_stage,
        "current_stage_details": stage_details.get(current_stage, {}),
        "timestamps": timestamps,
        "stages": _build_remote_sensing_stage_entries(
            current_stage=current_stage,
            stage_details=stage_details,
            timestamps=timestamps,
            run_status=run.status,
        ),
    }

    failed_stage = metadata.get("failed_stage")
    if failed_stage:
        payload["failed_stage"] = failed_stage

    metric_progress = (stage_details.get("fetching_remote_metrics") or {}).get("metric_progress")
    if metric_progress:
        payload["metric_progress"] = metric_progress

    retry_context = stage_details.get("retrying")
    if retry_context:
        payload["retry"] = retry_context
        payload["last_error"] = retry_context.get("last_error")

    if current_stage == "failed" or run.status == RemoteSensingRun.STATUS_FAILURE:
        failure_reason = metadata.get("failure_reason") or run.error_message
        if failure_reason:
            payload["failure_reason"] = failure_reason

    task_id = metadata.get("task_id")
    celery_payload = _build_remote_sensing_celery_payload(str(task_id)) if task_id else None
    if celery_payload is not None:
        payload["celery"] = celery_payload
    return payload
def _apply_live_retry_state_override(run_data: dict[str, Any], task_data: dict[str, Any]) -> str | None:
    """Overwrite run/task state with "retrying" when Celery reports RETRY.

    Returns the overridden status label ("retrying"), or ``None`` when the
    live Celery state is anything other than RETRY (no mutation happens).
    """
    celery_payload = task_data.get("celery") or {}
    if celery_payload.get("state") != "RETRY":
        return None

    retry_context = dict(task_data.get("retry") or {})
    if not retry_context:
        # No recorded retry metadata yet: synthesize a best-effort context
        # from whatever failure information is already in the task payload.
        stage_is_failed = task_data.get("current_stage") == "failed"
        retry_context = {
            "retry_count": None,
            "retry_delay_seconds": None,
            "last_error": task_data.get("failure_reason") or celery_payload.get("info"),
            "failed_stage": task_data.get("failed_stage"),
            "failed_stage_details": task_data.get("current_stage_details", {}) if stage_is_failed else {},
        }

    task_data["retry"] = retry_context
    task_data["last_error"] = retry_context.get("last_error") or celery_payload.get("info")
    task_data["current_stage"] = "retrying"
    task_data["current_stage_details"] = retry_context
    task_data.pop("failure_reason", None)
    _upsert_retrying_stage_entry(task_data, retry_context)

    for key in ("status_label", "pipeline_status", "stage"):
        run_data[key] = "retrying"
    return "retrying"
def _upsert_retrying_stage_entry(task_data: dict[str, Any], retry_context: dict[str, Any]) -> None:
stages = list(task_data.get("stages") or [])
retrying_entry = {
"name": "retrying",
"status": "running",
"entered_at": (task_data.get("timestamps") or {}).get("retrying_at"),
"details": retry_context,
}
for index, entry in enumerate(stages):
if entry.get("name") == "retrying":
stages[index] = retrying_entry
task_data["stages"] = stages
return
stages.append(retrying_entry)
task_data["stages"] = stages
def _build_remote_sensing_stage_entries(
    *,
    current_stage: str | None,
    stage_details: dict,
    timestamps: dict,
    run_status: str,
) -> list[dict]:
    """Build the ordered list of stage entries with per-stage status labels.

    Only stages that left a trace (details or a "<name>_at" timestamp) or are
    the current stage appear; an out-of-order current stage is appended last.
    """
    visible = [
        name
        for name in REMOTE_SENSING_RUN_STAGE_ORDER
        if name == current_stage or name in stage_details or f"{name}_at" in timestamps
    ]
    if current_stage and current_stage not in visible:
        visible.append(current_stage)

    entries: list[dict] = []
    for name in visible:
        # Only the current stage can be anything other than "completed";
        # its label mirrors the run's overall status.
        if name != current_stage:
            label = "completed"
        elif run_status == RemoteSensingRun.STATUS_FAILURE:
            label = "failed"
        elif run_status == RemoteSensingRun.STATUS_PENDING:
            label = "pending"
        elif run_status == RemoteSensingRun.STATUS_RUNNING:
            label = "running"
        else:
            label = "completed"
        entries.append(
            {
                "name": name,
                "status": label,
                "entered_at": timestamps.get(f"{name}_at"),
                "details": stage_details.get(name, {}),
            }
        )
    return entries
def _build_remote_sensing_celery_payload(task_id: str) -> dict | None:
    """Snapshot the live Celery state for *task_id*.

    Returns ``None`` when Celery (or its result backend) is unavailable or
    raises, so callers can fall back to database-recorded state.
    """
    async_result = _get_remote_sensing_async_result(task_id)
    if async_result is None:
        return None
    try:
        is_ready = bool(async_result.ready())
        payload = {
            "state": str(async_result.state),
            "ready": is_ready,
            "successful": bool(async_result.successful()) if is_ready else False,
            "failed": bool(async_result.failed()) if is_ready else False,
        }
        info = getattr(async_result, "info", None)
        if info not in (None, {}):
            payload["info"] = _serialize_task_value(info)
        # Fix: reuse the ready-guarded value computed above instead of calling
        # failed() again unguarded and outside the try block (the previous
        # version did, which could raise on a not-ready task depending on the
        # backend and escape this function's None-on-error contract).
        if payload["failed"]:
            payload["error"] = _serialize_task_value(async_result.result)
    except Exception:  # pragma: no cover - depends on Celery backend configuration
        return None
    return payload
2026-05-09 16:55:06 +03:30
def _get_location_by_lat_lon(lat, lon, *, prefetch: bool = False):
    """Fetch the SoilLocation at (lat, lon), rounded to 6 decimal places.

    Returns ``None`` when no such location exists. With *prefetch* set, the
    location's block subdivisions are prefetched alongside.
    """
    queryset = SoilLocation.objects.filter(
        latitude=round(lat, 6),
        longitude=round(lon, 6),
    )
    if prefetch:
        queryset = queryset.prefetch_related("block_subdivisions")
    return queryset.first()
def _sync_defined_blocks(location: SoilLocation, blocks: list[dict]) -> None:
    """Reconcile farmer-defined blocks with stored BlockSubdivision rows.

    - Deletes subdivisions (and their derived analysis state) that are
      missing from the incoming *blocks* payload.
    - Creates new subdivisions in the initial "defined" state.
    - Updates existing ones; a boundary change additionally resets all
      derived analysis state for that block.

    No-op when *blocks* is empty; all changes run in one transaction.
    """
    if not blocks:
        return
    with transaction.atomic():
        # Normalize input: fall back to "block-N" codes, empty boundaries,
        # and 1-based ordering when fields are missing.
        normalized_blocks = []
        for index, block in enumerate(blocks):
            normalized_blocks.append(
                {
                    "block_code": str(block.get("block_code") or f"block-{index + 1}").strip(),
                    "boundary": block.get("boundary") or {},
                    "order": int(block.get("order") or index + 1),
                }
            )
        normalized_codes = {block["block_code"] for block in normalized_blocks}
        existing_subdivisions = {
            subdivision.block_code: subdivision
            for subdivision in location.block_subdivisions.all()
        }
        # Remove subdivisions the farmer no longer defines, including any
        # analysis rows derived from them.
        for block_code, subdivision in existing_subdivisions.items():
            if block_code not in normalized_codes:
                _clear_block_analysis_state(location, block_code, subdivision=subdivision)
                subdivision.delete()
        for block in normalized_blocks:
            block_code = block["block_code"]
            boundary = block["boundary"]
            subdivision = existing_subdivisions.get(block_code)
            definition_metadata = {
                "definition_source": "farmer_input",
                "order": block["order"],
            }
            if subdivision is None:
                # Brand-new block: created in the "defined" state with an
                # empty grid (900 sqm is the fixed chunk size used here).
                BlockSubdivision.objects.create(
                    soil_location=location,
                    block_code=block_code,
                    source_boundary=boundary,
                    chunk_size_sqm=900,
                    grid_points=[],
                    centroid_points=[],
                    grid_point_count=0,
                    centroid_count=0,
                    status="defined",
                    metadata=definition_metadata,
                )
                continue
            boundary_changed = subdivision.source_boundary != boundary
            metadata = dict(subdivision.metadata or {})
            metadata.update(definition_metadata)
            subdivision.source_boundary = boundary
            subdivision.chunk_size_sqm = 900
            subdivision.metadata = metadata
            if boundary_changed:
                # A changed boundary invalidates every derived artifact:
                # clear analysis state and reset the subdivision to "defined".
                # NOTE: metadata is replaced wholesale (not merged) here.
                _clear_block_analysis_state(location, block_code, subdivision=subdivision)
                subdivision.grid_points = []
                subdivision.centroid_points = []
                subdivision.grid_point_count = 0
                subdivision.centroid_count = 0
                subdivision.status = "defined"
                subdivision.metadata = definition_metadata
                subdivision.elbow_plot = None
                subdivision.save(
                    update_fields=[
                        "source_boundary",
                        "chunk_size_sqm",
                        "grid_points",
                        "centroid_points",
                        "grid_point_count",
                        "centroid_count",
                        "status",
                        "metadata",
                        "elbow_plot",
                        "updated_at",
                    ]
                )
                continue
            # Boundary unchanged: persist only boundary/chunk-size/metadata.
            subdivision.save(
                update_fields=[
                    "source_boundary",
                    "chunk_size_sqm",
                    "metadata",
                    "updated_at",
                ]
            )
def _clear_block_analysis_state(
    location: SoilLocation,
    block_code: str,
    *,
    subdivision: BlockSubdivision | None = None,
) -> None:
    """Delete derived analysis rows for a block and reset its subdivision.

    Removes grid cells, subdivision results, and runs scoped to
    (*location*, *block_code*). When *subdivision* is given, its in-memory
    grid/centroid state is reset to "defined" — the caller is responsible
    for saving it afterwards.
    """
    scoped_code = block_code or ""
    # Same deletion order as before: cells, then results, then runs.
    for model in (AnalysisGridCell, RemoteSensingSubdivisionResult, RemoteSensingRun):
        model.objects.filter(soil_location=location, block_code=scoped_code).delete()

    if subdivision is None:
        return
    subdivision.grid_points = []
    subdivision.centroid_points = []
    subdivision.grid_point_count = 0
    subdivision.centroid_count = 0
    subdivision.status = "defined"
    subdivision.elbow_plot = None
2026-05-10 02:02:48 +03:30
def _resolve_chunk_size_for_location(location: SoilLocation, block_code: str) -> int:
    """Resolve the analysis chunk size (sqm) for a location/block.

    Precedence for a named block: its stored subdivision, then its entry in
    the location's block_layout. Without a block code, the layout-level
    summary is used. 900 sqm is the fallback everywhere.
    """
    default_sqm = 900
    layout = location.block_layout or {}

    if not block_code:
        summary = layout.get("analysis_grid_summary", {})
        return int(summary.get("chunk_size_sqm") or default_sqm)

    subdivision = location.block_subdivisions.filter(block_code=block_code).first()
    if subdivision is not None:
        return int(subdivision.chunk_size_sqm or default_sqm)

    for entry in layout.get("blocks", []):
        if entry.get("block_code") == block_code:
            return int(entry.get("analysis_grid_summary", {}).get("chunk_size_sqm") or default_sqm)
    return default_sqm
2026-05-09 16:55:06 +03:30
2026-05-11 04:38:44 +03:30
def _resolve_live_remote_sensing_window(payload: dict[str, Any]):
temporal_start = payload.get("temporal_start")
temporal_end = payload.get("temporal_end")
if temporal_start and temporal_end:
return temporal_start, temporal_end
days = int(payload.get("days") or 30)
end_date = timezone.localdate() - timedelta(days=1)
start_date = end_date - timedelta(days=days - 1)
return start_date, end_date
def _resolve_cluster_block_geometry(cluster_block: RemoteSensingClusterBlock) -> dict[str, Any]:
    """Return a GeoJSON geometry for a cluster block.

    Prefers the block's stored geometry; otherwise rebuilds a Polygon /
    MultiPolygon from its member cells' geometries, ordered by cell code.
    Returns ``{}`` when no geometry can be derived.
    """
    stored = dict(cluster_block.geometry or {})
    if stored.get("type") and stored.get("coordinates"):
        return stored

    member_codes = list(cluster_block.cell_codes or [])
    if not member_codes:
        return {}

    cell_geometries = (
        AnalysisGridCell.objects.filter(
            soil_location=cluster_block.soil_location,
            cell_code__in=member_codes,
        )
        .order_by("cell_code")
        .values_list("geometry", flat=True)
    )
    polygons: list[list[list[list[float]]]] = []
    for raw_geometry in cell_geometries:
        geometry = dict(raw_geometry or {})
        coords = geometry.get("coordinates") or []
        if not coords:
            continue
        if geometry.get("type") == "Polygon":
            polygons.append(coords)
        elif geometry.get("type") == "MultiPolygon":
            # Flatten member polygons into one coordinate list.
            polygons.extend(coords)

    if not polygons:
        return {}
    if len(polygons) == 1:
        return {"type": "Polygon", "coordinates": polygons[0]}
    return {"type": "MultiPolygon", "coordinates": polygons}
def _build_virtual_cluster_block_cell(
    *,
    cluster_block: RemoteSensingClusterBlock,
    geometry: dict[str, Any],
):
    """Build a lightweight, cell-like stand-in for a whole cluster block.

    The returned namespace mirrors the AnalysisGridCell attributes read
    downstream, with a synthetic "cluster-<uuid>" cell code.
    """
    synthetic_code = f"cluster-{cluster_block.uuid}"
    return SimpleNamespace(
        cell_code=synthetic_code,
        block_code=cluster_block.block_code,
        soil_location_id=cluster_block.soil_location_id,
        chunk_size_sqm=cluster_block.chunk_size_sqm,
        geometry=geometry,
    )
2026-05-09 16:55:06 +03:30
def _get_remote_sensing_observations(*, location, block_code: str, start_date, end_date):
    """Fetch grid-cell observations for a location/block and exact date
    window, ordered by cell code (cell and run are select_related)."""
    # cell is a single-valued relation, so one combined filter is equivalent
    # to the chained form.
    return (
        AnalysisGridObservation.objects.select_related("cell", "run")
        .filter(
            cell__soil_location=location,
            cell__block_code=block_code or "",
            temporal_start=start_date,
            temporal_end=end_date,
        )
        .order_by("cell__cell_code")
    )
def _get_latest_remote_sensing_run(*, location, block_code: str, start_date, end_date):
    """Return the most recently created run for the given scope, or None."""
    matching_runs = RemoteSensingRun.objects.filter(
        soil_location=location,
        block_code=block_code or "",
        temporal_start=start_date,
        temporal_end=end_date,
    )
    return matching_runs.order_by("-created_at", "-id").first()
def _get_remote_sensing_subdivision_result(*, location, block_code: str, start_date, end_date):
    """Return the latest subdivision result for the scope, or None.

    The related run, assignments (with their cells), and cluster blocks are
    eagerly loaded for serialization.
    """
    candidates = RemoteSensingSubdivisionResult.objects.filter(
        soil_location=location,
        block_code=block_code or "",
        temporal_start=start_date,
        temporal_end=end_date,
    )
    return (
        candidates.select_related("run")
        .prefetch_related("assignments__cell", "cluster_blocks")
        .order_by("-created_at", "-id")
        .first()
    )
def _build_remote_sensing_summary(observations):
    """Aggregate mean NDVI/NDWI/soil-VV metrics over an observation queryset.

    The cell count comes from ``observations.count()``; metric means are
    rounded to 6 decimal places, or None when there is no data.
    """
    # Fix: the previous version also computed Avg("cell_id") under the key
    # "cell_count" — a meaningless SQL average that was never read (the real
    # count below uses observations.count()). It has been dropped.
    aggregates = observations.aggregate(
        ndvi_mean=Avg("ndvi"),
        ndwi_mean=Avg("ndwi"),
        soil_vv_db_mean=Avg("soil_vv_db"),
    )
    return {
        "cell_count": observations.count(),
        "ndvi_mean": _round_or_none(aggregates.get("ndvi_mean")),
        "ndwi_mean": _round_or_none(aggregates.get("ndwi_mean")),
        "soil_vv_db_mean": _round_or_none(aggregates.get("soil_vv_db_mean")),
    }
def _empty_remote_sensing_summary():
return {
"cell_count": 0,
"ndvi_mean": None,
"ndwi_mean": None,
"soil_vv_db_mean": None,
}
def _round_or_none(value):
if value is None:
return None
return round(float(value), 6)
def _paginate_assignments(result: RemoteSensingSubdivisionResult, *, page: int, page_size: int) -> dict:
    """Paginate a subdivision result's cell assignments.

    Assignments are ordered by cluster label then cell code. Pagination
    semantics (page_size clamping, last-page fallback on out-of-range pages,
    empty-input payload) are delegated to ``_paginate_observations`` — the
    previous version duplicated that function's body line for line.
    """
    assignments = result.assignments.select_related("cell").order_by(
        "cluster_label",
        "cell__cell_code",
    )
    return _paginate_observations(assignments, page=page, page_size=page_size)
def _safe_positive_int(value, *, default: int) -> int:
try:
parsed = int(value)
except (TypeError, ValueError):
return default
return parsed if parsed > 0 else default
def _paginate_observations(observations, *, page: int, page_size: int) -> dict:
    """Paginate a queryset/sequence into ``{"items", "pagination"}``.

    ``page_size`` is clamped to [1, MAX_REMOTE_SENSING_PAGE_SIZE]; a page
    number past the end falls back to the last page, and an empty input
    yields an empty page-1 payload with zero totals.
    """
    page_size = min(max(page_size, 1), MAX_REMOTE_SENSING_PAGE_SIZE)
    paginator = Paginator(observations, page_size)

    if paginator.count == 0:
        empty_meta = {
            "page": 1,
            "page_size": page_size,
            "total_items": 0,
            "total_pages": 0,
            "has_next": False,
            "has_previous": False,
        }
        return {"items": [], "pagination": empty_meta}

    try:
        current = paginator.page(page)
    except EmptyPage:
        # Out-of-range page number: serve the last available page instead.
        current = paginator.page(paginator.num_pages)

    return {
        "items": list(current.object_list),
        "pagination": {
            "page": current.number,
            "page_size": page_size,
            "total_items": paginator.count,
            "total_pages": paginator.num_pages,
            "has_next": current.has_next(),
            "has_previous": current.has_previous(),
        },
    }