1967 lines
74 KiB
Python
1967 lines
74 KiB
Python
from datetime import timedelta
|
|
from types import SimpleNamespace
|
|
from typing import Any
|
|
from uuid import uuid4
|
|
|
|
from django.apps import apps
|
|
from django.core.paginator import EmptyPage, Paginator
|
|
from django.db.models import Avg
|
|
from django.db import transaction
|
|
from django.utils import timezone
|
|
from rest_framework import status
|
|
from drf_spectacular.utils import (
|
|
OpenApiExample,
|
|
OpenApiParameter,
|
|
OpenApiResponse,
|
|
extend_schema,
|
|
inline_serializer,
|
|
)
|
|
from rest_framework import serializers as drf_serializers
|
|
from rest_framework.response import Response
|
|
from rest_framework.views import APIView
|
|
|
|
from config.openapi import (
|
|
build_envelope_serializer,
|
|
build_response,
|
|
)
|
|
from .models import (
|
|
AnalysisGridCell,
|
|
AnalysisGridObservation,
|
|
BlockSubdivision,
|
|
RemoteSensingClusterBlock,
|
|
RemoteSensingRun,
|
|
RemoteSensingSubdivisionResult,
|
|
RemoteSensingSubdivisionOption,
|
|
SoilLocation,
|
|
)
|
|
from farm_data.models import SensorData
|
|
|
|
from .data_driven_subdivision import DEFAULT_CLUSTER_FEATURES
|
|
from .data_driven_subdivision import activate_subdivision_option
|
|
from .cluster_recommendation import (
|
|
ClusterRecommendationNotFound,
|
|
ClusterRecommendationValidationError,
|
|
build_cluster_crop_recommendations,
|
|
)
|
|
from .serializers import (
|
|
BlockSubdivisionSerializer,
|
|
ClusterCropRecommendationRequestSerializer,
|
|
ClusterCropRecommendationResponseSerializer,
|
|
NdviHealthRequestSerializer,
|
|
NdviHealthResponseSerializer,
|
|
RemoteSensingCellObservationSerializer,
|
|
RemoteSensingResponseSerializer,
|
|
RemoteSensingFarmRequestSerializer,
|
|
RemoteSensingRunSerializer,
|
|
RemoteSensingRunStatusResponseSerializer,
|
|
RemoteSensingClusterBlockLiveRequestSerializer,
|
|
RemoteSensingClusterBlockLiveResponseSerializer,
|
|
RemoteSensingClusterBlockSerializer,
|
|
RemoteSensingSubdivisionOptionActivateResponseSerializer,
|
|
RemoteSensingSubdivisionOptionActivateSerializer,
|
|
RemoteSensingSubdivisionOptionListResponseSerializer,
|
|
RemoteSensingSubdivisionOptionSerializer,
|
|
RemoteSensingSummarySerializer,
|
|
RemoteSensingSubdivisionResultSerializer,
|
|
SoilDataRequestSerializer,
|
|
SoilLocationResponseSerializer,
|
|
)
|
|
from .tasks import run_remote_sensing_analysis_task
|
|
from .openeo_service import (
|
|
OpenEOAuthenticationError,
|
|
OpenEOExecutionError,
|
|
OpenEOServiceError,
|
|
compute_remote_sensing_metrics,
|
|
)
|
|
|
|
# Hard upper bound on `page_size` accepted by remote-sensing endpoints.
MAX_REMOTE_SENSING_PAGE_SIZE = 200

# Canonical ordering of pipeline stages recorded in RemoteSensingRun metadata;
# later entries represent later progress in the async analysis pipeline.
REMOTE_SENSING_RUN_STAGE_ORDER = (
    "queued",
    "running",
    "preparing_analysis_grid",
    "analysis_grid_ready",
    "analysis_cells_selected",
    "using_cached_observations",
    "fetching_remote_metrics",
    "remote_metrics_fetched",
    "observations_persisted",
    "clustering_completed",
    "completed",
    "failed",
    "retrying",
)

# OpenAPI-only schema describing the payload of a stored SoilLocation.
SoilLocationPayloadSerializer = inline_serializer(
    name="SoilLocationPayloadSerializer",
    fields={
        "source": drf_serializers.CharField(),
        "id": drf_serializers.IntegerField(),
        "lon": drf_serializers.DecimalField(max_digits=9, decimal_places=6),
        "lat": drf_serializers.DecimalField(max_digits=9, decimal_places=6),
        "input_block_count": drf_serializers.IntegerField(),
        "farm_boundary": drf_serializers.JSONField(),
        "block_layout": drf_serializers.JSONField(),
        "block_subdivisions": BlockSubdivisionSerializer(many=True),
        "satellite_snapshots": drf_serializers.JSONField(),
    },
)
# {code, msg, data} response envelopes used purely for OpenAPI documentation.
SoilDataResponseSerializer = build_envelope_serializer(
    "SoilDataResponseSerializer",
    SoilLocationPayloadSerializer,
)
# Generic error envelope: `data` is optional and may be null.
SoilErrorResponseSerializer = build_envelope_serializer(
    "SoilErrorResponseSerializer",
    data_required=False,
    allow_null=True,
)
NdviHealthEnvelopeSerializer = build_envelope_serializer(
    "NdviHealthEnvelopeSerializer",
    NdviHealthResponseSerializer,
)
RemoteSensingEnvelopeSerializer = build_envelope_serializer(
    "RemoteSensingEnvelopeSerializer",
    RemoteSensingResponseSerializer,
)
# Envelope for the 202 "analysis queued" response: mirrors the shape built in
# RemoteSensingAnalysisView.post (empty summary/cells plus run + task ids).
RemoteSensingQueuedEnvelopeSerializer = build_envelope_serializer(
    "RemoteSensingQueuedEnvelopeSerializer",
    inline_serializer(
        name="RemoteSensingQueuedPayloadSerializer",
        fields={
            "status": drf_serializers.CharField(),
            "source": drf_serializers.CharField(),
            "location": drf_serializers.JSONField(),
            "block_code": drf_serializers.CharField(),
            "chunk_size_sqm": drf_serializers.IntegerField(allow_null=True),
            "temporal_extent": drf_serializers.JSONField(),
            "summary": RemoteSensingSummarySerializer(),
            "cells": drf_serializers.JSONField(),
            "run": drf_serializers.JSONField(allow_null=True),
            "task_id": drf_serializers.UUIDField(),
        },
    ),
)
RemoteSensingRunStatusEnvelopeSerializer = build_envelope_serializer(
    "RemoteSensingRunStatusEnvelopeSerializer",
    RemoteSensingRunStatusResponseSerializer,
)
RemoteSensingClusterBlockLiveEnvelopeSerializer = build_envelope_serializer(
    "RemoteSensingClusterBlockLiveEnvelopeSerializer",
    RemoteSensingClusterBlockLiveResponseSerializer,
)
ClusterCropRecommendationEnvelopeSerializer = build_envelope_serializer(
    "ClusterCropRecommendationEnvelopeSerializer",
    ClusterCropRecommendationResponseSerializer,
)
RemoteSensingSubdivisionOptionListEnvelopeSerializer = build_envelope_serializer(
    "RemoteSensingSubdivisionOptionListEnvelopeSerializer",
    RemoteSensingSubdivisionOptionListResponseSerializer,
)
RemoteSensingSubdivisionOptionActivateEnvelopeSerializer = build_envelope_serializer(
    "RemoteSensingSubdivisionOptionActivateEnvelopeSerializer",
    RemoteSensingSubdivisionOptionActivateResponseSerializer,
)
|
|
class SoilDataView(APIView):
    """
    Register the farm corner coordinates and the farmer-defined blocks.

    GET returns the stored farm structure for a (lat, lon) pair; POST creates
    or updates the SoilLocation record and syncs the defined blocks. No
    synchronous subdivision is run from either handler.
    """

    @extend_schema(
        tags=["Location Data"],
        summary="خواندن ساختار مزرعه و بلوکها (GET)",
        description="با ارسال lat و lon، ساختار ذخیرهشده مزرعه، بلوکها و آخرین خلاصه سنجشازدور هر بلوک بازگردانده میشود.",
        parameters=[
            {
                "name": "lat",
                "in": "query",
                "required": True,
                "schema": {"type": "number"},
                "description": "عرض جغرافیایی",
            },
            {
                "name": "lon",
                "in": "query",
                "required": True,
                "schema": {"type": "number"},
                "description": "طول جغرافیایی",
            },
            {
                "name": "block_code",
                "in": "query",
                "required": False,
                "schema": {"type": "string", "default": "block-1"},
                "description": "در GET فقط برای فیلتر کلاینتی است و الگوریتمی اجرا نمیکند.",
            },
        ],
        responses={
            200: build_response(
                SoilDataResponseSerializer,
                "ساختار بلوکهای زمین از دیتابیس بازگردانده شد.",
            ),
            404: build_response(
                SoilErrorResponseSerializer,
                "location موردنظر پیدا نشد.",
            ),
            400: build_response(
                SoilErrorResponseSerializer,
                "پارامترهای ورودی نامعتبر هستند.",
            ),
        },
    )
    def get(self, request):
        # Validate lat/lon from the query string before touching the database.
        serializer = SoilDataRequestSerializer(data=request.query_params)
        if not serializer.is_valid():
            return Response(
                {"code": 400, "msg": "داده نامعتبر.", "data": serializer.errors},
                status=status.HTTP_400_BAD_REQUEST,
            )

        lat = serializer.validated_data["lat"]
        lon = serializer.validated_data["lon"]
        # prefetch=True loads related block data in one query round-trip.
        location = _get_location_by_lat_lon(lat, lon, prefetch=True)
        if location is None:
            return Response(
                {"code": 404, "msg": "location پیدا نشد.", "data": None},
                status=status.HTTP_404_NOT_FOUND,
            )

        data_serializer = SoilLocationResponseSerializer(location)
        return Response(
            {"code": 200, "msg": "success", "data": {"source": "database", **data_serializer.data}},
            status=status.HTTP_200_OK,
        )

    @extend_schema(
        tags=["Location Data"],
        summary="ثبت مزرعه و بلوکهای کشاورز (POST)",
        description="مختصات گوشههای مزرعه و boundary هر بلوک کشاورز ذخیره میشود. هیچ subdivision سنکرونی اجرا نمیشود.",
        request=SoilDataRequestSerializer,
        responses={
            200: build_response(
                SoilDataResponseSerializer,
                "اطلاعات location ذخیره یا بهروزرسانی شد.",
            ),
            400: build_response(
                SoilErrorResponseSerializer,
                "پارامترهای ورودی نامعتبر هستند.",
            ),
        },
        examples=[
            OpenApiExample(
                "نمونه درخواست",
                value={
                    "lat": 35.6892,
                    "lon": 51.3890,
                    "farm_boundary": {
                        "type": "Polygon",
                        "coordinates": [
                            [
                                [51.3890, 35.6890],
                                [51.3902, 35.6890],
                                [51.3902, 35.6900],
                                [51.3890, 35.6900],
                                [51.3890, 35.6890],
                            ]
                        ],
                    },
                    "blocks": [
                        {
                            "block_code": "block-1",
                            "boundary": {
                                "type": "Polygon",
                                "coordinates": [
                                    [
                                        [51.3890, 35.6890],
                                        [51.3896, 35.6890],
                                        [51.3896, 35.6900],
                                        [51.3890, 35.6900],
                                        [51.3890, 35.6890],
                                    ]
                                ],
                            },
                        }
                    ],
                },
                request_only=True,
            ),
        ],
    )
    def post(self, request):
        # require_farm_boundary: the serializer enforces farm_boundary unless
        # one is already stored (re-checked explicitly further below).
        serializer = SoilDataRequestSerializer(
            data=request.data,
            context={"require_farm_boundary": True},
        )
        if not serializer.is_valid():
            return Response(
                {"code": 400, "msg": "داده نامعتبر.", "data": serializer.errors},
                status=status.HTTP_400_BAD_REQUEST,
            )

        lat = serializer.validated_data["lat"]
        lon = serializer.validated_data["lon"]
        block_count = serializer.validated_data.get("block_count", 1)
        farm_boundary = serializer.validated_data.get("farm_boundary")
        blocks = serializer.validated_data.get("blocks") or []
        farm_boundary_changed = False
        # Coordinates are rounded to 6 decimal places so that the same point
        # always maps to the same SoilLocation row.
        lat_rounded = round(lat, 6)
        lon_rounded = round(lon, 6)

        location, created = SoilLocation.objects.get_or_create(
            latitude=lat_rounded,
            longitude=lon_rounded,
            defaults={
                "input_block_count": block_count,
                "farm_boundary": farm_boundary or {},
            },
        )
        if created:
            location.set_input_block_count(block_count, blocks=blocks or None)
            if farm_boundary is not None:
                location.farm_boundary = farm_boundary
            location.save(update_fields=["input_block_count", "farm_boundary", "block_layout", "updated_at"])
        else:
            # Update only the fields that actually changed, tracking them so a
            # single save() call suffices.
            changed_fields = []
            if block_count != location.input_block_count or blocks:
                location.set_input_block_count(block_count, blocks=blocks or None)
                changed_fields.extend(["input_block_count", "block_layout"])
            if farm_boundary is not None and location.farm_boundary != farm_boundary:
                location.farm_boundary = farm_boundary
                changed_fields.append("farm_boundary")
                farm_boundary_changed = True
            if changed_fields:
                changed_fields.append("updated_at")
                location.save(update_fields=changed_fields)
            if farm_boundary_changed:
                # A new farm boundary invalidates previously derived analysis
                # state for every block ("" = all blocks).
                _clear_block_analysis_state(location, "")

        # Reject the request when no boundary was sent and none is stored.
        if not (farm_boundary or location.farm_boundary):
            return Response(
                {
                    "code": 400,
                    "msg": "داده نامعتبر.",
                    "data": {"farm_boundary": ["برای ثبت location باید گوشههای کل زمین ارسال یا قبلاً ذخیره شده باشد."]},
                },
                status=status.HTTP_400_BAD_REQUEST,
            )

        _sync_defined_blocks(location, blocks)

        # Re-read with prefetch so the response contains fresh related data.
        location = _get_location_by_lat_lon(lat, lon, prefetch=True)
        data_serializer = SoilLocationResponseSerializer(location)
        return Response(
            {
                "code": 200,
                "msg": "success",
                "data": {
                    "source": "created" if created else "database",
                    **data_serializer.data,
                },
            },
            status=status.HTTP_200_OK,
        )
|
|
|
|
|
|
class NdviHealthView(APIView):
    """Return farm NDVI vegetation-health data, independent of the dashboard.

    Delegates to the NDVI health service registered on the app config; a
    ValueError from the service is mapped to a 404 response.
    """

    @extend_schema(
        tags=["Location Data"],
        summary="دریافت NDVI سلامت مزرعه",
        description="با دریافت farm_uuid، داده NDVI سلامت پوشش گیاهی مزرعه را به صورت مستقل از dashboard برمی گرداند.",
        request=NdviHealthRequestSerializer,
        responses={
            200: build_response(
                NdviHealthEnvelopeSerializer,
                "داده NDVI مزرعه با موفقیت بازگردانده شد.",
            ),
            400: build_response(
                SoilErrorResponseSerializer,
                "داده ورودی نامعتبر است.",
            ),
            404: build_response(
                SoilErrorResponseSerializer,
                "مزرعه یافت نشد.",
            ),
        },
        examples=[
            OpenApiExample(
                "نمونه درخواست NDVI",
                value={"farm_uuid": "11111111-1111-1111-1111-111111111111"},
                request_only=True,
            )
        ],
    )
    def post(self, request):
        serializer = NdviHealthRequestSerializer(data=request.data)
        if not serializer.is_valid():
            return Response(
                {"code": 400, "msg": "داده نامعتبر.", "data": serializer.errors},
                status=status.HTTP_400_BAD_REQUEST,
            )

        # Service is resolved via the app config so tests can swap it out.
        service = apps.get_app_config("location_data").get_ndvi_health_service()
        try:
            data = service.get_ndvi_health(
                farm_uuid=str(serializer.validated_data["farm_uuid"])
            )
        except ValueError as exc:
            # Service raises ValueError when the farm is unknown -> 404.
            return Response(
                {"code": 404, "msg": str(exc), "data": None},
                status=status.HTTP_404_NOT_FOUND,
            )

        return Response(
            {"code": 200, "msg": "success", "data": data},
            status=status.HTTP_200_OK,
        )
|
|
|
|
|
|
class RemoteSensingAnalysisView(APIView):
    """Async remote-sensing analysis + data-driven subdivision entry point.

    POST returns cached results when available, otherwise queues the full
    pipeline (grid + openEO + observation persistence + KMeans clustering)
    on Celery. GET only reads cached results and never triggers processing.
    """

    @extend_schema(
        tags=["Location Data"],
        summary="اجرای async تحلیل سنجشازدور و subdivision دادهمحور",
        description=(
            "اگر خروجی cache شده برای مزرعه موجود باشد، همان داده مستقیم برگردانده میشود. "
            "در غیر این صورت pipeline کامل grid + openEO + observation persistence + KMeans clustering در Celery صف میشود."
        ),
        request=RemoteSensingFarmRequestSerializer,
        responses={
            200: build_response(
                RemoteSensingEnvelopeSerializer,
                "خروجی cache شده remote sensing بدون enqueue کردن Celery بازگردانده شد.",
            ),
            202: build_response(
                RemoteSensingQueuedEnvelopeSerializer,
                "درخواست تحلیل سنجشازدور در صف قرار گرفت.",
            ),
            400: build_response(
                SoilErrorResponseSerializer,
                "داده ورودی نامعتبر است.",
            ),
            404: build_response(
                SoilErrorResponseSerializer,
                "location موردنظر پیدا نشد.",
            ),
        },
        examples=[
            OpenApiExample(
                "نمونه درخواست remote sensing",
                value={
                    "farm_uuid": "11111111-1111-1111-1111-111111111111",
                    "force_refresh": False,
                },
                request_only=True,
            ),
        ],
    )
    def post(self, request):
        serializer = RemoteSensingFarmRequestSerializer(data=request.data)
        if not serializer.is_valid():
            return Response(
                {"code": 400, "msg": "داده نامعتبر.", "data": serializer.errors},
                status=status.HTTP_400_BAD_REQUEST,
            )

        payload = serializer.validated_data
        # farm_uuid -> SensorData -> its center SoilLocation.
        farm = SensorData.objects.select_related("center_location").filter(farm_uuid=payload["farm_uuid"]).first()
        location = getattr(farm, "center_location", None)
        if location is None:
            return Response(
                {"code": 404, "msg": "location پیدا نشد.", "data": None},
                status=status.HTTP_404_NOT_FOUND,
            )

        # Fixed 30-day window ending yesterday (local date).
        temporal_end = timezone.localdate() - timedelta(days=1)
        temporal_start = temporal_end - timedelta(days=30)
        if not payload.get("force_refresh", False):
            # Try to serve a cached result without touching Celery.
            cached_response = _build_cached_remote_sensing_response(
                location=location,
                farm_uuid=str(payload["farm_uuid"]),
                block_code="",
                start_date=temporal_start,
                end_date=temporal_end,
                page=payload.get("page", 1),
                page_size=payload.get("page_size", 100),
            )
            if cached_response is not None:
                # "processing" means a run is still in flight -> 202;
                # otherwise the cached result is final -> 200.
                processing = cached_response.get("status") == "processing"
                status_code = status.HTTP_202_ACCEPTED if processing else status.HTTP_200_OK
                response_payload = cached_response
                return Response(
                    {
                        "code": 202 if status_code == status.HTTP_202_ACCEPTED else 200,
                        "msg": "success" if processing else "داده cache شده بازگردانده شد.",
                        "data": response_payload,
                    },
                    status=status_code,
                )
        # No usable cache (or refresh forced): create a run record first so the
        # task can report progress against it.
        run = RemoteSensingRun.objects.create(
            soil_location=location,
            block_code="",
            chunk_size_sqm=_resolve_chunk_size_for_location(location, ""),
            temporal_start=temporal_start,
            temporal_end=temporal_end,
            status=RemoteSensingRun.STATUS_PENDING,
            metadata={
                "requested_via": "api",
                "stage": "queued",
                "status_label": "pending",
                "requested_cluster_count": None,
                "selected_features": list(DEFAULT_CLUSTER_FEATURES),
                "farm_uuid": str(payload["farm_uuid"]),
                "scope": "all_blocks",
            },
        )
        task_result = run_remote_sensing_analysis_task.delay(
            soil_location_id=location.id,
            block_code="",
            temporal_start=temporal_start.isoformat(),
            temporal_end=temporal_end.isoformat(),
            force_refresh=payload.get("force_refresh", False),
            run_id=run.id,
            cluster_count=None,
            selected_features=list(DEFAULT_CLUSTER_FEATURES),
        )
        # Store the Celery task id on the run so status polling can find it.
        run.metadata = {**(run.metadata or {}), "task_id": task_result.id}
        run.save(update_fields=["metadata", "updated_at"])

        location_data = SoilLocationResponseSerializer(location).data
        # Placeholder payload: empty summary/cells until the task completes.
        response_payload = {
            "status": "processing",
            "source": "processing",
            "location": location_data,
            "block_code": "",
            "chunk_size_sqm": run.chunk_size_sqm,
            "temporal_extent": {
                "start_date": temporal_start.isoformat(),
                "end_date": temporal_end.isoformat(),
            },
            "summary": _empty_remote_sensing_summary(),
            "cells": [],
            "run": RemoteSensingRunSerializer(run).data,
            "task_id": task_result.id,
        }
        return Response(
            {"code": 202, "msg": "تحلیل سنجشازدور در صف قرار گرفت.", "data": response_payload},
            status=status.HTTP_202_ACCEPTED,
        )

    @extend_schema(
        tags=["Location Data"],
        summary="خواندن نتایج cache شده سنجشازدور و subdivision",
        description="فقط نتایج ذخیرهشده remote sensing و clustering را برمیگرداند و هیچ پردازش sync اجرا نمیکند.",
        parameters=[
            OpenApiParameter(
                name="farm_uuid",
                type=str,
                location=OpenApiParameter.QUERY,
                required=True,
                description="شناسه یکتای مزرعه",
                default="11111111-1111-1111-1111-111111111111",
            ),
            OpenApiParameter(
                name="page",
                type=int,
                location=OpenApiParameter.QUERY,
                required=False,
                default=1,
            ),
            OpenApiParameter(
                name="page_size",
                type=int,
                location=OpenApiParameter.QUERY,
                required=False,
                default=100,
            ),
        ],
        responses={
            200: build_response(
                RemoteSensingEnvelopeSerializer,
                "نتایج cache شده remote sensing بازگردانده شد.",
            ),
            404: build_response(
                SoilErrorResponseSerializer,
                "location موردنظر پیدا نشد.",
            ),
            400: build_response(
                SoilErrorResponseSerializer,
                "داده ورودی نامعتبر است.",
            ),
        },
        examples=[
            OpenApiExample(
                "نمونه درخواست GET remote sensing",
                value={
                    "farm_uuid": "11111111-1111-1111-1111-111111111111",
                    "page": 1,
                    "page_size": 100,
                },
                parameter_only=("farm_uuid", "query"),
            ),
        ],
    )
    def get(self, request):
        serializer = RemoteSensingFarmRequestSerializer(data=request.query_params)
        if not serializer.is_valid():
            return Response(
                {"code": 400, "msg": "داده نامعتبر.", "data": serializer.errors},
                status=status.HTTP_400_BAD_REQUEST,
            )

        payload = serializer.validated_data
        farm = SensorData.objects.select_related("center_location").filter(farm_uuid=payload["farm_uuid"]).first()
        location = getattr(farm, "center_location", None)
        if location is None:
            return Response(
                {"code": 404, "msg": "location پیدا نشد.", "data": None},
                status=status.HTTP_404_NOT_FOUND,
            )

        # Same 30-day yesterday-anchored window as POST for cache lookup.
        temporal_end = timezone.localdate() - timedelta(days=1)
        temporal_start = temporal_end - timedelta(days=30)
        response_payload = _build_cached_remote_sensing_response(
            location=location,
            farm_uuid=str(payload["farm_uuid"]),
            block_code="",
            start_date=temporal_start,
            end_date=temporal_end,
            page=payload["page"],
            page_size=payload["page_size"],
        )
        return Response(
            {"code": 200, "msg": "success", "data": response_payload},
            status=status.HTTP_200_OK,
        )
|
|
|
|
|
|
class RemoteSensingRunStatusView(APIView):
    """Report the status of an async remote-sensing run by Celery task UUID.

    The `run_id` path parameter is the Celery task id stored in the run's
    metadata (not the RemoteSensingRun primary key).
    """

    @extend_schema(
        tags=["Location Data"],
        summary="وضعیت run تحلیل سنجشازدور",
        description="وضعیت async pipeline را با task_id از نوع UUID برمیگرداند. این task_id همان شناسه تسک Celery ذخیرهشده در metadata.run است.",
        parameters=[
            OpenApiParameter(
                name="run_id",
                type={"type": "string", "format": "uuid"},
                location=OpenApiParameter.PATH,
                required=True,
                description="شناسه UUID تسک async (task_id).",
            ),
        ],
        responses={
            200: build_response(
                RemoteSensingRunStatusEnvelopeSerializer,
                "وضعیت run بازگردانده شد و بعد از اتمام، نتیجه نهایی نیز از همین route برگردانده میشود.",
            ),
            404: build_response(
                SoilErrorResponseSerializer,
                "run موردنظر پیدا نشد.",
            ),
        },
    )
    def get(self, request, run_id):
        # Sanitize pagination; page_size is capped at the module-level limit.
        page = _safe_positive_int(request.query_params.get("page"), default=1)
        page_size = min(_safe_positive_int(request.query_params.get("page_size"), default=100), MAX_REMOTE_SENSING_PAGE_SIZE)
        # Look the run up by the Celery task id stored in its JSON metadata.
        run = RemoteSensingRun.objects.filter(metadata__task_id=str(run_id)).select_related("soil_location").first()
        if run is None:
            return Response(
                {"code": 404, "msg": "run با این task_id پیدا نشد.", "data": None},
                status=status.HTTP_404_NOT_FOUND,
            )

        response_payload = _build_remote_sensing_run_status_payload(run, page=page, page_size=page_size)
        return Response(
            {"code": 200, "msg": "success", "data": response_payload},
            status=status.HTTP_200_OK,
        )
|
|
|
|
|
|
class RemoteSensingClusterBlockLiveView(APIView):
    """Fetch live remote-sensing metrics for one KMeans sub-block.

    Resolves the stored geometry of the cluster block identified by
    `cluster_uuid`, serves a cached payload when one matches the requested
    window, and otherwise queries openEO for fresh metrics.
    """

    @extend_schema(
        tags=["Location Data"],
        summary="دریافت زنده remote sensing برای زیربلاک KMeans",
        description="با دریافت UUID زیربلاک ساختهشده توسط KMeans، هندسه همان زیربلاک از دیتابیس خوانده میشود و داده تازه ماهوارهای از openEO برگردانده میشود.",
        parameters=[
            OpenApiParameter(
                name="cluster_uuid",
                type={"type": "string", "format": "uuid"},
                location=OpenApiParameter.PATH,
                required=True,
                description="شناسه UUID زیربلاک KMeans.",
            ),
            OpenApiParameter(
                name="temporal_start",
                type={"type": "string", "format": "date"},
                location=OpenApiParameter.QUERY,
                required=False,
                description="شروع بازه سفارشی. اگر ست شود، temporal_end هم باید ارسال شود.",
            ),
            OpenApiParameter(
                name="temporal_end",
                type={"type": "string", "format": "date"},
                location=OpenApiParameter.QUERY,
                required=False,
                description="پایان بازه سفارشی. اگر ست شود، temporal_start هم باید ارسال شود.",
            ),
            OpenApiParameter(
                name="days",
                type=int,
                location=OpenApiParameter.QUERY,
                required=False,
                default=30,
                description="اگر بازه سفارشی ارسال نشود، از yesterday backfill با این تعداد روز استفاده میشود.",
            ),
        ],
        responses={
            200: build_response(
                RemoteSensingClusterBlockLiveEnvelopeSerializer,
                "داده زنده openEO برای زیربلاک KMeans بازگردانده شد.",
            ),
            400: build_response(
                SoilErrorResponseSerializer,
                "پارامترهای ورودی یا هندسه زیربلاک نامعتبر است.",
            ),
            404: build_response(
                SoilErrorResponseSerializer,
                "زیربلاک KMeans پیدا نشد.",
            ),
            502: build_response(
                SoilErrorResponseSerializer,
                "خواندن داده از openEO ناموفق بود.",
            ),
        },
    )
    def get(self, request, cluster_uuid):
        serializer = RemoteSensingClusterBlockLiveRequestSerializer(data=request.query_params)
        if not serializer.is_valid():
            return Response(
                {"code": 400, "msg": "داده نامعتبر.", "data": serializer.errors},
                status=status.HTTP_400_BAD_REQUEST,
            )

        cluster_block = (
            RemoteSensingClusterBlock.objects.select_related("soil_location", "block_subdivision", "result")
            .filter(uuid=cluster_uuid)
            .first()
        )
        if cluster_block is None:
            return Response(
                {"code": 404, "msg": "زیربلاک KMeans پیدا نشد.", "data": None},
                status=status.HTTP_404_NOT_FOUND,
            )

        # The stored geometry may be absent/invalid; without it no openEO
        # request can be built, so fail fast with a 400.
        geometry = _resolve_cluster_block_geometry(cluster_block)
        if not geometry:
            return Response(
                {
                    "code": 400,
                    "msg": "هندسه زیربلاک KMeans نامعتبر است.",
                    "data": {"cluster_uuid": [str(cluster_block.uuid)]},
                },
                status=status.HTTP_400_BAD_REQUEST,
            )

        # Window: explicit start/end pair, or `days` back from yesterday.
        temporal_start, temporal_end = _resolve_live_remote_sensing_window(serializer.validated_data)
        # Serve cached data for this block/window when available, skipping openEO.
        cached_cluster_payload = _build_cached_cluster_block_live_payload(
            cluster_block=cluster_block,
            temporal_start=temporal_start,
            temporal_end=temporal_end,
        )
        if cached_cluster_payload is not None:
            return Response(
                {"code": 200, "msg": "success", "data": cached_cluster_payload},
                status=status.HTTP_200_OK,
            )
        # Wrap the sub-block geometry in a synthetic grid cell so the shared
        # openEO metrics helper can process it.
        virtual_cell = _build_virtual_cluster_block_cell(cluster_block=cluster_block, geometry=geometry)
        try:
            remote_payload = compute_remote_sensing_metrics(
                [virtual_cell],
                temporal_start=temporal_start,
                temporal_end=temporal_end,
                selected_features=list(DEFAULT_CLUSTER_FEATURES),
            )
        except (OpenEOAuthenticationError, OpenEOExecutionError, OpenEOServiceError) as exc:
            # Any openEO failure surfaces as a 502 with the original detail.
            return Response(
                {"code": 502, "msg": "خواندن داده از openEO ناموفق بود.", "data": {"detail": str(exc)}},
                status=status.HTTP_502_BAD_GATEWAY,
            )

        # Metrics are keyed by cell code; missing entries default to {}.
        metrics = dict(remote_payload.get("results", {}).get(virtual_cell.cell_code, {}) or {})
        response_payload = {
            "status": "success",
            "source": "openeo",
            "cluster_block": RemoteSensingClusterBlockSerializer(cluster_block).data,
            "temporal_extent": {
                "start_date": temporal_start.isoformat(),
                "end_date": temporal_end.isoformat(),
            },
            "selected_features": list(DEFAULT_CLUSTER_FEATURES),
            "summary": {
                "cell_count": int(cluster_block.cell_count or 0),
                "ndvi_mean": _round_or_none(metrics.get("ndvi")),
                "ndwi_mean": _round_or_none(metrics.get("ndwi")),
                "soil_vv_db_mean": _round_or_none(metrics.get("soil_vv_db")),
            },
            "metrics": {
                "ndvi": _round_or_none(metrics.get("ndvi")),
                "ndwi": _round_or_none(metrics.get("ndwi")),
                "soil_vv": _round_or_none(metrics.get("soil_vv")),
                "soil_vv_db": _round_or_none(metrics.get("soil_vv_db")),
            },
            "metadata": {
                **dict(remote_payload.get("metadata") or {}),
                "requested_cluster_uuid": str(cluster_block.uuid),
                "block_code": cluster_block.block_code,
                "sub_block_code": cluster_block.sub_block_code,
            },
        }
        return Response(
            {"code": 200, "msg": "success", "data": response_payload},
            status=status.HTTP_200_OK,
        )
|
|
|
|
|
|
class RemoteSensingClusterRecommendationView(APIView):
    """Return per-cluster crop recommendations for a farm's KMeans output."""

    @extend_schema(
        tags=["Location Data"],
        summary="پیشنهاد گیاه برای هر کلاستر KMeans",
        description=(
            "با دریافت farm_uuid، داده هر کلاستر KMeans location_data بههمراه "
            "ndvi، ndwi، soil_vv، soil_vv_db و مقایسه گیاههای ثبتشده در farm_data "
            "با crop_simulation برگردانده میشود و برای هر زیربلاک یک گیاه پیشنهادی ارائه میشود."
        ),
        parameters=[
            OpenApiParameter(
                name="farm_uuid",
                type=str,
                location=OpenApiParameter.QUERY,
                required=True,
                description="شناسه یکتای مزرعه",
            ),
        ],
        responses={
            200: build_response(
                ClusterCropRecommendationEnvelopeSerializer,
                "داده کلاسترها و پیشنهاد گیاه برای هر زیربلاک بازگردانده شد.",
            ),
            400: build_response(
                SoilErrorResponseSerializer,
                "پیشنیازهای مقایسه نامعتبر یا ناقص است.",
            ),
            404: build_response(
                SoilErrorResponseSerializer,
                "مزرعه یا خروجی KMeans یافت نشد.",
            ),
        },
        examples=[
            OpenApiExample(
                "نمونه درخواست پیشنهاد گیاه برای کلاسترها",
                value={"farm_uuid": "11111111-1111-1111-1111-111111111111"},
                parameter_only=("farm_uuid", "query"),
            )
        ],
    )
    def get(self, request):
        # Validate the farm_uuid query parameter.
        request_serializer = ClusterCropRecommendationRequestSerializer(data=request.query_params)
        if not request_serializer.is_valid():
            return Response(
                {"code": 400, "msg": "داده نامعتبر.", "data": request_serializer.errors},
                status=status.HTTP_400_BAD_REQUEST,
            )

        # Delegate the heavy lifting to the recommendation builder; its two
        # domain exceptions map to 404 and 400 respectively.
        farm_uuid = str(request_serializer.validated_data["farm_uuid"])
        try:
            recommendations = build_cluster_crop_recommendations(farm_uuid)
        except ClusterRecommendationNotFound as exc:
            return Response(
                {"code": 404, "msg": str(exc), "data": None},
                status=status.HTTP_404_NOT_FOUND,
            )
        except ClusterRecommendationValidationError as exc:
            return Response(
                {"code": 400, "msg": str(exc), "data": None},
                status=status.HTTP_400_BAD_REQUEST,
            )

        return Response(
            {"code": 200, "msg": "success", "data": recommendations},
            status=status.HTTP_200_OK,
        )
|
|
|
|
|
|
class RemoteSensingSubdivisionOptionListView(APIView):
    """List every stored K option for one subdivision result.

    The response flags which option is currently active and which one the
    pipeline recommended.
    """

    @extend_schema(
        tags=["Location Data"],
        summary="فهرست همه گزینههای K ذخیرهشده برای یک subdivision result",
        description="همه ترکیبهای K که برای این run/result محاسبه و ذخیره شدهاند را برمیگرداند و مشخص میکند کدامیک active و کدامیک recommended است.",
        responses={
            200: build_response(
                RemoteSensingSubdivisionOptionListEnvelopeSerializer,
                "فهرست گزینههای K بازگردانده شد.",
            ),
            404: build_response(
                SoilErrorResponseSerializer,
                "subdivision result موردنظر پیدا نشد.",
            ),
        },
    )
    def get(self, request, result_id):
        result = (
            RemoteSensingSubdivisionResult.objects.filter(pk=result_id)
            .prefetch_related("options__cluster_blocks")
            .first()
        )
        if result is None:
            return Response(
                {"code": 404, "msg": "subdivision result پیدا نشد.", "data": None},
                status=status.HTTP_404_NOT_FOUND,
            )

        # Options ordered by K; pick out the first active and first
        # recommended entry (None when no option carries the flag).
        ordered_options = list(result.options.all().order_by("requested_k"))
        active_k = None
        recommended_k = None
        for candidate in ordered_options:
            if active_k is None and candidate.is_active:
                active_k = candidate.requested_k
            if recommended_k is None and candidate.is_recommended:
                recommended_k = candidate.requested_k

        response_payload = {
            "result_id": result.id,
            "active_requested_k": active_k,
            "recommended_requested_k": recommended_k,
            "options": RemoteSensingSubdivisionOptionSerializer(ordered_options, many=True).data,
        }
        return Response(
            {"code": 200, "msg": "success", "data": response_payload},
            status=status.HTTP_200_OK,
        )
|
|
|
|
|
|
class RemoteSensingSubdivisionOptionActivateView(APIView):
    """Activate one of the pre-computed K options of a subdivision result.

    The chosen option becomes active and the main subdivision output is
    synced to it via `activate_subdivision_option`.
    """

    @extend_schema(
        tags=["Location Data"],
        summary="فعالسازی یک K ذخیرهشده برای subdivision result",
        description="کاربر میتواند یکی از Kهای از قبل محاسبهشده و ذخیرهشده را انتخاب کند تا active شود و خروجی اصلی subdivision بر همان مبنا sync شود.",
        request=RemoteSensingSubdivisionOptionActivateSerializer,
        responses={
            200: build_response(
                RemoteSensingSubdivisionOptionActivateEnvelopeSerializer,
                "K انتخابی فعال شد.",
            ),
            400: build_response(
                SoilErrorResponseSerializer,
                "درخواست نامعتبر است یا K انتخابی موجود نیست.",
            ),
            404: build_response(
                SoilErrorResponseSerializer,
                "subdivision result موردنظر پیدا نشد.",
            ),
        },
    )
    def post(self, request, result_id):
        serializer = RemoteSensingSubdivisionOptionActivateSerializer(data=request.data)
        if not serializer.is_valid():
            return Response(
                {"code": 400, "msg": "داده نامعتبر.", "data": serializer.errors},
                status=status.HTTP_400_BAD_REQUEST,
            )

        # Prefetch options with their blocks and assignment cells so the
        # activation step does not trigger per-row queries.
        result = (
            RemoteSensingSubdivisionResult.objects.filter(pk=result_id)
            .select_related("soil_location", "block_subdivision")
            .prefetch_related("options__cluster_blocks", "options__assignments__cell")
            .first()
        )
        if result is None:
            return Response(
                {"code": 404, "msg": "subdivision result پیدا نشد.", "data": None},
                status=status.HTTP_404_NOT_FOUND,
            )

        requested_k = serializer.validated_data["requested_k"]
        # Scan the prefetched options in Python to avoid another query.
        option = next(
            (candidate for candidate in result.options.all() if candidate.requested_k == requested_k),
            None,
        )
        if option is None:
            return Response(
                {
                    "code": 400,
                    "msg": "K انتخابی برای این subdivision result موجود نیست.",
                    "data": {"requested_k": [requested_k]},
                },
                status=status.HTTP_400_BAD_REQUEST,
            )

        # selection_source="user" records that a person, not the pipeline,
        # chose this K.
        activate_subdivision_option(option=option, selection_source="user")
        result.refresh_from_db()
        # Re-fetch with the relations needed by the response serializer so the
        # returned subdivision reflects the newly activated option.
        result = (
            RemoteSensingSubdivisionResult.objects.filter(pk=result.pk)
            .prefetch_related("assignments__cell", "cluster_blocks", "options__cluster_blocks")
            .first()
        )
        response_payload = {
            "result_id": result.id,
            "activated_requested_k": requested_k,
            "subdivision_result": RemoteSensingSubdivisionResultSerializer(result).data,
        }
        return Response(
            {"code": 200, "msg": "success", "data": response_payload},
            status=status.HTTP_200_OK,
        )
|
|
|
|
|
|
def _build_remote_sensing_run_status_payload(run: RemoteSensingRun, *, page: int, page_size: int) -> dict:
    """Build the full status payload for a remote-sensing run.

    For pending/running/failed runs only the status/task section is returned.
    For completed runs the payload is enriched with location, paginated cell
    observations, a summary, and (when present) the paginated subdivision result.
    """
    run_data = RemoteSensingRunSerializer(run).data
    task_id = (run.metadata or {}).get("task_id")
    task_data = _build_remote_sensing_task_payload(run)
    # A live Celery RETRY state overrides whatever status the DB row carries.
    effective_status = _apply_live_retry_state_override(run_data, task_data)
    status_payload = {
        "status": effective_status or run_data["status_label"],
        "source": "database",
        "run": run_data,
        "task_id": task_id,
        "task": task_data,
    }
    # In-flight or failed runs have no observation data to attach yet.
    if run.status in {RemoteSensingRun.STATUS_PENDING, RemoteSensingRun.STATUS_RUNNING}:
        return status_payload
    if run.status == RemoteSensingRun.STATUS_FAILURE:
        return status_payload

    # Cache-hit runs point at the original ("source") run that owns the data.
    source_run = _resolve_status_source_run(run)
    # NOTE(review): the location lookup uses `run`'s coordinates while the
    # observations below are scoped to `source_run` — presumably both refer to
    # the same location; confirm for cache hits that span locations.
    location = _get_location_by_lat_lon(run.soil_location.latitude, run.soil_location.longitude, prefetch=True)
    observations = _get_remote_sensing_observations(
        location=source_run.soil_location,
        block_code=source_run.block_code,
        start_date=source_run.temporal_start,
        end_date=source_run.temporal_end,
        run=source_run,
    )
    subdivision_result = _resolve_status_subdivision_result(run, source_run=source_run)

    # Start from an empty data section; filled in below when observations exist.
    response_payload = {
        **status_payload,
        "location": SoilLocationResponseSerializer(location).data,
        "block_code": source_run.block_code,
        "chunk_size_sqm": source_run.chunk_size_sqm,
        "temporal_extent": {
            "start_date": source_run.temporal_start.isoformat() if source_run.temporal_start else None,
            "end_date": source_run.temporal_end.isoformat() if source_run.temporal_end else None,
        },
        "summary": _empty_remote_sensing_summary(),
        "cells": [],
        "subdivision_result": None,
    }

    if not observations.exists():
        return response_payload

    paginated_observations = _paginate_observations(
        observations,
        page=page,
        page_size=page_size,
    )
    paginated_assignments = []
    pagination = {"cells": paginated_observations["pagination"]}
    if subdivision_result is not None:
        paginated = _paginate_assignments(
            subdivision_result,
            page=page,
            page_size=page_size,
        )
        paginated_assignments = paginated["items"]
        pagination["assignments"] = paginated["pagination"]

    response_payload["summary"] = _build_remote_sensing_summary(observations)
    response_payload["cells"] = RemoteSensingCellObservationSerializer(
        paginated_observations["items"],
        many=True,
    ).data
    response_payload["pagination"] = pagination

    if subdivision_result is not None:
        # The serializer renders only the pre-paginated assignment slice
        # passed through context, not the full assignment set.
        response_payload["subdivision_result"] = RemoteSensingSubdivisionResultSerializer(
            subdivision_result,
            context={"paginated_assignments": paginated_assignments},
        ).data

    return response_payload
|
|
|
|
|
|
def _get_remote_sensing_async_result(task_id: str):
|
|
try:
|
|
from celery.result import AsyncResult
|
|
except ImportError: # pragma: no cover - fallback when Celery is absent
|
|
return None
|
|
|
|
try:
|
|
return AsyncResult(task_id)
|
|
except Exception: # pragma: no cover - depends on Celery backend configuration
|
|
return None
|
|
|
|
|
|
def _serialize_task_value(value):
|
|
if value is None or isinstance(value, (str, int, float, bool)):
|
|
return value
|
|
if isinstance(value, dict):
|
|
return {str(key): _serialize_task_value(item) for key, item in value.items()}
|
|
if isinstance(value, (list, tuple)):
|
|
return [_serialize_task_value(item) for item in value]
|
|
return str(value)
|
|
|
|
|
|
def _build_remote_sensing_task_payload(run: RemoteSensingRun) -> dict:
    """Assemble the task/stage-progress payload from a run's metadata.

    Optional keys (failed_stage, metric_progress, retry, failure_reason,
    celery) are only present when the underlying data exists.
    """
    meta = dict(run.metadata or {})
    stamps = dict(meta.get("timestamps") or {})
    details = dict(meta.get("stage_details") or {})
    stage = meta.get("stage")

    payload = {
        "current_stage": stage,
        "current_stage_details": details.get(stage, {}),
        "timestamps": stamps,
        "stages": _build_remote_sensing_stage_entries(
            current_stage=stage,
            stage_details=details,
            timestamps=stamps,
            run_status=run.status,
        ),
    }

    failed_stage = meta.get("failed_stage")
    if failed_stage:
        payload["failed_stage"] = failed_stage

    progress = (details.get("fetching_remote_metrics") or {}).get("metric_progress")
    if progress:
        payload["metric_progress"] = progress

    retry_context = details.get("retrying")
    if retry_context:
        payload["retry"] = retry_context
        payload["last_error"] = retry_context.get("last_error")

    # Surface a failure reason only for failed runs/stages that recorded one.
    if meta.get("stage") == "failed" or run.status == RemoteSensingRun.STATUS_FAILURE:
        reason = meta.get("failure_reason") or run.error_message
        if reason:
            payload["failure_reason"] = reason

    task_id = meta.get("task_id")
    if task_id:
        celery_payload = _build_remote_sensing_celery_payload(str(task_id))
        if celery_payload is not None:
            payload["celery"] = celery_payload

    return payload
|
|
|
|
|
|
def _apply_live_retry_state_override(run_data: dict[str, Any], task_data: dict[str, Any]) -> str | None:
    """When Celery reports RETRY, rewrite run/task payloads to 'retrying'.

    Mutates both dicts in place and returns ``"retrying"``; returns ``None``
    (no mutation) when the live Celery state is anything else.
    """
    celery_info = task_data.get("celery") or {}
    if celery_info.get("state") != "RETRY":
        return None

    retry_context = dict(task_data.get("retry") or {})
    if not retry_context:
        # No persisted retry context yet; synthesize one from whatever
        # failure information the payloads already carry.
        failed_details = {}
        if task_data.get("current_stage") == "failed":
            failed_details = task_data.get("current_stage_details", {})
        retry_context = {
            "retry_count": None,
            "retry_delay_seconds": None,
            "last_error": task_data.get("failure_reason") or celery_info.get("info"),
            "failed_stage": task_data.get("failed_stage"),
            "failed_stage_details": failed_details,
        }

    task_data["retry"] = retry_context
    task_data["last_error"] = retry_context.get("last_error") or celery_info.get("info")
    task_data["current_stage"] = "retrying"
    task_data["current_stage_details"] = retry_context
    # A retrying task has not terminally failed; drop any stale reason.
    task_data.pop("failure_reason", None)
    _upsert_retrying_stage_entry(task_data, retry_context)

    for key in ("status_label", "pipeline_status", "stage"):
        run_data[key] = "retrying"
    return "retrying"
|
|
|
|
|
|
def _upsert_retrying_stage_entry(task_data: dict[str, Any], retry_context: dict[str, Any]) -> None:
|
|
stages = list(task_data.get("stages") or [])
|
|
retrying_entry = {
|
|
"name": "retrying",
|
|
"status": "running",
|
|
"entered_at": (task_data.get("timestamps") or {}).get("retrying_at"),
|
|
"details": retry_context,
|
|
}
|
|
for index, entry in enumerate(stages):
|
|
if entry.get("name") == "retrying":
|
|
stages[index] = retrying_entry
|
|
task_data["stages"] = stages
|
|
return
|
|
stages.append(retrying_entry)
|
|
task_data["stages"] = stages
|
|
|
|
|
|
def _build_remote_sensing_stage_entries(
    *,
    current_stage: str | None,
    stage_details: dict,
    timestamps: dict,
    run_status: str,
) -> list[dict]:
    """Build ordered stage entries (name/status/entered_at/details) for a run.

    A stage appears when it is current, has recorded details, or has a
    ``<name>_at`` timestamp; an unknown current stage is appended at the end.
    Only the current stage reflects the run status — all others read
    "completed".
    """
    visible = []
    for name in REMOTE_SENSING_RUN_STAGE_ORDER:
        if name == current_stage or name in stage_details or f"{name}_at" in timestamps:
            visible.append(name)
    if current_stage and current_stage not in visible:
        visible.append(current_stage)

    current_status_map = {
        RemoteSensingRun.STATUS_FAILURE: "failed",
        RemoteSensingRun.STATUS_PENDING: "pending",
        RemoteSensingRun.STATUS_RUNNING: "running",
    }

    entries = []
    for name in visible:
        if name == current_stage and run_status in current_status_map:
            entry_status = current_status_map[run_status]
        else:
            entry_status = "completed"
        entries.append(
            {
                "name": name,
                "status": entry_status,
                "entered_at": timestamps.get(f"{name}_at"),
                "details": stage_details.get(name, {}),
            }
        )
    return entries
|
|
|
|
|
|
def _build_remote_sensing_celery_payload(task_id: str) -> dict | None:
    """Return a JSON-safe snapshot of the live Celery task state for ``task_id``.

    Returns ``None`` when Celery is unavailable or the result backend raises
    while being queried. Previously ``failed()`` and ``result`` were accessed
    outside the try-guard, so a backend error there could propagate instead of
    degrading to ``None``; all backend access is now inside one guard, and
    ``ready()`` is queried once instead of three times.
    """
    async_result = _get_remote_sensing_async_result(task_id)
    if async_result is None:
        return None

    try:
        ready = bool(async_result.ready())
        payload = {
            "state": str(async_result.state),
            "ready": ready,
            "successful": bool(async_result.successful()) if ready else False,
            "failed": bool(async_result.failed()) if ready else False,
        }
        # Every attribute here may hit the result backend and raise depending
        # on its configuration — fetch them inside the same guard.
        info = getattr(async_result, "info", None)
        error = async_result.result if payload["failed"] else None
    except Exception:  # pragma: no cover - depends on Celery backend configuration
        return None

    if info not in (None, {}):
        payload["info"] = _serialize_task_value(info)
    if payload["failed"]:
        payload["error"] = _serialize_task_value(error)

    return payload
|
|
|
|
|
|
def _create_cached_status_run(
    *,
    location: SoilLocation,
    farm_uuid: str,
    block_code: str,
    temporal_start,
    temporal_end,
    cached_response: dict[str, Any],
) -> RemoteSensingRun:
    """Persist a synthetic, already-successful run representing a cache hit.

    The new run records where the cached data came from (``source_run_id`` /
    ``source_result_id``) so later status requests can be redirected to the
    original run's observations.
    """
    source_run_id = ((cached_response.get("run") or {}).get("id"))
    source_result_id = ((cached_response.get("subdivision_result") or {}).get("id"))
    # Synthetic task id: no real Celery task ever runs for a cache hit.
    task_id = str(uuid4())
    return RemoteSensingRun.objects.create(
        soil_location=location,
        block_subdivision=None,
        block_code=block_code or "",
        chunk_size_sqm=int(cached_response.get("chunk_size_sqm") or _resolve_chunk_size_for_location(location, block_code)),
        temporal_start=temporal_start,
        temporal_end=temporal_end,
        status=RemoteSensingRun.STATUS_SUCCESS,
        started_at=timezone.now(),
        finished_at=timezone.now(),
        metadata={
            "requested_via": "api",
            "farm_uuid": farm_uuid,
            "task_id": task_id,
            "stage": "completed",
            "status_label": "completed",
            # Prefer features recorded on the cached result, then the cached
            # run, then the project defaults.
            "selected_features": list(
                ((cached_response.get("subdivision_result") or {}).get("selected_features"))
                or ((cached_response.get("run") or {}).get("selected_features"))
                or DEFAULT_CLUSTER_FEATURES
            ),
            "scope": "all_blocks",
            "cache_hit": True,
            "source_run_id": source_run_id,
            "source_result_id": source_result_id,
            "timestamps": {
                "queued_at": timezone.now().isoformat(),
                "completed_at": timezone.now().isoformat(),
            },
        },
    )
|
|
|
|
|
|
def _resolve_status_source_run(run: RemoteSensingRun) -> RemoteSensingRun:
    """Follow a cache-hit run's ``source_run_id`` pointer back to the original run.

    Falls back to ``run`` itself when no pointer is recorded or the source
    run no longer exists.
    """
    metadata = dict(run.metadata or {})
    source_run_id = metadata.get("source_run_id")
    if not source_run_id:
        return run
    source = (
        RemoteSensingRun.objects.filter(pk=source_run_id)
        .select_related("soil_location")
        .first()
    )
    return source or run
|
|
|
|
|
|
def _resolve_status_subdivision_result(
    run: RemoteSensingRun,
    *,
    source_run: RemoteSensingRun,
) -> RemoteSensingSubdivisionResult | None:
    """Resolve the subdivision result backing a status request.

    Prefers the explicit ``source_result_id`` recorded on cache-hit runs;
    otherwise falls back to the source run's own related result, if any.
    """
    source_result_id = dict(run.metadata or {}).get("source_result_id")
    if not source_result_id:
        return getattr(source_run, "subdivision_result", None)
    queryset = RemoteSensingSubdivisionResult.objects.filter(pk=source_result_id)
    return queryset.prefetch_related("assignments__cell", "cluster_blocks").first()
|
|
|
|
|
|
def _get_location_by_lat_lon(lat, lon, *, prefetch: bool = False):
    """Fetch the SoilLocation at (lat, lon), rounding both to 6 decimal places.

    Pass ``prefetch=True`` to also prefetch the related block subdivisions.
    Returns ``None`` when no matching location exists.
    """
    queryset = SoilLocation.objects.filter(
        latitude=round(lat, 6),
        longitude=round(lon, 6),
    )
    if prefetch:
        queryset = queryset.prefetch_related("block_subdivisions")
    return queryset.first()
|
|
|
|
|
|
def _sync_defined_blocks(location: SoilLocation, blocks: list[dict]) -> None:
    """Reconcile farmer-defined blocks with stored BlockSubdivision rows.

    Creates missing subdivisions, updates existing ones, and deletes (plus
    clears derived analysis data for) subdivisions no longer present in
    ``blocks``. A changed boundary resets the subdivision back to the bare
    "defined" state. No-op when ``blocks`` is empty. Runs atomically.
    """
    if not blocks:
        return

    with transaction.atomic():
        # Normalize the incoming payload: guarantee a block_code, a boundary
        # dict, and 1-based ordering even when fields are missing.
        normalized_blocks = []
        for index, block in enumerate(blocks):
            normalized_blocks.append(
                {
                    "block_code": str(block.get("block_code") or f"block-{index + 1}").strip(),
                    "boundary": block.get("boundary") or {},
                    "order": int(block.get("order") or index + 1),
                }
            )

        normalized_codes = {block["block_code"] for block in normalized_blocks}
        existing_subdivisions = {
            subdivision.block_code: subdivision
            for subdivision in location.block_subdivisions.all()
        }

        # Remove subdivisions (and their derived analysis state) that were
        # dropped from the submitted layout.
        for block_code, subdivision in existing_subdivisions.items():
            if block_code not in normalized_codes:
                _clear_block_analysis_state(location, block_code, subdivision=subdivision)
                subdivision.delete()

        for block in normalized_blocks:
            block_code = block["block_code"]
            boundary = block["boundary"]
            subdivision = existing_subdivisions.get(block_code)
            definition_metadata = {
                "definition_source": "farmer_input",
                "order": block["order"],
            }
            if subdivision is None:
                # New block: created in the bare "defined" state, no grid yet.
                BlockSubdivision.objects.create(
                    soil_location=location,
                    block_code=block_code,
                    source_boundary=boundary,
                    chunk_size_sqm=900,
                    grid_points=[],
                    centroid_points=[],
                    grid_point_count=0,
                    centroid_count=0,
                    status="defined",
                    metadata=definition_metadata,
                )
                continue

            boundary_changed = subdivision.source_boundary != boundary
            metadata = dict(subdivision.metadata or {})
            metadata.update(definition_metadata)
            subdivision.source_boundary = boundary
            subdivision.chunk_size_sqm = 900
            subdivision.metadata = metadata
            if boundary_changed:
                # A new boundary invalidates everything derived from the old
                # one: purge analysis state and reset the computed fields.
                _clear_block_analysis_state(location, block_code, subdivision=subdivision)
                subdivision.grid_points = []
                subdivision.centroid_points = []
                subdivision.grid_point_count = 0
                subdivision.centroid_count = 0
                subdivision.status = "defined"
                # NOTE(review): this overwrites the merged metadata assigned a
                # few lines above with only the definition keys — looks like a
                # deliberate full reset on boundary change, but confirm.
                subdivision.metadata = definition_metadata
                subdivision.elbow_plot = None
                subdivision.save(
                    update_fields=[
                        "source_boundary",
                        "chunk_size_sqm",
                        "grid_points",
                        "centroid_points",
                        "grid_point_count",
                        "centroid_count",
                        "status",
                        "metadata",
                        "elbow_plot",
                        "updated_at",
                    ]
                )
                continue

            # Boundary unchanged: persist only the definition bookkeeping.
            subdivision.save(
                update_fields=[
                    "source_boundary",
                    "chunk_size_sqm",
                    "metadata",
                    "updated_at",
                ]
            )
|
|
|
|
|
|
def _clear_block_analysis_state(
    location: SoilLocation,
    block_code: str,
    *,
    subdivision: BlockSubdivision | None = None,
) -> None:
    """Delete all derived analysis data for one block of a location.

    Removes grid cells, subdivision results, and remote-sensing runs scoped
    to (location, block_code). When a subdivision is supplied, its computed
    fields are reset in memory only — the caller is responsible for saving.
    """
    scope = {"soil_location": location, "block_code": block_code or ""}
    for model in (AnalysisGridCell, RemoteSensingSubdivisionResult, RemoteSensingRun):
        model.objects.filter(**scope).delete()

    if subdivision is None:
        return
    subdivision.grid_points = []
    subdivision.centroid_points = []
    subdivision.grid_point_count = 0
    subdivision.centroid_count = 0
    subdivision.status = "defined"
    subdivision.elbow_plot = None
|
|
|
|
|
|
def _resolve_chunk_size_for_location(location: SoilLocation, block_code: str) -> int:
    """Determine the analysis chunk size (m²) for a location/block.

    Resolution order: the block's stored subdivision, then the location's
    block_layout (location-level summary when no block_code, per-block
    summary otherwise), finally the 900 m² default.
    """
    if block_code:
        subdivision = location.block_subdivisions.filter(block_code=block_code).first()
        if subdivision is not None:
            return int(subdivision.chunk_size_sqm or 900)

    layout = location.block_layout or {}
    if not block_code:
        summary = layout.get("analysis_grid_summary", {})
        return int(summary.get("chunk_size_sqm") or 900)

    for entry in layout.get("blocks", []):
        if entry.get("block_code") == block_code:
            return int(entry.get("analysis_grid_summary", {}).get("chunk_size_sqm") or 900)
    return 900
|
|
|
|
|
|
def _build_cached_remote_sensing_response(
    *,
    location: SoilLocation,
    farm_uuid: str,
    block_code: str,
    start_date,
    end_date,
    page: int,
    page_size: int,
) -> dict[str, Any] | None:
    """Try to answer a remote-sensing request entirely from stored data.

    Returns a full "success" payload on a cache hit, a processing/not_found
    stub when a run exists for the window but has no observations yet, or
    ``None`` when nothing usable is stored (caller should start a fresh run).
    """
    run = _get_latest_remote_sensing_run(
        location=location,
        farm_uuid=farm_uuid,
        block_code=block_code,
        start_date=start_date,
        end_date=end_date,
    )
    subdivision_result = _get_remote_sensing_subdivision_result(
        location=location,
        farm_uuid=farm_uuid,
        block_code=block_code,
        start_date=start_date,
        end_date=end_date,
    )
    observations = _get_remote_sensing_observations(
        location=location,
        block_code=block_code,
        start_date=start_date,
        end_date=end_date,
        run=run if run is not None else getattr(subdivision_result, "run", None),
    )
    # Without a matching run or result, any observations found would be
    # unattributable leftovers — discard them.
    if run is None and subdivision_result is None:
        observations = observations.none()

    if not observations.exists():
        # Nothing for this exact window; an older completed run for the same
        # farm/block may still serve as a fallback answer.
        fallback_cached_response = _build_fallback_cached_remote_sensing_response(
            location=location,
            farm_uuid=farm_uuid,
            block_code=block_code,
            page=page,
            page_size=page_size,
        )
        if fallback_cached_response is not None:
            return fallback_cached_response
        if run is None:
            return None
        # A run exists but produced no observations (yet): report its state.
        processing = run.status in {
            RemoteSensingRun.STATUS_PENDING,
            RemoteSensingRun.STATUS_RUNNING,
        }
        source = "processing" if processing else "database"
        status_label = "processing" if processing else "not_found"
        payload = {
            "status": status_label,
            "source": source,
            "location": SoilLocationResponseSerializer(location).data,
            "block_code": block_code or "",
            "chunk_size_sqm": getattr(run, "chunk_size_sqm", None),
            "temporal_extent": {
                "start_date": start_date.isoformat(),
                "end_date": end_date.isoformat(),
            },
            "summary": _empty_remote_sensing_summary(),
            "cells": [],
            "run": RemoteSensingRunSerializer(run).data,
            "subdivision_result": None,
            "metadata": {
                "farm_uuid": farm_uuid,
                "cache_hit": True,
            },
        }
        return payload

    paginated_observations = _paginate_observations(
        observations,
        page=page,
        page_size=page_size,
    )
    paginated_assignments = []
    pagination = {"cells": paginated_observations["pagination"]}
    if subdivision_result is not None:
        paginated = _paginate_assignments(
            subdivision_result,
            page=page,
            page_size=page_size,
        )
        paginated_assignments = paginated["items"]
        pagination["assignments"] = paginated["pagination"]

    subdivision_data = None
    if subdivision_result is not None:
        # The serializer renders only the pre-paginated assignment slice.
        subdivision_data = RemoteSensingSubdivisionResultSerializer(
            subdivision_result,
            context={"paginated_assignments": paginated_assignments},
        ).data

    payload = {
        "status": "success",
        "source": "database",
        "location": SoilLocationResponseSerializer(location).data,
        "block_code": block_code or "",
        # All cells of a run share a chunk size; read it off the first one.
        "chunk_size_sqm": observations.first().cell.chunk_size_sqm,
        "temporal_extent": {
            "start_date": start_date.isoformat(),
            "end_date": end_date.isoformat(),
        },
        "summary": _build_remote_sensing_summary(observations),
        "cells": RemoteSensingCellObservationSerializer(
            paginated_observations["items"],
            many=True,
        ).data,
        "run": RemoteSensingRunSerializer(run).data if run else None,
        "subdivision_result": subdivision_data,
        "pagination": pagination,
        "metadata": {
            "farm_uuid": farm_uuid,
            "cache_hit": True,
        },
    }
    return payload
|
|
|
|
|
|
def _build_fallback_cached_remote_sensing_response(
    *,
    location: SoilLocation,
    farm_uuid: str,
    block_code: str,
    page: int,
    page_size: int,
) -> dict[str, Any] | None:
    """Serve the newest completed run's data when the requested window missed.

    Looks up the latest successful run for the farm/block (any temporal
    window) and, if it still has observations, builds a full cached response
    tagged ``cache_match: latest_completed_for_farm``. Returns ``None`` when
    no such run or data exists.
    """
    fallback_run = _get_latest_completed_remote_sensing_run(
        location=location,
        farm_uuid=farm_uuid,
        block_code=block_code,
    )
    if fallback_run is None:
        return None

    # Observations are scoped to the fallback run's own temporal window.
    fallback_observations = _get_remote_sensing_observations(
        location=location,
        block_code=block_code,
        start_date=fallback_run.temporal_start,
        end_date=fallback_run.temporal_end,
        run=fallback_run,
    )
    if not fallback_observations.exists():
        return None

    fallback_result = _get_remote_sensing_subdivision_result(
        location=location,
        farm_uuid=farm_uuid,
        block_code=block_code,
        start_date=fallback_run.temporal_start,
        end_date=fallback_run.temporal_end,
    )
    paginated_observations = _paginate_observations(
        fallback_observations,
        page=page,
        page_size=page_size,
    )
    paginated_assignments = []
    pagination = {"cells": paginated_observations["pagination"]}
    if fallback_result is not None:
        paginated = _paginate_assignments(
            fallback_result,
            page=page,
            page_size=page_size,
        )
        paginated_assignments = paginated["items"]
        pagination["assignments"] = paginated["pagination"]

    subdivision_data = None
    if fallback_result is not None:
        # The serializer renders only the current assignment page.
        subdivision_data = RemoteSensingSubdivisionResultSerializer(
            fallback_result,
            context={"paginated_assignments": paginated_assignments},
        ).data

    return {
        "status": "success",
        "source": "database",
        "location": SoilLocationResponseSerializer(location).data,
        "block_code": block_code or "",
        "chunk_size_sqm": fallback_run.chunk_size_sqm,
        "temporal_extent": {
            "start_date": fallback_run.temporal_start.isoformat() if fallback_run.temporal_start else None,
            "end_date": fallback_run.temporal_end.isoformat() if fallback_run.temporal_end else None,
        },
        "summary": _build_remote_sensing_summary(fallback_observations),
        "cells": RemoteSensingCellObservationSerializer(
            paginated_observations["items"],
            many=True,
        ).data,
        "run": RemoteSensingRunSerializer(fallback_run).data,
        "subdivision_result": subdivision_data,
        "pagination": pagination,
        "metadata": {
            "farm_uuid": farm_uuid,
            "cache_hit": True,
            "cache_match": "latest_completed_for_farm",
        },
    }
|
|
|
|
|
|
def _resolve_live_remote_sensing_window(payload: dict[str, Any]):
|
|
temporal_start = payload.get("temporal_start")
|
|
temporal_end = payload.get("temporal_end")
|
|
if temporal_start and temporal_end:
|
|
return temporal_start, temporal_end
|
|
days = int(payload.get("days") or 30)
|
|
end_date = timezone.localdate() - timedelta(days=1)
|
|
start_date = end_date - timedelta(days=days - 1)
|
|
return start_date, end_date
|
|
|
|
|
|
def _resolve_cluster_block_geometry(cluster_block: RemoteSensingClusterBlock) -> dict[str, Any]:
    """Return a GeoJSON geometry dict for a cluster block.

    Uses the stored geometry when it is complete; otherwise rebuilds a
    Polygon/MultiPolygon by collecting the member cells' polygon rings.
    Returns an empty dict when nothing usable exists.
    """
    stored = dict(cluster_block.geometry or {})
    if stored.get("type") and stored.get("coordinates"):
        return stored

    member_codes = list(cluster_block.cell_codes or [])
    if not member_codes:
        return {}

    cell_geometries = (
        AnalysisGridCell.objects.filter(
            soil_location=cluster_block.soil_location,
            cell_code__in=member_codes,
        )
        .order_by("cell_code")
        .values_list("geometry", flat=True)
    )

    polygons: list[list[list[list[float]]]] = []
    for raw_geometry in cell_geometries:
        geometry = dict(raw_geometry or {})
        coordinates = geometry.get("coordinates") or []
        if not coordinates:
            continue
        if geometry.get("type") == "Polygon":
            polygons.append(coordinates)
        elif geometry.get("type") == "MultiPolygon":
            # Flatten multi-polygons into the combined polygon list.
            polygons.extend(coordinates)

    if not polygons:
        return {}
    if len(polygons) == 1:
        return {"type": "Polygon", "coordinates": polygons[0]}
    return {"type": "MultiPolygon", "coordinates": polygons}
|
|
|
|
|
|
def _build_virtual_cluster_block_cell(
|
|
*,
|
|
cluster_block: RemoteSensingClusterBlock,
|
|
geometry: dict[str, Any],
|
|
):
|
|
return SimpleNamespace(
|
|
cell_code=f"cluster-{cluster_block.uuid}",
|
|
block_code=cluster_block.block_code,
|
|
soil_location_id=cluster_block.soil_location_id,
|
|
chunk_size_sqm=cluster_block.chunk_size_sqm,
|
|
geometry=geometry,
|
|
)
|
|
|
|
|
|
def _build_cached_cluster_block_live_payload(
    *,
    cluster_block: RemoteSensingClusterBlock,
    temporal_start,
    temporal_end,
) -> dict[str, Any] | None:
    """Serve a cluster-block live request from already-stored observations.

    Only applicable when the requested window exactly matches the parent
    result's window and observations exist for the member cells; otherwise
    returns ``None`` so the caller fetches live data.
    """
    result = cluster_block.result
    if result.temporal_start != temporal_start or result.temporal_end != temporal_end:
        return None

    observations = (
        AnalysisGridObservation.objects.select_related("cell")
        .filter(
            cell__soil_location=cluster_block.soil_location,
            cell__cell_code__in=list(cluster_block.cell_codes or []),
            temporal_start=temporal_start,
            temporal_end=temporal_end,
        )
        .order_by("cell__cell_code")
    )
    if not observations.exists():
        return None

    # Cluster-level metrics are plain means over member-cell observations.
    metrics = observations.aggregate(
        ndvi=Avg("ndvi"),
        ndwi=Avg("ndwi"),
        soil_vv=Avg("soil_vv"),
        soil_vv_db=Avg("soil_vv_db"),
    )
    return {
        "status": "success",
        "source": "database",
        "cluster_block": RemoteSensingClusterBlockSerializer(cluster_block).data,
        "temporal_extent": {
            "start_date": temporal_start.isoformat(),
            "end_date": temporal_end.isoformat(),
        },
        "selected_features": list(DEFAULT_CLUSTER_FEATURES),
        "summary": {
            # Prefer the persisted cell count; fall back to counting rows.
            "cell_count": int(cluster_block.cell_count or observations.count()),
            "ndvi_mean": _round_or_none(metrics.get("ndvi")),
            "ndwi_mean": _round_or_none(metrics.get("ndwi")),
            "soil_vv_db_mean": _round_or_none(metrics.get("soil_vv_db")),
        },
        "metrics": {
            "ndvi": _round_or_none(metrics.get("ndvi")),
            "ndwi": _round_or_none(metrics.get("ndwi")),
            "soil_vv": _round_or_none(metrics.get("soil_vv")),
            "soil_vv_db": _round_or_none(metrics.get("soil_vv_db")),
        },
        "metadata": {
            "requested_cluster_uuid": str(cluster_block.uuid),
            "cache_hit": True,
            "source_run_id": result.run_id,
            "source_result_id": result.id,
        },
    }
|
|
|
|
|
|
def _get_remote_sensing_observations(*, location, block_code: str, start_date, end_date, run=None):
    """Query grid-cell observations for a location/block and temporal window.

    Optionally restricted to a single run; results are ordered by cell code.
    """
    filters = {
        "cell__soil_location": location,
        "cell__block_code": block_code or "",
        "temporal_start": start_date,
        "temporal_end": end_date,
    }
    if run is not None:
        filters["run"] = run
    return (
        AnalysisGridObservation.objects.select_related("cell", "run")
        .filter(**filters)
        .order_by("cell__cell_code")
    )
|
|
|
|
|
|
def _select_farm_scoped_run(runs, farm_uuid: str):
|
|
legacy_candidate = None
|
|
for run in runs:
|
|
metadata = dict(run.metadata or {})
|
|
scoped_farm_uuid = metadata.get("farm_uuid")
|
|
if scoped_farm_uuid == farm_uuid:
|
|
return run
|
|
if scoped_farm_uuid in (None, "") and legacy_candidate is None:
|
|
legacy_candidate = run
|
|
return legacy_candidate
|
|
|
|
|
|
def _get_latest_remote_sensing_run(*, location, farm_uuid: str, block_code: str, start_date, end_date):
    """Return the newest run for location/block/window, scoped to ``farm_uuid``.

    Delegates farm scoping (including the legacy untagged fallback) to
    ``_select_farm_scoped_run``.
    """
    candidates = RemoteSensingRun.objects.filter(
        soil_location=location,
        block_code=block_code or "",
        temporal_start=start_date,
        temporal_end=end_date,
    ).order_by("-created_at", "-id")
    return _select_farm_scoped_run(list(candidates), farm_uuid)
|
|
|
|
|
|
def _get_latest_completed_remote_sensing_run(*, location, farm_uuid: str, block_code: str):
    """Return the newest successful run for location/block, scoped to ``farm_uuid``.

    Any temporal window is accepted; farm scoping (including the legacy
    untagged fallback) is handled by ``_select_farm_scoped_run``.
    """
    candidates = RemoteSensingRun.objects.filter(
        soil_location=location,
        block_code=block_code or "",
        status=RemoteSensingRun.STATUS_SUCCESS,
    ).order_by("-created_at", "-id")
    return _select_farm_scoped_run(list(candidates), farm_uuid)
|
|
|
|
|
|
def _get_remote_sensing_subdivision_result(
    *,
    location,
    farm_uuid: str,
    block_code: str,
    start_date,
    end_date,
):
    """Return the newest subdivision result for the window, farm-scoped.

    Prefers a result whose run is tagged with ``farm_uuid``; otherwise falls
    back to the newest result whose run carries no farm tag; else ``None``.
    """
    candidates = (
        RemoteSensingSubdivisionResult.objects.filter(
            soil_location=location,
            block_code=block_code or "",
            temporal_start=start_date,
            temporal_end=end_date,
        )
        .select_related("run")
        .prefetch_related("assignments__cell", "cluster_blocks")
        .order_by("-created_at", "-id")
    )
    fallback = None
    for candidate in candidates:
        run = getattr(candidate, "run", None)
        tagged_uuid = dict(getattr(run, "metadata", {}) or {}).get("farm_uuid")
        if tagged_uuid == farm_uuid:
            return candidate
        if fallback is None and tagged_uuid in (None, ""):
            fallback = candidate
    return fallback
|
|
|
|
|
|
def _build_remote_sensing_summary(observations):
    """Summarize an observation queryset.

    Returns the observation count plus the mean NDVI, NDWI, and soil-VV (dB)
    values, each rounded via ``_round_or_none`` (``None`` when there is no
    data). The previous ``cell_count=Avg("cell_id")`` aggregate — the mean of
    a primary-key column — was never read and has been dropped; the returned
    ``cell_count`` always came from ``observations.count()``.
    """
    aggregates = observations.aggregate(
        ndvi_mean=Avg("ndvi"),
        ndwi_mean=Avg("ndwi"),
        soil_vv_db_mean=Avg("soil_vv_db"),
    )
    return {
        "cell_count": observations.count(),
        "ndvi_mean": _round_or_none(aggregates.get("ndvi_mean")),
        "ndwi_mean": _round_or_none(aggregates.get("ndwi_mean")),
        "soil_vv_db_mean": _round_or_none(aggregates.get("soil_vv_db_mean")),
    }
|
|
|
|
|
|
def _empty_remote_sensing_summary():
|
|
return {
|
|
"cell_count": 0,
|
|
"ndvi_mean": None,
|
|
"ndwi_mean": None,
|
|
"soil_vv_db_mean": None,
|
|
}
|
|
|
|
|
|
def _round_or_none(value):
|
|
if value is None:
|
|
return None
|
|
return round(float(value), 6)
|
|
|
|
|
|
def _paginate_assignments(result: RemoteSensingSubdivisionResult, *, page: int, page_size: int) -> dict:
    """Paginate a subdivision result's cluster assignments.

    Orders assignments by (cluster_label, cell code) and delegates the
    page-size clamping / empty-queryset / out-of-range-page handling to
    ``_paginate_observations``, whose body this function previously
    duplicated line-for-line.
    """
    assignments = result.assignments.select_related("cell").order_by(
        "cluster_label", "cell__cell_code"
    )
    # Identical pagination contract as for cells — one shared implementation.
    return _paginate_observations(assignments, page=page, page_size=page_size)
|
|
|
|
|
|
def _safe_positive_int(value, *, default: int) -> int:
|
|
try:
|
|
parsed = int(value)
|
|
except (TypeError, ValueError):
|
|
return default
|
|
return parsed if parsed > 0 else default
|
|
|
|
|
|
|
|
def _paginate_observations(observations, *, page: int, page_size: int) -> dict:
    """Paginate a queryset into ``{"items", "pagination"}``.

    Clamps ``page_size`` into [1, MAX_REMOTE_SENSING_PAGE_SIZE]. An empty
    queryset yields zero pages; an out-of-range ``page`` falls back to the
    last available page.
    """
    page_size = min(max(page_size, 1), MAX_REMOTE_SENSING_PAGE_SIZE)
    paginator = Paginator(observations, page_size)

    if not paginator.count:
        empty_meta = {
            "page": 1,
            "page_size": page_size,
            "total_items": 0,
            "total_pages": 0,
            "has_next": False,
            "has_previous": False,
        }
        return {"items": [], "pagination": empty_meta}

    try:
        current = paginator.page(page)
    except EmptyPage:
        # Requested page beyond the end: serve the final page instead.
        current = paginator.page(paginator.num_pages)

    return {
        "items": list(current.object_list),
        "pagination": {
            "page": current.number,
            "page_size": page_size,
            "total_items": paginator.count,
            "total_pages": paginator.num_pages,
            "has_next": current.has_next(),
            "has_previous": current.has_previous(),
        },
    }
|