1002 lines
38 KiB
Python
1002 lines
38 KiB
Python
from datetime import timedelta
|
|
|
|
from django.apps import apps
|
|
from django.core.paginator import EmptyPage, Paginator
|
|
from django.db.models import Avg
|
|
from django.db import transaction
|
|
from django.utils import timezone
|
|
from rest_framework import status
|
|
from drf_spectacular.utils import (
|
|
OpenApiExample,
|
|
OpenApiParameter,
|
|
OpenApiResponse,
|
|
extend_schema,
|
|
inline_serializer,
|
|
)
|
|
from rest_framework import serializers as drf_serializers
|
|
from rest_framework.response import Response
|
|
from rest_framework.views import APIView
|
|
|
|
from config.openapi import (
|
|
build_envelope_serializer,
|
|
build_response,
|
|
)
|
|
from .models import (
|
|
AnalysisGridCell,
|
|
AnalysisGridObservation,
|
|
BlockSubdivision,
|
|
RemoteSensingRun,
|
|
RemoteSensingSubdivisionResult,
|
|
SoilLocation,
|
|
)
|
|
from farm_data.models import SensorData
|
|
|
|
from .data_driven_subdivision import DEFAULT_CLUSTER_FEATURES
|
|
from .serializers import (
|
|
BlockSubdivisionSerializer,
|
|
NdviHealthRequestSerializer,
|
|
NdviHealthResponseSerializer,
|
|
RemoteSensingCellObservationSerializer,
|
|
RemoteSensingResponseSerializer,
|
|
RemoteSensingFarmRequestSerializer,
|
|
RemoteSensingRunSerializer,
|
|
RemoteSensingRunStatusResponseSerializer,
|
|
RemoteSensingSummarySerializer,
|
|
RemoteSensingSubdivisionResultSerializer,
|
|
SoilDataRequestSerializer,
|
|
SoilLocationResponseSerializer,
|
|
)
|
|
from .tasks import run_remote_sensing_analysis_task
|
|
|
|
# Hard cap on page_size for every remote-sensing pagination helper below.
MAX_REMOTE_SENSING_PAGE_SIZE = 200

# OpenAPI-only inline schema describing the "data" object of SoilDataView responses.
SoilLocationPayloadSerializer = inline_serializer(
    name="SoilLocationPayloadSerializer",
    fields={
        "source": drf_serializers.CharField(),
        "id": drf_serializers.IntegerField(),
        "lon": drf_serializers.DecimalField(max_digits=9, decimal_places=6),
        "lat": drf_serializers.DecimalField(max_digits=9, decimal_places=6),
        "input_block_count": drf_serializers.IntegerField(),
        "farm_boundary": drf_serializers.JSONField(),
        "block_layout": drf_serializers.JSONField(),
        "block_subdivisions": BlockSubdivisionSerializer(many=True),
        "satellite_snapshots": drf_serializers.JSONField(),
    },
)
# Standard {code, msg, data} envelope around the soil-location payload.
SoilDataResponseSerializer = build_envelope_serializer(
    "SoilDataResponseSerializer",
    SoilLocationPayloadSerializer,
)
# Error envelope: "data" may be omitted or null.
SoilErrorResponseSerializer = build_envelope_serializer(
    "SoilErrorResponseSerializer",
    data_required=False,
    allow_null=True,
)
NdviHealthEnvelopeSerializer = build_envelope_serializer(
    "NdviHealthEnvelopeSerializer",
    NdviHealthResponseSerializer,
)
RemoteSensingEnvelopeSerializer = build_envelope_serializer(
    "RemoteSensingEnvelopeSerializer",
    RemoteSensingResponseSerializer,
)
# Envelope for the 202 "queued" response of RemoteSensingAnalysisView.post.
RemoteSensingQueuedEnvelopeSerializer = build_envelope_serializer(
    "RemoteSensingQueuedEnvelopeSerializer",
    inline_serializer(
        name="RemoteSensingQueuedPayloadSerializer",
        fields={
            "status": drf_serializers.CharField(),
            "source": drf_serializers.CharField(),
            "location": drf_serializers.JSONField(),
            "block_code": drf_serializers.CharField(),
            "chunk_size_sqm": drf_serializers.IntegerField(allow_null=True),
            "temporal_extent": drf_serializers.JSONField(),
            "summary": RemoteSensingSummarySerializer(),
            "cells": drf_serializers.JSONField(),
            "run": drf_serializers.JSONField(allow_null=True),
            "task_id": drf_serializers.UUIDField(),
        },
    ),
)
RemoteSensingRunStatusEnvelopeSerializer = build_envelope_serializer(
    "RemoteSensingRunStatusEnvelopeSerializer",
    RemoteSensingRunStatusResponseSerializer,
)
class SoilDataView(APIView):
    """Store and read the farm-corner coordinates and farmer-defined blocks.

    GET returns the persisted farm structure for a (lat, lon) pair;
    POST creates or updates the farm boundary and its block layout.
    No synchronous subdivision processing runs from either handler.
    """

    @extend_schema(
        tags=["Location Data"],
        summary="خواندن ساختار مزرعه و بلوکها (GET)",
        description="با ارسال lat و lon، ساختار ذخیرهشده مزرعه، بلوکها و آخرین خلاصه سنجشازدور هر بلوک بازگردانده میشود.",
        parameters=[
            {
                "name": "lat",
                "in": "query",
                "required": True,
                "schema": {"type": "number"},
                "description": "عرض جغرافیایی",
            },
            {
                "name": "lon",
                "in": "query",
                "required": True,
                "schema": {"type": "number"},
                "description": "طول جغرافیایی",
            },
            {
                "name": "block_code",
                "in": "query",
                "required": False,
                "schema": {"type": "string", "default": "block-1"},
                "description": "در GET فقط برای فیلتر کلاینتی است و الگوریتمی اجرا نمیکند.",
            },
        ],
        responses={
            200: build_response(
                SoilDataResponseSerializer,
                "ساختار بلوکهای زمین از دیتابیس بازگردانده شد.",
            ),
            404: build_response(
                SoilErrorResponseSerializer,
                "location موردنظر پیدا نشد.",
            ),
            400: build_response(
                SoilErrorResponseSerializer,
                "پارامترهای ورودی نامعتبر هستند.",
            ),
        },
    )
    def get(self, request):
        """Return the stored farm/block structure for the given coordinates."""
        serializer = SoilDataRequestSerializer(data=request.query_params)
        if not serializer.is_valid():
            return Response(
                {"code": 400, "msg": "داده نامعتبر.", "data": serializer.errors},
                status=status.HTTP_400_BAD_REQUEST,
            )

        lat = serializer.validated_data["lat"]
        lon = serializer.validated_data["lon"]
        # prefetch=True pulls block_subdivisions in the same query round trip.
        location = _get_location_by_lat_lon(lat, lon, prefetch=True)
        if location is None:
            return Response(
                {"code": 404, "msg": "location پیدا نشد.", "data": None},
                status=status.HTTP_404_NOT_FOUND,
            )

        data_serializer = SoilLocationResponseSerializer(location)
        return Response(
            {"code": 200, "msg": "success", "data": {"source": "database", **data_serializer.data}},
            status=status.HTTP_200_OK,
        )

    @extend_schema(
        tags=["Location Data"],
        summary="ثبت مزرعه و بلوکهای کشاورز (POST)",
        description="مختصات گوشههای مزرعه و boundary هر بلوک کشاورز ذخیره میشود. هیچ subdivision سنکرونی اجرا نمیشود.",
        request=SoilDataRequestSerializer,
        responses={
            200: build_response(
                SoilDataResponseSerializer,
                "اطلاعات location ذخیره یا بهروزرسانی شد.",
            ),
            400: build_response(
                SoilErrorResponseSerializer,
                "پارامترهای ورودی نامعتبر هستند.",
            ),
        },
        examples=[
            OpenApiExample(
                "نمونه درخواست",
                value={
                    "lat": 35.6892,
                    "lon": 51.3890,
                    "farm_boundary": {
                        "type": "Polygon",
                        "coordinates": [
                            [
                                [51.3890, 35.6890],
                                [51.3902, 35.6890],
                                [51.3902, 35.6900],
                                [51.3890, 35.6900],
                                [51.3890, 35.6890],
                            ]
                        ],
                    },
                    "blocks": [
                        {
                            "block_code": "block-1",
                            "boundary": {
                                "type": "Polygon",
                                "coordinates": [
                                    [
                                        [51.3890, 35.6890],
                                        [51.3896, 35.6890],
                                        [51.3896, 35.6900],
                                        [51.3890, 35.6900],
                                        [51.3890, 35.6890],
                                    ]
                                ],
                            },
                        }
                    ],
                },
                request_only=True,
            ),
        ],
    )
    def post(self, request):
        """Create or update a SoilLocation and synchronize its defined blocks."""
        serializer = SoilDataRequestSerializer(
            data=request.data,
            context={"require_farm_boundary": True},
        )
        if not serializer.is_valid():
            return Response(
                {"code": 400, "msg": "داده نامعتبر.", "data": serializer.errors},
                status=status.HTTP_400_BAD_REQUEST,
            )

        lat = serializer.validated_data["lat"]
        lon = serializer.validated_data["lon"]
        block_count = serializer.validated_data.get("block_count", 1)
        farm_boundary = serializer.validated_data.get("farm_boundary")
        blocks = serializer.validated_data.get("blocks") or []
        farm_boundary_changed = False
        # Coordinates are rounded to 6 decimals to match the lookup key used
        # by _get_location_by_lat_lon.
        lat_rounded = round(lat, 6)
        lon_rounded = round(lon, 6)

        location, created = SoilLocation.objects.get_or_create(
            latitude=lat_rounded,
            longitude=lon_rounded,
            defaults={
                "input_block_count": block_count,
                "farm_boundary": farm_boundary or {},
            },
        )
        if created:
            # New row: write the block layout (and boundary, if supplied) in one save.
            location.set_input_block_count(block_count, blocks=blocks or None)
            if farm_boundary is not None:
                location.farm_boundary = farm_boundary
            location.save(update_fields=["input_block_count", "farm_boundary", "block_layout", "updated_at"])
        else:
            # Existing row: save only the fields that actually changed.
            changed_fields = []
            if block_count != location.input_block_count or blocks:
                location.set_input_block_count(block_count, blocks=blocks or None)
                changed_fields.extend(["input_block_count", "block_layout"])
            if farm_boundary is not None and location.farm_boundary != farm_boundary:
                location.farm_boundary = farm_boundary
                changed_fields.append("farm_boundary")
                farm_boundary_changed = True
            if changed_fields:
                changed_fields.append("updated_at")
                location.save(update_fields=changed_fields)
            if farm_boundary_changed:
                # A new outer boundary invalidates all farm-wide ("") analysis state.
                _clear_block_analysis_state(location, "")

        # A location is only usable with a farm boundary, either sent now or
        # persisted earlier.
        if not (farm_boundary or location.farm_boundary):
            return Response(
                {
                    "code": 400,
                    "msg": "داده نامعتبر.",
                    "data": {"farm_boundary": ["برای ثبت location باید گوشههای کل زمین ارسال یا قبلاً ذخیره شده باشد."]},
                },
                status=status.HTTP_400_BAD_REQUEST,
            )

        # Reconcile BlockSubdivision rows with the farmer-supplied block list.
        _sync_defined_blocks(location, blocks)

        # Re-fetch with prefetch so the response reflects the synced blocks.
        location = _get_location_by_lat_lon(lat, lon, prefetch=True)
        data_serializer = SoilLocationResponseSerializer(location)
        return Response(
            {
                "code": 200,
                "msg": "success",
                "data": {
                    "source": "created" if created else "database",
                    **data_serializer.data,
                },
            },
            status=status.HTTP_200_OK,
        )
|
|
class NdviHealthView(APIView):
    """Return NDVI vegetation-health data for a farm, independent of the dashboard."""

    @extend_schema(
        tags=["Location Data"],
        summary="دریافت NDVI سلامت مزرعه",
        description="با دریافت farm_uuid، داده NDVI سلامت پوشش گیاهی مزرعه را به صورت مستقل از dashboard برمی گرداند.",
        request=NdviHealthRequestSerializer,
        responses={
            200: build_response(
                NdviHealthEnvelopeSerializer,
                "داده NDVI مزرعه با موفقیت بازگردانده شد.",
            ),
            400: build_response(
                SoilErrorResponseSerializer,
                "داده ورودی نامعتبر است.",
            ),
            404: build_response(
                SoilErrorResponseSerializer,
                "مزرعه یافت نشد.",
            ),
        },
        examples=[
            OpenApiExample(
                "نمونه درخواست NDVI",
                value={"farm_uuid": "11111111-1111-1111-1111-111111111111"},
                request_only=True,
            )
        ],
    )
    def post(self, request):
        """Validate farm_uuid, delegate to the NDVI health service, envelope the result."""
        serializer = NdviHealthRequestSerializer(data=request.data)
        if not serializer.is_valid():
            return Response(
                {"code": 400, "msg": "داده نامعتبر.", "data": serializer.errors},
                status=status.HTTP_400_BAD_REQUEST,
            )

        # The service instance is owned by the app config (single shared instance).
        service = apps.get_app_config("location_data").get_ndvi_health_service()
        try:
            data = service.get_ndvi_health(
                farm_uuid=str(serializer.validated_data["farm_uuid"])
            )
        except ValueError as exc:
            # The service signals "farm not found" with ValueError.
            return Response(
                {"code": 404, "msg": str(exc), "data": None},
                status=status.HTTP_404_NOT_FOUND,
            )

        return Response(
            {"code": 200, "msg": "success", "data": data},
            status=status.HTTP_200_OK,
        )
|
|
class RemoteSensingAnalysisView(APIView):
    """Queue the async remote-sensing pipeline (POST) and read cached results (GET)."""

    @extend_schema(
        tags=["Location Data"],
        summary="اجرای async تحلیل سنجشازدور و subdivision دادهمحور",
        description="برای location موجود، pipeline کامل grid + openEO + observation persistence + KMeans clustering در Celery صف میشود و sync اجرا نمیشود.",
        request=RemoteSensingFarmRequestSerializer,
        responses={
            202: build_response(
                RemoteSensingQueuedEnvelopeSerializer,
                "درخواست تحلیل سنجشازدور در صف قرار گرفت.",
            ),
            400: build_response(
                SoilErrorResponseSerializer,
                "داده ورودی نامعتبر است.",
            ),
            404: build_response(
                SoilErrorResponseSerializer,
                "location موردنظر پیدا نشد.",
            ),
        },
        examples=[
            OpenApiExample(
                "نمونه درخواست remote sensing",
                value={
                    "farm_uuid": "11111111-1111-1111-1111-111111111111",
                    "force_refresh": False,
                },
                request_only=True,
            ),
        ],
    )
    def post(self, request):
        """Create a pending RemoteSensingRun and enqueue the Celery task for it."""
        serializer = RemoteSensingFarmRequestSerializer(data=request.data)
        if not serializer.is_valid():
            return Response(
                {"code": 400, "msg": "داده نامعتبر.", "data": serializer.errors},
                status=status.HTTP_400_BAD_REQUEST,
            )

        payload = serializer.validated_data
        # farm_uuid resolves to a SoilLocation through SensorData.center_location.
        farm = SensorData.objects.select_related("center_location").filter(farm_uuid=payload["farm_uuid"]).first()
        location = getattr(farm, "center_location", None)
        if location is None:
            return Response(
                {"code": 404, "msg": "location پیدا نشد.", "data": None},
                status=status.HTTP_404_NOT_FOUND,
            )

        # Analysis window: the 30 days ending yesterday (local date).
        temporal_end = timezone.localdate() - timedelta(days=1)
        temporal_start = temporal_end - timedelta(days=30)
        # block_code="" means the run covers the whole farm (all blocks).
        run = RemoteSensingRun.objects.create(
            soil_location=location,
            block_code="",
            chunk_size_sqm=_resolve_chunk_size_for_location(location, ""),
            temporal_start=temporal_start,
            temporal_end=temporal_end,
            status=RemoteSensingRun.STATUS_PENDING,
            metadata={
                "requested_via": "api",
                "status_label": "pending",
                "requested_cluster_count": None,
                "selected_features": list(DEFAULT_CLUSTER_FEATURES),
                "farm_uuid": str(payload["farm_uuid"]),
                "scope": "all_blocks",
            },
        )
        task_result = run_remote_sensing_analysis_task.delay(
            soil_location_id=location.id,
            block_code="",
            temporal_start=temporal_start.isoformat(),
            temporal_end=temporal_end.isoformat(),
            force_refresh=payload.get("force_refresh", False),
            run_id=run.id,
            cluster_count=None,
            selected_features=list(DEFAULT_CLUSTER_FEATURES),
        )
        # Persist the Celery task id on the run so the status endpoint can find it.
        run.metadata = {**(run.metadata or {}), "task_id": task_result.id}
        run.save(update_fields=["metadata", "updated_at"])

        location_data = SoilLocationResponseSerializer(location).data
        response_payload = {
            "status": "processing",
            "source": "processing",
            "location": location_data,
            "block_code": "",
            "chunk_size_sqm": run.chunk_size_sqm,
            "temporal_extent": {
                "start_date": temporal_start.isoformat(),
                "end_date": temporal_end.isoformat(),
            },
            "summary": _empty_remote_sensing_summary(),
            "cells": [],
            "run": RemoteSensingRunSerializer(run).data,
            "task_id": task_result.id,
        }
        return Response(
            {"code": 202, "msg": "تحلیل سنجشازدور در صف قرار گرفت.", "data": response_payload},
            status=status.HTTP_202_ACCEPTED,
        )

    @extend_schema(
        tags=["Location Data"],
        summary="خواندن نتایج cache شده سنجشازدور و subdivision",
        description="فقط نتایج ذخیرهشده remote sensing و clustering را برمیگرداند و هیچ پردازش sync اجرا نمیکند.",
        parameters=[
            {"name": "farm_uuid", "in": "query", "required": True, "schema": {"type": "string", "format": "uuid"}},
            {"name": "page", "in": "query", "required": False, "schema": {"type": "integer", "default": 1}},
            {"name": "page_size", "in": "query", "required": False, "schema": {"type": "integer", "default": 100}},
        ],
        responses={
            200: build_response(
                RemoteSensingEnvelopeSerializer,
                "نتایج cache شده remote sensing بازگردانده شد.",
            ),
            404: build_response(
                SoilErrorResponseSerializer,
                "location موردنظر پیدا نشد.",
            ),
            400: build_response(
                SoilErrorResponseSerializer,
                "داده ورودی نامعتبر است.",
            ),
        },
    )
    def get(self, request):
        """Return only cached observations/clustering for the current 30-day window."""
        serializer = RemoteSensingFarmRequestSerializer(data=request.query_params)
        if not serializer.is_valid():
            return Response(
                {"code": 400, "msg": "داده نامعتبر.", "data": serializer.errors},
                status=status.HTTP_400_BAD_REQUEST,
            )

        payload = serializer.validated_data
        farm = SensorData.objects.select_related("center_location").filter(farm_uuid=payload["farm_uuid"]).first()
        location = getattr(farm, "center_location", None)
        if location is None:
            return Response(
                {"code": 404, "msg": "location پیدا نشد.", "data": None},
                status=status.HTTP_404_NOT_FOUND,
            )

        # Must mirror the window POST enqueues, otherwise cached rows won't match.
        temporal_end = timezone.localdate() - timedelta(days=1)
        temporal_start = temporal_end - timedelta(days=30)
        block_code = ""
        observations = _get_remote_sensing_observations(
            location=location,
            block_code=block_code,
            start_date=temporal_start,
            end_date=temporal_end,
        )
        run = _get_latest_remote_sensing_run(
            location=location,
            block_code=block_code,
            start_date=temporal_start,
            end_date=temporal_end,
        )
        subdivision_result = _get_remote_sensing_subdivision_result(
            location=location,
            block_code=block_code,
            start_date=temporal_start,
            end_date=temporal_end,
        )

        if not observations.exists():
            # No persisted cells yet: either the pipeline is still in flight
            # ("processing") or nothing was ever produced ("not_found").
            processing = run is not None and run.status in {
                RemoteSensingRun.STATUS_PENDING,
                RemoteSensingRun.STATUS_RUNNING,
            }
            response_payload = {
                "status": "processing" if processing else "not_found",
                "source": "processing" if processing else "database",
                "location": SoilLocationResponseSerializer(location).data,
                "block_code": "",
                "chunk_size_sqm": getattr(run, "chunk_size_sqm", None),
                "temporal_extent": {
                    "start_date": temporal_start.isoformat(),
                    "end_date": temporal_end.isoformat(),
                },
                "summary": _empty_remote_sensing_summary(),
                "cells": [],
                "run": RemoteSensingRunSerializer(run).data if run else None,
                "subdivision_result": None,
            }
            return Response(
                {"code": 200, "msg": "success", "data": response_payload},
                status=status.HTTP_200_OK,
            )

        # Cells and cluster assignments are paginated with the same page/page_size.
        paginated_observations = _paginate_observations(
            observations,
            page=payload["page"],
            page_size=payload["page_size"],
        )
        paginated_assignments = []
        pagination = {"cells": paginated_observations["pagination"]}
        if subdivision_result is not None:
            paginated = _paginate_assignments(
                subdivision_result,
                page=payload["page"],
                page_size=payload["page_size"],
            )
            paginated_assignments = paginated["items"]
            pagination["assignments"] = paginated["pagination"]

        cells_data = RemoteSensingCellObservationSerializer(paginated_observations["items"], many=True).data
        subdivision_data = None
        if subdivision_result is not None:
            # The serializer renders only the current page of assignments.
            subdivision_data = RemoteSensingSubdivisionResultSerializer(
                subdivision_result,
                context={"paginated_assignments": paginated_assignments},
            ).data

        response_payload = {
            "status": "success",
            "source": "database",
            "location": SoilLocationResponseSerializer(location).data,
            "block_code": "",
            "chunk_size_sqm": observations.first().cell.chunk_size_sqm,
            "temporal_extent": {
                "start_date": temporal_start.isoformat(),
                "end_date": temporal_end.isoformat(),
            },
            "summary": _build_remote_sensing_summary(observations),
            "cells": cells_data,
            "run": RemoteSensingRunSerializer(run).data if run else None,
            "subdivision_result": subdivision_data,
        }
        # NOTE(review): pagination is always a dict here, so this check never
        # skips — kept as-is to preserve behavior.
        if pagination is not None:
            response_payload["pagination"] = pagination
        return Response(
            {"code": 200, "msg": "success", "data": response_payload},
            status=status.HTTP_200_OK,
        )
|
|
class RemoteSensingRunStatusView(APIView):
    """Expose the status of an async run and, once finished, its full result."""

    @extend_schema(
        tags=["Location Data"],
        summary="وضعیت run تحلیل سنجشازدور",
        description="وضعیت async pipeline را با task_id از نوع UUID برمیگرداند. این task_id همان شناسه تسک Celery ذخیرهشده در metadata.run است.",
        parameters=[
            OpenApiParameter(
                name="run_id",
                type={"type": "string", "format": "uuid"},
                location=OpenApiParameter.PATH,
                required=True,
                description="شناسه UUID تسک async (task_id).",
            ),
        ],
        responses={
            200: build_response(
                RemoteSensingRunStatusEnvelopeSerializer,
                "وضعیت run بازگردانده شد و بعد از اتمام، نتیجه نهایی نیز از همین route برگردانده میشود.",
            ),
            404: build_response(
                SoilErrorResponseSerializer,
                "run موردنظر پیدا نشد.",
            ),
        },
    )
    def get(self, request, run_id):
        """Look up a run by its Celery task id and return its status payload."""
        # Optional pagination of the result; page_size is capped module-wide.
        page = _safe_positive_int(request.query_params.get("page"), default=1)
        page_size = min(_safe_positive_int(request.query_params.get("page_size"), default=100), MAX_REMOTE_SENSING_PAGE_SIZE)
        # run_id in the URL is the Celery task id stored in run.metadata,
        # not the RemoteSensingRun primary key.
        run = RemoteSensingRun.objects.filter(metadata__task_id=str(run_id)).select_related("soil_location").first()
        if run is None:
            return Response(
                {"code": 404, "msg": "run با این task_id پیدا نشد.", "data": None},
                status=status.HTTP_404_NOT_FOUND,
            )

        response_payload = _build_remote_sensing_run_status_payload(run, page=page, page_size=page_size)
        return Response(
            {"code": 200, "msg": "success", "data": response_payload},
            status=status.HTTP_200_OK,
        )
|
|
def _build_remote_sensing_run_status_payload(run: RemoteSensingRun, *, page: int, page_size: int) -> dict:
    """Build the status-endpoint payload for a run.

    While the run is pending/running only a thin status dict is returned;
    once it has finished (success or failure), the full result — location,
    summary, paginated cells, and the clustering result if one exists —
    is included.
    """
    run_data = RemoteSensingRunSerializer(run).data
    task_id = (run.metadata or {}).get("task_id")
    # Still in flight: report just the status label and the task id.
    if run.status in {RemoteSensingRun.STATUS_PENDING, RemoteSensingRun.STATUS_RUNNING}:
        return {
            "status": run_data["status_label"],
            "source": "database",
            "run": run_data,
            "task_id": task_id,
        }

    # Re-fetch the location with block_subdivisions prefetched for serialization.
    location = _get_location_by_lat_lon(run.soil_location.latitude, run.soil_location.longitude, prefetch=True)
    observations = _get_remote_sensing_observations(
        location=run.soil_location,
        block_code=run.block_code,
        start_date=run.temporal_start,
        end_date=run.temporal_end,
    )
    # One-to-one reverse accessor; None when clustering never completed.
    subdivision_result = getattr(run, "subdivision_result", None)

    response_payload = {
        "status": run_data["status_label"],
        "source": "database",
        "run": run_data,
        "task_id": task_id,
        "location": SoilLocationResponseSerializer(location).data,
        "block_code": run.block_code,
        "chunk_size_sqm": run.chunk_size_sqm,
        "temporal_extent": {
            "start_date": run.temporal_start.isoformat() if run.temporal_start else None,
            "end_date": run.temporal_end.isoformat() if run.temporal_end else None,
        },
        "summary": _empty_remote_sensing_summary(),
        "cells": [],
        "subdivision_result": None,
    }

    # No persisted observations (e.g. a failed run): return the empty skeleton.
    if not observations.exists():
        return response_payload

    paginated_observations = _paginate_observations(
        observations,
        page=page,
        page_size=page_size,
    )
    paginated_assignments = []
    pagination = {"cells": paginated_observations["pagination"]}
    if subdivision_result is not None:
        paginated = _paginate_assignments(
            subdivision_result,
            page=page,
            page_size=page_size,
        )
        paginated_assignments = paginated["items"]
        pagination["assignments"] = paginated["pagination"]

    response_payload["summary"] = _build_remote_sensing_summary(observations)
    response_payload["cells"] = RemoteSensingCellObservationSerializer(
        paginated_observations["items"],
        many=True,
    ).data
    response_payload["pagination"] = pagination

    if subdivision_result is not None:
        # The serializer renders only the current page of assignments.
        response_payload["subdivision_result"] = RemoteSensingSubdivisionResultSerializer(
            subdivision_result,
            context={"paginated_assignments": paginated_assignments},
        ).data

    return response_payload
|
|
def _get_location_by_lat_lon(lat, lon, *, prefetch: bool = False):
    """Fetch the SoilLocation stored under (lat, lon), or None.

    Coordinates are rounded to 6 decimal places, matching how they are
    persisted. With prefetch=True, block_subdivisions is prefetched too.
    """
    matches = SoilLocation.objects.filter(
        latitude=round(lat, 6),
        longitude=round(lon, 6),
    )
    if prefetch:
        matches = matches.prefetch_related("block_subdivisions")
    return matches.first()
|
|
def _sync_defined_blocks(location: SoilLocation, blocks: list[dict]) -> None:
    """Reconcile BlockSubdivision rows with the farmer-supplied block list.

    Normalizes the incoming blocks (default codes/order), deletes subdivisions
    whose codes were dropped (together with their analysis state), creates
    rows for new codes, and updates existing ones — resetting computed grid
    state whenever a block's boundary changed. No-op when blocks is empty.
    """
    if not blocks:
        return

    with transaction.atomic():
        # Normalize: default block_code to "block-<n>" and order to position.
        normalized_blocks = []
        for index, block in enumerate(blocks):
            normalized_blocks.append(
                {
                    "block_code": str(block.get("block_code") or f"block-{index + 1}").strip(),
                    "boundary": block.get("boundary") or {},
                    "order": int(block.get("order") or index + 1),
                }
            )

        normalized_codes = {block["block_code"] for block in normalized_blocks}
        existing_subdivisions = {
            subdivision.block_code: subdivision
            for subdivision in location.block_subdivisions.all()
        }

        # Remove subdivisions no longer present in the farmer's definition.
        for block_code, subdivision in existing_subdivisions.items():
            if block_code not in normalized_codes:
                _clear_block_analysis_state(location, block_code, subdivision=subdivision)
                subdivision.delete()

        for block in normalized_blocks:
            block_code = block["block_code"]
            boundary = block["boundary"]
            subdivision = existing_subdivisions.get(block_code)
            definition_metadata = {
                "definition_source": "farmer_input",
                "order": block["order"],
            }
            if subdivision is None:
                # New block: create an empty, not-yet-analyzed subdivision.
                BlockSubdivision.objects.create(
                    soil_location=location,
                    block_code=block_code,
                    source_boundary=boundary,
                    chunk_size_sqm=900,
                    grid_points=[],
                    centroid_points=[],
                    grid_point_count=0,
                    centroid_count=0,
                    status="defined",
                    metadata=definition_metadata,
                )
                continue

            boundary_changed = subdivision.source_boundary != boundary
            # Merge definition metadata over whatever was stored previously.
            metadata = dict(subdivision.metadata or {})
            metadata.update(definition_metadata)
            subdivision.source_boundary = boundary
            subdivision.chunk_size_sqm = 900
            subdivision.metadata = metadata
            if boundary_changed:
                # Boundary changed: wipe derived analysis state and reset the
                # subdivision back to a freshly defined block.
                _clear_block_analysis_state(location, block_code, subdivision=subdivision)
                subdivision.grid_points = []
                subdivision.centroid_points = []
                subdivision.grid_point_count = 0
                subdivision.centroid_count = 0
                subdivision.status = "defined"
                subdivision.metadata = definition_metadata
                subdivision.elbow_plot = None
                subdivision.save(
                    update_fields=[
                        "source_boundary",
                        "chunk_size_sqm",
                        "grid_points",
                        "centroid_points",
                        "grid_point_count",
                        "centroid_count",
                        "status",
                        "metadata",
                        "elbow_plot",
                        "updated_at",
                    ]
                )
                continue

            # Same boundary: persist only the definition fields.
            subdivision.save(
                update_fields=[
                    "source_boundary",
                    "chunk_size_sqm",
                    "metadata",
                    "updated_at",
                ]
            )
|
|
def _clear_block_analysis_state(
    location: SoilLocation,
    block_code: str,
    *,
    subdivision: BlockSubdivision | None = None,
) -> None:
    """Delete persisted analysis artifacts for one block (or farm-wide).

    Removes grid cells, subdivision results, and runs scoped to block_code
    ("" means farm-wide). When a BlockSubdivision is supplied, its computed
    fields are reset in memory only — the caller is responsible for saving.
    """
    scoped_code = block_code or ""
    for model in (AnalysisGridCell, RemoteSensingSubdivisionResult, RemoteSensingRun):
        model.objects.filter(
            soil_location=location,
            block_code=scoped_code,
        ).delete()

    if subdivision is None:
        return

    subdivision.grid_points = []
    subdivision.centroid_points = []
    subdivision.grid_point_count = 0
    subdivision.centroid_count = 0
    subdivision.status = "defined"
    subdivision.elbow_plot = None
|
|
def _resolve_chunk_size_for_location(location: SoilLocation, block_code: str) -> int:
    """Resolve the analysis chunk size (m²) for a location/block.

    Precedence: the block's BlockSubdivision row, then the matching entry
    in location.block_layout (the farm-wide summary when block_code is
    empty), then a 900 m² fallback.
    """
    if block_code:
        subdivision = location.block_subdivisions.filter(block_code=block_code).first()
        if subdivision is not None:
            return int(subdivision.chunk_size_sqm or 900)

    layout = location.block_layout or {}
    if not block_code:
        farm_summary = layout.get("analysis_grid_summary", {})
        return int(farm_summary.get("chunk_size_sqm") or 900)

    for entry in layout.get("blocks", []):
        if entry.get("block_code") == block_code:
            return int(entry.get("analysis_grid_summary", {}).get("chunk_size_sqm") or 900)

    return 900
|
|
def _get_remote_sensing_observations(*, location, block_code: str, start_date, end_date):
    """Return grid-cell observations for a location/block over an exact window.

    The temporal filter matches the stored window exactly (equality, not
    overlap); an empty block_code selects farm-wide cells. Ordered by cell
    code with cell and run pre-joined.
    """
    return (
        AnalysisGridObservation.objects.select_related("cell", "run")
        .filter(
            cell__soil_location=location,
            cell__block_code=block_code or "",
            temporal_start=start_date,
            temporal_end=end_date,
        )
        .order_by("cell__cell_code")
    )
|
|
def _get_latest_remote_sensing_run(*, location, block_code: str, start_date, end_date):
    """Return the most recent RemoteSensingRun for the scope and window, or None."""
    candidates = RemoteSensingRun.objects.filter(
        soil_location=location,
        block_code=block_code or "",
        temporal_start=start_date,
        temporal_end=end_date,
    )
    return candidates.order_by("-created_at", "-id").first()
|
|
def _get_remote_sensing_subdivision_result(*, location, block_code: str, start_date, end_date):
    """Return the newest clustering result for the scope and window, or None.

    Prefetches assignments (with their cells) and joins the owning run, since
    callers serialize both.
    """
    candidates = RemoteSensingSubdivisionResult.objects.filter(
        soil_location=location,
        block_code=block_code or "",
        temporal_start=start_date,
        temporal_end=end_date,
    )
    return (
        candidates.select_related("run")
        .prefetch_related("assignments__cell")
        .order_by("-created_at", "-id")
        .first()
    )
|
|
def _build_remote_sensing_summary(observations):
    """Aggregate per-cell remote-sensing metrics into a summary dict.

    Returns the observation count plus the mean of each metric, rounded to
    6 decimal places (None when a metric has no values).

    Fix: the previous version also computed ``cell_count=Avg("cell_id")`` —
    an average of primary keys — whose result was never read (the count
    comes from ``observations.count()``). That meaningless aggregate is
    removed.
    """
    metric_fields = ("ndvi", "ndwi", "lst_c", "soil_vv_db", "dem_m", "slope_deg")
    aggregates = observations.aggregate(
        **{f"{field}_mean": Avg(field) for field in metric_fields}
    )
    summary = {"cell_count": observations.count()}
    for field in metric_fields:
        summary[f"{field}_mean"] = _round_or_none(aggregates.get(f"{field}_mean"))
    return summary
|
|
def _empty_remote_sensing_summary():
|
|
return {
|
|
"cell_count": 0,
|
|
"ndvi_mean": None,
|
|
"ndwi_mean": None,
|
|
"lst_c_mean": None,
|
|
"soil_vv_db_mean": None,
|
|
"dem_m_mean": None,
|
|
"slope_deg_mean": None,
|
|
}
|
|
|
|
|
|
def _round_or_none(value):
|
|
if value is None:
|
|
return None
|
|
return round(float(value), 6)
|
|
|
|
|
|
def _paginate_assignments(result: RemoteSensingSubdivisionResult, *, page: int, page_size: int) -> dict:
    """Paginate a subdivision result's cluster assignments.

    Orders assignments by cluster label then cell code, then delegates to
    _paginate_observations, which implements the identical page-size
    clamping, empty-result shape, and out-of-range-page (EmptyPage → last
    page) behavior this function previously duplicated line for line.
    """
    assignments = result.assignments.select_related("cell").order_by(
        "cluster_label", "cell__cell_code"
    )
    return _paginate_observations(assignments, page=page, page_size=page_size)
|
|
def _safe_positive_int(value, *, default: int) -> int:
|
|
try:
|
|
parsed = int(value)
|
|
except (TypeError, ValueError):
|
|
return default
|
|
return parsed if parsed > 0 else default
|
|
|
|
|
|
|
|
def _paginate_observations(observations, *, page: int, page_size: int) -> dict:
    """Paginate a queryset into {"items": [...], "pagination": {...}}.

    page_size is clamped to [1, MAX_REMOTE_SENSING_PAGE_SIZE]. An empty
    queryset yields an empty page-1 envelope with zero totals; a page number
    past the end falls back to the last available page.
    """
    size = min(max(page_size, 1), MAX_REMOTE_SENSING_PAGE_SIZE)
    paginator = Paginator(observations, size)

    if paginator.count == 0:
        empty_meta = {
            "page": 1,
            "page_size": size,
            "total_items": 0,
            "total_pages": 0,
            "has_next": False,
            "has_previous": False,
        }
        return {"items": [], "pagination": empty_meta}

    try:
        current = paginator.page(page)
    except EmptyPage:
        # Requested page is past the end: serve the last page instead.
        current = paginator.page(paginator.num_pages)

    meta = {
        "page": current.number,
        "page_size": size,
        "total_items": paginator.count,
        "total_pages": paginator.num_pages,
        "has_next": current.has_next(),
        "has_previous": current.has_previous(),
    }
    return {"items": list(current.object_list), "pagination": meta}