@@ -0,0 +1,378 @@
# The actual `location_data` flow

This explanation is written to match the new logic exactly:

- first, the corner coordinates of the whole field are captured
- then, the coordinates of the blocks the farmer defined are captured
- each block is converted into `30×30` grids separately
- for each grid, the data of one time window is fetched from openEO
- the mean of that window is taken as the final state of that grid
- `KMeans` is then run on those grids
- for each `K`, the `SSE / Inertia` value is stored
- the `K - SSE` plot is drawn
- the point where the slope drops off sharply is chosen as the right number of sub-blocks
- in the end, each farmer block is split into several data-driven sub-blocks

---

## 1) Stage-one input

The following data is recorded in the first stage:

- the corner coordinates of the whole field
- the coordinates of the blocks the farmer defined
- the code of each block

Main files:

- `location_data/views.py`
- `location_data/serializers.py`
- `location_data/models.py`

Output of this stage:

- one `SoilLocation` for the field
- one `block_layout` containing the boundary of each block
- one `BlockSubdivision` per block, used only to define the farmer block's boundary

Important notes (the resulting `block_layout` shape is sketched just below):

- no synchronous subdivision runs in this stage
- no soil data is fetched from the old adapter
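
For orientation, this is roughly the shape `block_layout` takes right after this stage. The key names come from `sync_block_layout_with_subdivision` later in this commit; the concrete values are invented for illustration:

```python
# Illustrative only — keys mirror sync_block_layout_with_subdivision;
# the values are invented.
block_layout = {
    "blocks": [
        {
            "block_code": "block-1",
            "order": 1,
            "source": "input",
            "needs_subdivision": None,  # decided later by the clustering step
            "sub_blocks": [],           # filled in later with data-driven sub-blocks
        }
    ],
}
```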

---

## 2) Each farmer block is gridded separately

Main file:

- `location_data/grid_analysis.py`

What happens here:

- the boundary of each block is read
- that block is converted into `30×30` meter cells
- a record is created for each cell

Storage model:

- `AnalysisGridCell`

Each `AnalysisGridCell` keeps:

- `cell_code`
- `block_code`
- `geometry`
- `centroid_lat`
- `centroid_lon`
- `chunk_size_sqm`

In other words, from here on the smallest unit of analysis is no longer the block itself,
but the `30×30` grids inside each block. The `cell_code` naming convention is sketched below.
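
A short sketch of that convention, taken from `build_analysis_cell_code` in `grid_analysis.py` (the ids here are examples; 900 sqm is a 30×30 m cell):

```python
# Mirrors build_analysis_cell_code from grid_analysis.py; ids are examples.
location_id, block_code, chunk_size_sqm = 7, "block-1", 900
row_index, col_index = 0, 3
cell_code = (
    f"loc-{location_id}__"
    f"block-{block_code}__"
    f"chunk-{chunk_size_sqm}__"
    f"r{row_index:04d}c{col_index:04d}"
)
print(cell_code)  # loc-7__block-block-1__chunk-900__r0000c0003
```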

---

## 3) Satellite data for each grid is fetched from openEO

Main file:

- `location_data/openeo_service.py`

The logic of this part is the same as described above:

- for each time window, the cube of each sensor is loaded
- `mean_time()` is applied over the time dimension
- then `aggregate_spatial(..., reducer="mean")` is used on each grid's geometry

Which means:

- the raw data for several days, or a month, comes in
- the mean over that time window is computed for each grid
- that mean value is the final state of that grid for that window

Metrics currently fetched:

- `ndvi`
- `ndwi`
- `lst_c`
- `soil_vv`
- `soil_vv_db`
- `dem_m`
- `slope_deg`

Important notes (a sketch of the openEO pattern follows below):

- this data is fetched for **all grids of a block**
- not just for the farm center
- not just for the raw geometry
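
A minimal sketch of that pattern with the `openeo` Python client. The back-end URL, collection, bands, and extents below are assumptions for illustration; the real calls live in `openeo_service.py`:

```python
import openeo

# Assumptions: back-end URL, collection, bands, and extents are illustrative.
connection = openeo.connect("https://openeo.dataspace.copernicus.eu").authenticate_oidc()

cube = connection.load_collection(
    "SENTINEL2_L2A",
    spatial_extent={"west": 51.30, "south": 35.60, "east": 51.32, "north": 35.62},
    temporal_extent=["2024-06-01", "2024-07-01"],
    bands=["B04", "B08"],
)
ndvi = cube.ndvi(nir="B08", red="B04")   # per-pixel NDVI
ndvi_mean = ndvi.mean_time()             # mean over the whole time window

# One GeoJSON polygon per 30×30 grid cell; the reducer averages pixels per cell.
grid_geometries = {"type": "FeatureCollection", "features": []}  # filled from AnalysisGridCell.geometry
per_cell = ndvi_mean.aggregate_spatial(geometries=grid_geometries, reducer="mean")
result = per_cell.execute()  # small synchronous request; batch jobs are the alternative
```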

---

## 4) Per-grid data is stored in a table

Main file:

- `location_data/tasks.py`

Storage model:

- `AnalysisGridObservation`

For each grid and each time window, these values are stored:

- `ndvi`
- `ndwi`
- `lst_c`
- `soil_vv`
- `soil_vv_db`
- `dem_m`
- `slope_deg`

So every grid has a real feature vector.

Put simply:

- each 30×30 cell is not just a polygon
- it also carries an actual data state
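
Reading those vectors back is a plain ORM query; a sketch (model and field names as listed above, the filter values are examples):

```python
from location_data.models import AnalysisGridObservation

# One feature vector per grid cell of one block, for a given time window.
rows = (
    AnalysisGridObservation.objects
    .filter(cell__block_code="block-1", temporal_start="2024-06-01")
    .values_list("cell__cell_code", "ndvi", "ndwi", "lst_c",
                 "soil_vv", "soil_vv_db", "dem_m", "slope_deg")
)
```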

---

## 5) This is where unsupervised learning is used

Main file:

- `location_data/data_driven_subdivision.py`

Here,

- `KMeans`

is used.

This part is unsupervised because:

- we have no ready-made labels
- we only want grids that behave similarly in the satellite data to end up in the same group

---

## 6) What exactly is the feature matrix built from?

Each row:

- one `AnalysisGridCell`

Each column:

- one of the satellite features

Default features:

- `ndvi`
- `ndwi`
- `lst_c`
- `soil_vv_db`
- `dem_m`
- `slope_deg`

In other words, the `KMeans` input comes from real observations, not from geometric coordinates. A sketch of that matrix follows below.
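
A minimal sketch of assembling such a matrix (it mirrors `build_clustering_dataset` further down in this commit; the observation dicts are stand-ins for `AnalysisGridObservation` rows):

```python
import numpy as np

FEATURES = ["ndvi", "ndwi", "lst_c", "soil_vv_db", "dem_m", "slope_deg"]

# Stand-in observations; in the real code these are AnalysisGridObservation rows.
observations = [
    {"ndvi": 0.61, "ndwi": 0.12, "lst_c": 31.4, "soil_vv_db": -11.2, "dem_m": 1205.0, "slope_deg": 2.1},
    {"ndvi": 0.43, "ndwi": 0.05, "lst_c": 34.0, "soil_vv_db": -9.8,  "dem_m": 1199.0, "slope_deg": 1.7},
]

# One row per grid cell, one column per feature; absent features become NaN.
matrix = np.array(
    [[obs.get(name, np.nan) for name in FEATURES] for obs in observations],
    dtype=float,
)
print(matrix.shape)  # (n_cells, n_features) — here (2, 6)
```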

---

## 7) How is incomplete data handled?

Before KMeans runs:

- if a grid is empty for all features, it is dropped
- if only some features are empty, the gaps are imputed

Current method:

- `median imputation`

After that:

- the data is standardized

Current method:

- `StandardScaler`

This is necessary (see the sketch below) because:

- the scale of `ndvi` differs from `dem_m`
- the scale of `dem_m` differs from `lst_c`
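
A minimal sketch of that preprocessing with scikit-learn, using the same `SimpleImputer`/`StandardScaler` pair as `build_clustering_dataset`:

```python
import numpy as np
from sklearn.impute import SimpleImputer
from sklearn.preprocessing import StandardScaler

raw = np.array([
    [0.61, 0.12, 31.4],
    [np.nan, 0.05, 34.0],   # partially missing row: gets the column median
    [0.43, np.nan, np.nan],
])

imputed = SimpleImputer(strategy="median").fit_transform(raw)
scaled = StandardScaler().fit_transform(imputed)  # zero mean, unit variance per column
print(scaled.round(3))
```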

---

## 8) The SSE value is stored for every K

Main files:

- `location_data/data_driven_subdivision.py`
- `location_data/block_subdivision.py`

When picking the cluster count:

- for `K = 1, 2, 3, ...`
- the model is fitted
- the `SSE / Inertia` value is stored

This data is stored inside the metadata of the clustering result.

So for each block we have (the loop is sketched below):

- the list of `K` values
- the `SSE` value for each `K`
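
A minimal sketch of that loop, following the same pattern as `choose_cluster_count` in this commit:

```python
from sklearn.cluster import KMeans

def inertia_curve(scaled_matrix, max_k=10, random_state=42):
    """Fit KMeans for K = 1..max_k and record SSE (inertia) per K."""
    max_k = min(max_k, len(scaled_matrix))  # K can never exceed the sample count
    curve = []
    for k in range(1, max_k + 1):
        model = KMeans(n_clusters=k, n_init=10, random_state=random_state)
        model.fit(scaled_matrix)
        curve.append({"k": k, "sse": float(model.inertia_)})
    return curve
```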

---

## 9) The `K - SSE` plot is drawn

The plotting logic lives in the system, and the same elbow logic drives it.

The purpose of the plot:

- to see from which point onward the drop in SSE is no longer steep
- i.e. where the slope of the curve suddenly flattens

That point:

- is the right number of sub-blocks for that block

Put simply (the detection heuristic is sketched below):

- if the slope drops sharply up to `K=3`
- but flattens out after it
- then `K=3` is the right choice
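
A minimal sketch of picking that point programmatically, using the same largest-change-in-slope heuristic that `detect_elbow_point` implements later in this commit:

```python
def detect_elbow(curve):
    """Pick the K where the SSE drop flattens the most (largest slope change)."""
    if len(curve) <= 2:
        return curve[-1]["k"] if curve else 0
    sses = [item["sse"] for item in curve]
    ks = [item["k"] for item in curve]
    drops = [sses[i] - sses[i + 1] for i in range(len(sses) - 1)]   # SSE drop per step
    changes = [drops[i] - drops[i + 1] for i in range(len(drops) - 1)]
    return ks[changes.index(max(changes)) + 1]

print(detect_elbow([{"k": 1, "sse": 100.0}, {"k": 2, "sse": 60.0},
                    {"k": 3, "sse": 25.0}, {"k": 4, "sse": 20.0},
                    {"k": 5, "sse": 17.0}]))  # -> 3
```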

---

## 10) Each farmer block is clustered separately

This matters a lot:

- the whole farm is not clustered in one go
- every block the farmer defined is processed on its own

So for each block:

1. the 30×30 grids are created
2. the satellite data for those grids is fetched
3. the observations are stored
4. `KMeans` runs only on that block's grids
5. the right number of sub-blocks for that block is determined

---

## 11) Where is the new subdivision result stored?

Main result model:

- `RemoteSensingSubdivisionResult`

This model keeps the essentials:

- `block_code`
- `cluster_count`
- `selected_features`
- `skipped_cell_codes`
- `kmeans_params`
- `inertia_curve`
- `cluster_summaries`

And a separate per-grid assignment is stored in:

- `RemoteSensingClusterAssignment`

So for every grid it is known (see the query sketch below):

- which cluster it landed in
- what its raw features were
- what its scaled features were
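
Pulling a block's assignments back out is again a plain ORM query (field names from the models in this commit; the block code is an example):

```python
from location_data.models import RemoteSensingClusterAssignment

assignments = (
    RemoteSensingClusterAssignment.objects
    .filter(result__block_code="block-1")
    .select_related("cell")
)
for assignment in assignments:
    print(assignment.cell.cell_code, assignment.cluster_label, assignment.raw_feature_values)
```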

---

## 12) What role does `BlockSubdivision` play now?

`BlockSubdivision` is no longer the main clustering model.

Its role is to:

- hold the farmer block's boundary
- hold the block's metadata
- support grid creation and the pipeline

The main result of the data-driven subdivision, however, is stored in these two models:

- `RemoteSensingSubdivisionResult`
- `RemoteSensingClusterAssignment`

---

## 13) Where does the async execution happen?

Main file:

- `location_data/tasks.py`

This pipeline runs inside Celery.

Steps of a run:

1. the run is created
2. the block's grids are created
3. the openEO data is fetched
4. the observations are stored
5. the feature matrix is built
6. `KMeans` runs
7. the final result is stored

Status model:

- `RemoteSensingRun`

Statuses that are tracked:

- `pending`
- `running`
- `failed`
- `completed`
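
`tasks.py` itself is not part of this excerpt, so the following is only a sketch of what such a task could look like; the task name and the elided steps are hypothetical, while the status values match `RemoteSensingRun` above:

```python
from celery import shared_task

@shared_task
def run_remote_sensing_pipeline(run_id: int):
    """Hypothetical sketch — the real task lives in location_data/tasks.py."""
    from location_data.models import RemoteSensingRun

    run = RemoteSensingRun.objects.get(pk=run_id)
    run.status = "running"
    run.save(update_fields=["status"])
    try:
        # Steps 1–7: build grids, fetch openEO data, store observations,
        # build the feature matrix, run KMeans, persist the result.
        ...
        run.status = "completed"
    except Exception:
        run.status = "failed"
        raise
    finally:
        run.save(update_fields=["status"])
```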

---

## 14) What has been removed

These parts are no longer the main data source and should be considered removed:

- the old soil-depth fetching logic
- the soil adapters
- the core dependency on `SoilDepthData`

The main data source from here on:

- the satellite data of each grid

In other words:

- instead of the depth-based table
- the grid-based table of satellite observations is now the source of truth

---

## 15) Very short summary

The final flow is:

1. the field corners and the farmer's blocks are recorded
2. each block is converted into `30×30` grids
3. for each grid, satellite data for one time window is fetched from openEO
4. the mean of that window becomes that grid's state
5. all grids are stored in the observation table
6. for each block, `KMeans` runs on the grids' features
7. for each `K`, the `SSE` value is stored
8. the `K - SSE` plot is built
9. the elbow point determines the right number of sub-blocks
10. each farmer block is split into several data-driven sub-blocks

This is exactly the system's new core logic.
@@ -0,0 +1,136 @@
from django.contrib import admin

from .models import (
    AnalysisGridCell,
    AnalysisGridObservation,
    BlockSubdivision,
    RemoteSensingClusterAssignment,
    RemoteSensingRun,
    RemoteSensingSubdivisionResult,
    SoilLocation,
)


class BlockSubdivisionInline(admin.TabularInline):
    model = BlockSubdivision
    extra = 0
    readonly_fields = (
        "block_code",
        "chunk_size_sqm",
        "grid_point_count",
        "centroid_count",
        "status",
        "created_at",
        "updated_at",
    )
    fields = readonly_fields
    show_change_link = True


@admin.register(SoilLocation)
class SoilLocationAdmin(admin.ModelAdmin):
    list_display = ("id", "latitude", "longitude", "is_complete", "created_at")
    list_filter = ("created_at",)
    search_fields = ("latitude", "longitude")
    readonly_fields = ("created_at", "updated_at")
    inlines = [BlockSubdivisionInline]


@admin.register(BlockSubdivision)
class BlockSubdivisionAdmin(admin.ModelAdmin):
    list_display = (
        "id",
        "soil_location",
        "block_code",
        "chunk_size_sqm",
        "grid_point_count",
        "centroid_count",
        "status",
        "updated_at",
    )
    list_filter = ("status", "chunk_size_sqm", "created_at")
    search_fields = ("block_code", "soil_location__latitude", "soil_location__longitude")
    readonly_fields = ("created_at", "updated_at")


@admin.register(RemoteSensingRun)
class RemoteSensingRunAdmin(admin.ModelAdmin):
    list_display = (
        "id",
        "soil_location",
        "block_code",
        "provider",
        "chunk_size_sqm",
        "status",
        "temporal_start",
        "temporal_end",
        "created_at",
    )
    list_filter = ("provider", "status", "chunk_size_sqm", "created_at")
    search_fields = ("block_code", "soil_location__latitude", "soil_location__longitude")
    readonly_fields = ("created_at", "updated_at")


@admin.register(AnalysisGridCell)
class AnalysisGridCellAdmin(admin.ModelAdmin):
    list_display = (
        "id",
        "cell_code",
        "soil_location",
        "block_code",
        "chunk_size_sqm",
        "centroid_lat",
        "centroid_lon",
        "created_at",
    )
    list_filter = ("chunk_size_sqm", "created_at")
    search_fields = ("cell_code", "block_code", "soil_location__latitude", "soil_location__longitude")
    readonly_fields = ("created_at", "updated_at")


@admin.register(AnalysisGridObservation)
class AnalysisGridObservationAdmin(admin.ModelAdmin):
    list_display = (
        "id",
        "cell",
        "temporal_start",
        "temporal_end",
        "ndvi",
        "ndwi",
        "lst_c",
        "created_at",
    )
    list_filter = ("temporal_start", "temporal_end", "created_at")
    search_fields = ("cell__cell_code", "cell__block_code")
    readonly_fields = ("created_at", "updated_at")


@admin.register(RemoteSensingSubdivisionResult)
class RemoteSensingSubdivisionResultAdmin(admin.ModelAdmin):
    list_display = (
        "id",
        "soil_location",
        "block_code",
        "cluster_count",
        "chunk_size_sqm",
        "temporal_start",
        "temporal_end",
        "created_at",
    )
    list_filter = ("chunk_size_sqm", "cluster_count", "created_at")
    search_fields = ("block_code", "soil_location__latitude", "soil_location__longitude")
    readonly_fields = ("created_at", "updated_at")


@admin.register(RemoteSensingClusterAssignment)
class RemoteSensingClusterAssignmentAdmin(admin.ModelAdmin):
    list_display = (
        "id",
        "result",
        "cell",
        "cluster_label",
        "created_at",
    )
    list_filter = ("cluster_label", "created_at")
    search_fields = ("cell__cell_code", "result__block_code")
    readonly_fields = ("created_at", "updated_at")
@@ -0,0 +1,18 @@
from functools import cached_property

from django.apps import AppConfig


class SoilDataConfig(AppConfig):
    # Class name kept from the legacy soil_data app; the app label itself
    # is location_data.
    default_auto_field = "django.db.models.BigAutoField"
    name = "location_data"
    verbose_name = "Location Data (Remote Sensing)"

    @cached_property
    def ndvi_health_service(self):
        # Lazy import so the service is only constructed on first access.
        from .ndvi import NdviHealthService

        return NdviHealthService()

    def get_ndvi_health_service(self):
        return self.ndvi_health_service
@@ -0,0 +1,401 @@
from __future__ import annotations

from dataclasses import dataclass
from decimal import Decimal, ROUND_HALF_UP
from io import BytesIO
import math

from django.conf import settings
from django.core.files.base import ContentFile


EARTH_RADIUS_M = 6371008.8
COORD_PRECISION = Decimal("0.000001")
MAX_K = 10
RANDOM_STATE = 42


@dataclass(frozen=True)
class GeoPoint:
    lat: float
    lon: float


def create_or_get_block_subdivision(
    location,
    block_code: str,
    boundary: dict | list,
    *,
    chunk_size_sqm: int | None = None,
):
    """
    Return the existing subdivision for this block if one was already built;
    otherwise run the grid + KMeans algorithm and persist the result.
    """
    from .models import BlockSubdivision

    existing = BlockSubdivision.objects.filter(
        soil_location=location,
        block_code=block_code,
    ).first()
    if existing is not None:
        return existing, False

    payload = build_block_subdivision_payload(
        boundary=boundary,
        block_code=block_code,
        chunk_size_sqm=chunk_size_sqm,
    )
    subdivision = BlockSubdivision.objects.create(
        soil_location=location,
        block_code=block_code,
        source_boundary=payload["source_boundary"],
        chunk_size_sqm=payload["chunk_size_sqm"],
        grid_points=payload["grid_points"],
        centroid_points=payload["centroid_points"],
        grid_point_count=payload["grid_point_count"],
        centroid_count=payload["centroid_count"],
        status="created",
        metadata=payload["metadata"],
    )
    plot_content = render_elbow_plot(
        inertia_curve=payload["metadata"].get("inertia_curve", []),
        optimal_k=payload["metadata"].get("optimal_k", 0),
        block_code=block_code,
    )
    if plot_content is not None:
        subdivision.elbow_plot.save(
            f"{location.pk}_{block_code}_elbow.png",
            plot_content,
            save=False,
        )
        subdivision.save(update_fields=["elbow_plot", "updated_at"])
    sync_block_layout_with_subdivision(location, subdivision)
    return subdivision, True


def build_block_subdivision_payload(
    boundary: dict | list,
    block_code: str = "block-1",
    chunk_size_sqm: int | None = None,
) -> dict:
    """
    Take one block boundary, build the grid of points first, then find the
    optimal cluster count from the KMeans elbow point and return the centroids.
    """
    chunk_size = int(chunk_size_sqm or getattr(settings, "SUBDIVISION_CHUNK_SQM", 100) or 100)
    if chunk_size <= 0:
        raise ValueError("chunk_size_sqm must be greater than zero.")

    polygon = extract_polygon(boundary)
    if len(polygon) < 3:
        raise ValueError("A block boundary needs at least three valid points.")

    projected_polygon = project_polygon_to_local_meters(polygon)
    area_sqm = abs(polygon_area(projected_polygon))
    grid_points, grid_vectors = generate_grid_points(
        polygon=polygon,
        projected_polygon=projected_polygon,
        chunk_size_sqm=chunk_size,
    )
    clustering_result = cluster_grid_points(grid_vectors, polygon)

    return {
        "block_code": block_code,
        "source_boundary": boundary if isinstance(boundary, dict) else {"points": boundary},
        "chunk_size_sqm": chunk_size,
        "grid_points": grid_points,
        "centroid_points": clustering_result["centroid_points"],
        "grid_point_count": len(grid_points),
        "centroid_count": len(clustering_result["centroid_points"]),
        "metadata": {
            "estimated_area_sqm": round(area_sqm, 2),
            "optimal_k": clustering_result["optimal_k"],
            "inertia_curve": clustering_result["inertia_curve"],
        },
    }


def cluster_grid_points(grid_vectors: list[tuple[float, float]], polygon: list[GeoPoint]) -> dict:
    if not grid_vectors:
        return {
            "optimal_k": 0,
            "inertia_curve": [],
            "centroid_points": [],
        }

    if len(grid_vectors) == 1:
        lat, lon = unproject_point(grid_vectors[0][0], grid_vectors[0][1], polygon)
        return {
            "optimal_k": 1,
            "inertia_curve": [{"k": 1, "sse": 0.0}],
            "centroid_points": [
                {
                    "sub_block_code": "sub-block-1",
                    "centroid_lat": quantize_coordinate(lat),
                    "centroid_lon": quantize_coordinate(lon),
                }
            ],
        }

    try:
        from sklearn.cluster import KMeans
    except ImportError as exc:  # pragma: no cover - runtime dependency guard
        raise ImportError("scikit-learn is required to run the subdivision.") from exc

    max_k = min(MAX_K, len(grid_vectors))
    inertia_curve = []
    trained_models = {}
    for k in range(1, max_k + 1):
        model = KMeans(
            n_clusters=k,
            n_init=10,
            random_state=RANDOM_STATE,
        )
        model.fit(grid_vectors)
        trained_models[k] = model
        inertia_curve.append({"k": k, "sse": round(float(model.inertia_), 6)})

    optimal_k = detect_elbow_point(inertia_curve)
    final_model = trained_models[optimal_k]
    centroid_points = []
    for index, center in enumerate(final_model.cluster_centers_, start=1):
        lat, lon = unproject_point(center[0], center[1], polygon)
        centroid_points.append(
            {
                "sub_block_code": f"sub-block-{index}",
                "centroid_lat": quantize_coordinate(lat),
                "centroid_lon": quantize_coordinate(lon),
            }
        )

    return {
        "optimal_k": optimal_k,
        "inertia_curve": inertia_curve,
        "centroid_points": centroid_points,
    }


def detect_elbow_point(inertia_curve: list[dict]) -> int:
    if not inertia_curve:
        return 0
    if len(inertia_curve) <= 2:
        return inertia_curve[-1]["k"] if len(inertia_curve) == 2 else inertia_curve[0]["k"]

    sses = [item["sse"] for item in inertia_curve]
    ks = [item["k"] for item in inertia_curve]
    # Drop in SSE from each K to the next; the elbow is where this drop
    # itself falls off the hardest.
    slopes = [sses[index] - sses[index + 1] for index in range(len(sses) - 1)]

    best_k = ks[0]
    best_change = float("-inf")
    for index in range(len(slopes) - 1):
        change = slopes[index] - slopes[index + 1]
        candidate_k = ks[index + 1]
        if change > best_change:
            best_change = change
            best_k = candidate_k
    return best_k


def render_elbow_plot(
    inertia_curve: list[dict],
    optimal_k: int,
    block_code: str,
) -> ContentFile | None:
    if not inertia_curve:
        return None

    try:
        import matplotlib

        matplotlib.use("Agg")
        import matplotlib.pyplot as plt
    except ImportError as exc:  # pragma: no cover - runtime dependency guard
        raise ImportError("matplotlib is required to save the elbow plot.") from exc

    ks = [item["k"] for item in inertia_curve]
    sses = [item["sse"] for item in inertia_curve]
    buffer = BytesIO()
    fig, ax = plt.subplots(figsize=(8, 5))
    try:
        ax.plot(ks, sses, marker="o", linewidth=2, color="#2f6fed")
        if optimal_k in ks:
            elbow_index = ks.index(optimal_k)
            ax.scatter(
                [ks[elbow_index]],
                [sses[elbow_index]],
                color="#d62828",
                s=90,
                zorder=3,
                label=f"Elbow K={optimal_k}",
            )
            ax.legend()
        ax.set_title(f"Elbow Plot - {block_code}")
        ax.set_xlabel("K")
        ax.set_ylabel("SSE / Inertia")
        ax.grid(True, linestyle="--", linewidth=0.5, alpha=0.6)
        fig.tight_layout()
        fig.savefig(buffer, format="png", dpi=150)
        buffer.seek(0)
        return ContentFile(buffer.getvalue())
    finally:
        buffer.close()
        plt.close(fig)


def sync_block_layout_with_subdivision(location, subdivision) -> None:
    layout = location.block_layout or {}
    blocks = list(layout.get("blocks") or [])
    target_block = None
    for block in blocks:
        if block.get("block_code") == subdivision.block_code:
            target_block = block
            break

    if target_block is None:
        target_block = {
            "block_code": subdivision.block_code,
            "order": len(blocks) + 1,
            "source": "input",
            "needs_subdivision": None,
            "sub_blocks": [],
        }
        blocks.append(target_block)

    target_block["needs_subdivision"] = subdivision.centroid_count > 1
    target_block["sub_blocks"] = list(subdivision.centroid_points or [])
    target_block["subdivision_summary"] = {
        "chunk_size_sqm": subdivision.chunk_size_sqm,
        "grid_point_count": subdivision.grid_point_count,
        "centroid_count": subdivision.centroid_count,
        "optimal_k": (subdivision.metadata or {}).get("optimal_k", subdivision.centroid_count),
    }
    layout["blocks"] = blocks
    layout["algorithm_status"] = "completed"
    location.block_layout = layout
    location.save(update_fields=["block_layout", "updated_at"])


def generate_grid_points(
    polygon: list[GeoPoint],
    projected_polygon: list[tuple[float, float]],
    chunk_size_sqm: int,
) -> tuple[list[dict], list[tuple[float, float]]]:
    step_m = math.sqrt(chunk_size_sqm)
    min_x, max_x, min_y, max_y = bounds(projected_polygon)
    grid_points: list[dict] = []
    grid_vectors: list[tuple[float, float]] = []

    y = min_y + (step_m / 2.0)
    point_index = 0
    while y <= max_y:
        x = min_x + (step_m / 2.0)
        while x <= max_x:
            if point_in_polygon((x, y), projected_polygon):
                lat, lon = unproject_point(x, y, polygon)
                point_index += 1
                grid_vectors.append((x, y))
                grid_points.append(
                    {
                        "point_code": f"pt-{point_index}",
                        "lat": quantize_coordinate(lat),
                        "lon": quantize_coordinate(lon),
                    }
                )
            x += step_m
        y += step_m
    return grid_points, grid_vectors


def extract_polygon(boundary: dict | list) -> list[GeoPoint]:
    if isinstance(boundary, dict):
        if boundary.get("type") == "Polygon":
            coordinates = boundary.get("coordinates") or []
            if coordinates and isinstance(coordinates[0], list):
                points = coordinates[0]
            else:
                points = []
        else:
            points = boundary.get("corners") or []
    elif isinstance(boundary, list):
        points = boundary
    else:
        points = []

    polygon: list[GeoPoint] = []
    for point in points:
        lat = lon = None
        if isinstance(point, dict):
            lat = point.get("lat", point.get("latitude"))
            lon = point.get("lon", point.get("longitude"))
        elif isinstance(point, (list, tuple)) and len(point) >= 2:
            # GeoJSON positions are [lon, lat].
            lon, lat = point[0], point[1]

        if lat is None or lon is None:
            continue
        polygon.append(GeoPoint(lat=float(lat), lon=float(lon)))

    if len(polygon) > 1 and polygon[0] == polygon[-1]:
        polygon = polygon[:-1]
    return polygon


def project_polygon_to_local_meters(polygon: list[GeoPoint]) -> list[tuple[float, float]]:
    # Equirectangular projection around the first vertex; accurate enough
    # at field scale.
    origin = polygon[0]
    lat0 = math.radians(origin.lat)
    lon0 = math.radians(origin.lon)
    cos_lat0 = math.cos(lat0)
    projected = []
    for point in polygon:
        lat = math.radians(point.lat)
        lon = math.radians(point.lon)
        x = (lon - lon0) * cos_lat0 * EARTH_RADIUS_M
        y = (lat - lat0) * EARTH_RADIUS_M
        projected.append((x, y))
    return projected


def unproject_point(x: float, y: float, polygon: list[GeoPoint]) -> tuple[float, float]:
    origin = polygon[0]
    lat0 = math.radians(origin.lat)
    lon0 = math.radians(origin.lon)
    cos_lat0 = math.cos(lat0)
    lat = math.degrees((y / EARTH_RADIUS_M) + lat0)
    lon = math.degrees((x / (EARTH_RADIUS_M * cos_lat0)) + lon0)
    return lat, lon


def polygon_area(points: list[tuple[float, float]]) -> float:
    # Shoelace formula; the sign depends on winding order.
    area = 0.0
    closed = points + [points[0]]
    for index in range(len(points)):
        x1, y1 = closed[index]
        x2, y2 = closed[index + 1]
        area += (x1 * y2) - (x2 * y1)
    return area / 2.0


def bounds(points: list[tuple[float, float]]) -> tuple[float, float, float, float]:
    xs = [point[0] for point in points]
    ys = [point[1] for point in points]
    return min(xs), max(xs), min(ys), max(ys)


def point_in_polygon(point: tuple[float, float], polygon: list[tuple[float, float]]) -> bool:
    # Standard ray-casting test.
    x, y = point
    inside = False
    j = len(polygon) - 1
    for i in range(len(polygon)):
        xi, yi = polygon[i]
        xj, yj = polygon[j]
        intersects = ((yi > y) != (yj > y)) and (
            x < ((xj - xi) * (y - yi) / ((yj - yi) or 1e-12)) + xi
        )
        if intersects:
            inside = not inside
        j = i
    return inside


def quantize_coordinate(value: float) -> float:
    return float(Decimal(str(value)).quantize(COORD_PRECISION, rounding=ROUND_HALF_UP))
@@ -0,0 +1,421 @@
from __future__ import annotations

from dataclasses import dataclass
from typing import Any

from django.db import transaction

from .block_subdivision import detect_elbow_point, render_elbow_plot
from .models import (
    AnalysisGridObservation,
    BlockSubdivision,
    RemoteSensingClusterAssignment,
    RemoteSensingRun,
    RemoteSensingSubdivisionResult,
    SoilLocation,
)


DEFAULT_CLUSTER_FEATURES = [
    "ndvi",
    "ndwi",
    "lst_c",
    "soil_vv_db",
    "dem_m",
    "slope_deg",
]
SUPPORTED_CLUSTER_FEATURES = tuple(DEFAULT_CLUSTER_FEATURES)
DEFAULT_RANDOM_STATE = 42
DEFAULT_MAX_K = 10


class DataDrivenSubdivisionError(Exception):
    """Raised when a remote-sensing-driven subdivision cannot be computed."""


@dataclass
class ClusteringDataset:
    observations: list[AnalysisGridObservation]
    selected_features: list[str]
    raw_feature_rows: list[list[float | None]]
    raw_feature_maps: list[dict[str, float | None]]
    skipped_cell_codes: list[str]
    used_cell_codes: list[str]
    imputed_matrix: list[list[float]]
    scaled_matrix: list[list[float]]
    imputer_statistics: dict[str, float | None]
    scaler_means: dict[str, float]
    scaler_scales: dict[str, float]
    missing_value_counts: dict[str, int]
    skipped_reasons: dict[str, list[str]]


def create_remote_sensing_subdivision_result(
    *,
    location: SoilLocation,
    run: RemoteSensingRun,
    observations: list[AnalysisGridObservation],
    block_subdivision: BlockSubdivision | None = None,
    block_code: str = "",
    selected_features: list[str] | None = None,
    explicit_k: int | None = None,
    max_k: int = DEFAULT_MAX_K,
    random_state: int = DEFAULT_RANDOM_STATE,
) -> RemoteSensingSubdivisionResult:
    """
    Build a data-driven subdivision result from stored remote sensing observations.

    KMeans is applied on actual per-cell feature vectors, not geometric points.
    """
    dataset = build_clustering_dataset(
        observations=observations,
        selected_features=selected_features,
    )
    if not dataset.observations:
        raise DataDrivenSubdivisionError("No usable observations remained for clustering.")

    optimal_k, inertia_curve = choose_cluster_count(
        scaled_matrix=dataset.scaled_matrix,
        explicit_k=explicit_k,
        max_k=max_k,
        random_state=random_state,
    )
    cluster_selection_strategy = "explicit_k" if explicit_k is not None else "elbow"
    labels = run_kmeans_labels(
        scaled_matrix=dataset.scaled_matrix,
        cluster_count=optimal_k,
        random_state=random_state,
    )
    cluster_summaries = build_cluster_summaries(
        observations=dataset.observations,
        labels=labels,
    )

    with transaction.atomic():
        result, _created = RemoteSensingSubdivisionResult.objects.update_or_create(
            run=run,
            defaults={
                "soil_location": location,
                "block_subdivision": block_subdivision,
                "block_code": block_code,
                "chunk_size_sqm": run.chunk_size_sqm,
                "temporal_start": run.temporal_start,
                "temporal_end": run.temporal_end,
                "cluster_count": optimal_k,
                "selected_features": dataset.selected_features,
                "skipped_cell_codes": dataset.skipped_cell_codes,
                "metadata": {
                    "cell_count": len(observations),
                    "used_cell_count": len(dataset.observations),
                    "skipped_cell_count": len(dataset.skipped_cell_codes),
                    "used_cell_codes": dataset.used_cell_codes,
                    "skipped_reasons": dataset.skipped_reasons,
                    "selected_features": dataset.selected_features,
                    "imputer_strategy": "median",
                    "imputer_statistics": dataset.imputer_statistics,
                    "missing_value_counts": dataset.missing_value_counts,
                    "scaler_means": dataset.scaler_means,
                    "scaler_scales": dataset.scaler_scales,
                    "kmeans_params": {
                        "random_state": random_state,
                        "explicit_k": explicit_k,
                        "selected_k": optimal_k,
                        "max_k": max_k,
                        "n_init": 10,
                        "selection_strategy": cluster_selection_strategy,
                    },
                    "inertia_curve": inertia_curve,
                    "cluster_summaries": cluster_summaries,
                },
            },
        )
        result.assignments.all().delete()
        assignment_rows = []
        for index, observation in enumerate(dataset.observations):
            assignment_rows.append(
                RemoteSensingClusterAssignment(
                    result=result,
                    cell=observation.cell,
                    cluster_label=int(labels[index]),
                    raw_feature_values=dataset.raw_feature_maps[index],
                    scaled_feature_values={
                        feature_name: round(dataset.scaled_matrix[index][feature_index], 6)
                        for feature_index, feature_name in enumerate(dataset.selected_features)
                    },
                )
            )
        RemoteSensingClusterAssignment.objects.bulk_create(assignment_rows)
        sync_location_block_layout_with_result(
            location=location,
            result=result,
            cluster_summaries=cluster_summaries,
        )
        if block_subdivision is not None:
            metadata = dict(block_subdivision.metadata or {})
            metadata["data_driven_subdivision"] = {
                "run_id": run.id,
                "cluster_count": optimal_k,
                "used_cell_count": len(dataset.observations),
                "skipped_cell_count": len(dataset.skipped_cell_codes),
            }
            block_subdivision.metadata = metadata
            plot_content = render_elbow_plot(
                inertia_curve=inertia_curve,
                optimal_k=optimal_k,
                block_code=block_code or block_subdivision.block_code,
            )
            if plot_content is not None:
                block_subdivision.elbow_plot.save(
                    f"remote-sensing-{location.pk}-{block_code or block_subdivision.block_code}-elbow.png",
                    plot_content,
                    save=False,
                )
                block_subdivision.save(update_fields=["metadata", "elbow_plot", "updated_at"])
            else:
                block_subdivision.save(update_fields=["metadata", "updated_at"])
    return result


def build_clustering_dataset(
    *,
    observations: list[AnalysisGridObservation],
    selected_features: list[str] | None = None,
) -> ClusteringDataset:
    selected_features = list(selected_features or DEFAULT_CLUSTER_FEATURES)
    invalid_features = [
        feature_name
        for feature_name in selected_features
        if feature_name not in SUPPORTED_CLUSTER_FEATURES
    ]
    if invalid_features:
        raise DataDrivenSubdivisionError(
            "Invalid clustering features: " + ", ".join(sorted(invalid_features))
        )
    raw_rows: list[list[float | None]] = []
    raw_maps: list[dict[str, float | None]] = []
    usable_observations: list[AnalysisGridObservation] = []
    skipped_cell_codes: list[str] = []
    used_cell_codes: list[str] = []
    missing_value_counts = {feature_name: 0 for feature_name in selected_features}
    skipped_reasons = {"all_features_missing": []}

    for observation in observations:
        feature_map = {
            feature_name: _coerce_float(getattr(observation, feature_name, None))
            for feature_name in selected_features
        }
        for feature_name, value in feature_map.items():
            if value is None:
                missing_value_counts[feature_name] += 1
        if all(value is None for value in feature_map.values()):
            # A cell with no data at all cannot be imputed meaningfully; skip it.
            skipped_cell_codes.append(observation.cell.cell_code)
            skipped_reasons["all_features_missing"].append(observation.cell.cell_code)
            continue
        usable_observations.append(observation)
        used_cell_codes.append(observation.cell.cell_code)
        raw_maps.append(feature_map)
        raw_rows.append([feature_map[feature_name] for feature_name in selected_features])

    if not usable_observations:
        return ClusteringDataset(
            observations=[],
            selected_features=selected_features,
            raw_feature_rows=[],
            raw_feature_maps=[],
            skipped_cell_codes=skipped_cell_codes,
            used_cell_codes=[],
            imputed_matrix=[],
            scaled_matrix=[],
            imputer_statistics={feature_name: None for feature_name in selected_features},
            scaler_means={feature_name: 0.0 for feature_name in selected_features},
            scaler_scales={feature_name: 1.0 for feature_name in selected_features},
            missing_value_counts=missing_value_counts,
            skipped_reasons=skipped_reasons,
        )

    try:
        import numpy as np
        from sklearn.impute import SimpleImputer
        from sklearn.preprocessing import StandardScaler
    except ImportError as exc:  # pragma: no cover - runtime dependency guard
        raise DataDrivenSubdivisionError(
            "scikit-learn and numpy are required for data-driven clustering."
        ) from exc

    raw_matrix = np.array(raw_rows, dtype=float)
    imputer = SimpleImputer(strategy="median")
    imputed_matrix = imputer.fit_transform(raw_matrix)
    scaler = StandardScaler()
    scaled_matrix = scaler.fit_transform(imputed_matrix)

    return ClusteringDataset(
        observations=usable_observations,
        selected_features=selected_features,
        raw_feature_rows=raw_rows,
        raw_feature_maps=raw_maps,
        skipped_cell_codes=skipped_cell_codes,
        used_cell_codes=used_cell_codes,
        imputed_matrix=imputed_matrix.tolist(),
        scaled_matrix=scaled_matrix.tolist(),
        imputer_statistics={
            feature_name: _coerce_float(imputer.statistics_[index])
            for index, feature_name in enumerate(selected_features)
        },
        scaler_means={
            feature_name: float(scaler.mean_[index])
            for index, feature_name in enumerate(selected_features)
        },
        scaler_scales={
            feature_name: float(scaler.scale_[index] or 1.0)
            for index, feature_name in enumerate(selected_features)
        },
        missing_value_counts=missing_value_counts,
        skipped_reasons=skipped_reasons,
    )


def choose_cluster_count(
    *,
    scaled_matrix: list[list[float]],
    explicit_k: int | None,
    max_k: int,
    random_state: int,
) -> tuple[int, list[dict[str, float]]]:
    sample_count = len(scaled_matrix)
    if sample_count == 0:
        raise DataDrivenSubdivisionError("There are no samples to cluster.")
    if sample_count == 1:
        return 1, [{"k": 1, "sse": 0.0}]

    if explicit_k is not None:
        if explicit_k <= 0:
            raise DataDrivenSubdivisionError("cluster_count must be greater than zero.")
        return min(explicit_k, sample_count), []

    try:
        from sklearn.cluster import KMeans
    except ImportError as exc:  # pragma: no cover
        raise DataDrivenSubdivisionError("scikit-learn is required to choose the cluster count.") from exc

    max_allowed_k = min(max_k, sample_count)
    inertia_curve = []
    for k in range(1, max_allowed_k + 1):
        model = KMeans(n_clusters=k, n_init=10, random_state=random_state)
        model.fit(scaled_matrix)
        inertia_curve.append({"k": k, "sse": round(float(model.inertia_), 6)})
    return detect_elbow_point(inertia_curve), inertia_curve


def run_kmeans_labels(
    *,
    scaled_matrix: list[list[float]],
    cluster_count: int,
    random_state: int,
) -> list[int]:
    if cluster_count <= 0:
        raise DataDrivenSubdivisionError("cluster_count must be greater than zero.")
    if len(scaled_matrix) == 1:
        return [0]
    try:
        from sklearn.cluster import KMeans
    except ImportError as exc:  # pragma: no cover
        raise DataDrivenSubdivisionError("scikit-learn is required to run KMeans.") from exc
    model = KMeans(n_clusters=cluster_count, n_init=10, random_state=random_state)
    return [int(label) for label in model.fit_predict(scaled_matrix)]


def build_cluster_summaries(
    *,
    observations: list[AnalysisGridObservation],
    labels: list[int],
) -> list[dict[str, Any]]:
    clusters: dict[int, dict[str, Any]] = {}
    for observation, label in zip(observations, labels):
        cluster = clusters.setdefault(
            int(label),
            {
                "cluster_label": int(label),
                "cell_codes": [],
                "centroid_lat_sum": 0.0,
                "centroid_lon_sum": 0.0,
                "cell_count": 0,
            },
        )
        cluster["cell_codes"].append(observation.cell.cell_code)
        cluster["centroid_lat_sum"] += float(observation.cell.centroid_lat)
        cluster["centroid_lon_sum"] += float(observation.cell.centroid_lon)
        cluster["cell_count"] += 1

    summaries = []
    for cluster_label in sorted(clusters):
        cluster = clusters[cluster_label]
        cell_count = cluster["cell_count"] or 1
        summaries.append(
            {
                "cluster_label": cluster_label,
                "cell_count": cluster["cell_count"],
                "centroid_lat": round(cluster["centroid_lat_sum"] / cell_count, 6),
                "centroid_lon": round(cluster["centroid_lon_sum"] / cell_count, 6),
                "cell_codes": cluster["cell_codes"],
            }
        )
    return summaries


def sync_location_block_layout_with_result(
    *,
    location: SoilLocation,
    result: RemoteSensingSubdivisionResult,
    cluster_summaries: list[dict[str, Any]],
) -> None:
    layout = dict(location.block_layout or {})
    blocks = list(layout.get("blocks") or [])
    target_block = None
    for block in blocks:
        if block.get("block_code") == result.block_code:
            target_block = block
            break

    if target_block is None:
        target_block = {
            "block_code": result.block_code,
            "order": len(blocks) + 1,
            "source": "remote_sensing",
            "needs_subdivision": None,
            "sub_blocks": [],
        }
        blocks.append(target_block)

    target_block["needs_subdivision"] = result.cluster_count > 1
    target_block["sub_blocks"] = [
        {
            "sub_block_code": f"cluster-{cluster['cluster_label']}",
            "cluster_label": cluster["cluster_label"],
            "centroid_lat": cluster["centroid_lat"],
            "centroid_lon": cluster["centroid_lon"],
            "cell_count": cluster["cell_count"],
        }
        for cluster in cluster_summaries
    ]
    target_block["subdivision_summary"] = {
        "type": "data_driven_remote_sensing",
        "cluster_count": result.cluster_count,
        "selected_features": result.selected_features,
        "used_cell_count": result.metadata.get("used_cell_count", 0),
        "skipped_cell_count": result.metadata.get("skipped_cell_count", 0),
        "run_id": result.run_id,
    }
    layout["blocks"] = blocks
    layout["algorithm_status"] = "completed"
    location.block_layout = layout
    location.save(update_fields=["block_layout", "updated_at"])


def _coerce_float(value: Any) -> float | None:
    if value is None:
        return None
    try:
        return float(value)
    except (TypeError, ValueError):
        return None
@@ -0,0 +1,327 @@
from __future__ import annotations

from decimal import Decimal
import math

from django.conf import settings
from django.db import transaction

from .block_subdivision import (
    GeoPoint,
    bounds,
    extract_polygon,
    point_in_polygon,
    project_polygon_to_local_meters,
    quantize_coordinate,
    unproject_point,
)
from .models import AnalysisGridCell, BlockSubdivision, SoilLocation


def create_or_get_analysis_grid_cells(
    location: SoilLocation,
    *,
    boundary: dict | list | None = None,
    block_code: str | None = None,
    block_subdivision: BlockSubdivision | None = None,
    chunk_size_sqm: int | None = None,
) -> dict:
    """
    Build the 30x30 meter (or any configured chunk size) analysis grid for the
    farm/block and persist AnalysisGridCell records idempotently.
    """
    normalized_chunk_size = int(
        chunk_size_sqm or getattr(settings, "SUBDIVISION_CHUNK_SQM", 900) or 900
    )
    if normalized_chunk_size <= 0:
        raise ValueError("chunk_size_sqm must be greater than zero.")

    resolved_block_code = str(block_code or getattr(block_subdivision, "block_code", "") or "").strip()
    resolved_boundary = _resolve_boundary(
        location=location,
        boundary=boundary,
        block_subdivision=block_subdivision,
    )
    polygon = extract_polygon(resolved_boundary)
    if len(polygon) < 3:
        raise ValueError("Building the analysis grid requires at least three valid boundary points.")

    existing_qs = AnalysisGridCell.objects.filter(
        soil_location=location,
        block_code=resolved_block_code,
        chunk_size_sqm=normalized_chunk_size,
    ).order_by("cell_code")
    existing_count = existing_qs.count()
    if existing_count:
        return {
            "created_count": 0,
            "existing_count": existing_count,
            "total_count": existing_count,
            "chunk_size_sqm": normalized_chunk_size,
            "block_code": resolved_block_code,
            "created": False,
        }

    cell_payloads = build_analysis_grid_payload(
        polygon=polygon,
        location=location,
        block_code=resolved_block_code,
        chunk_size_sqm=normalized_chunk_size,
    )

    created_cells = []
    with transaction.atomic():
        for payload in cell_payloads:
            created_cells.append(
                AnalysisGridCell.objects.create(
                    soil_location=location,
                    block_subdivision=block_subdivision,
                    block_code=resolved_block_code,
                    cell_code=payload["cell_code"],
                    chunk_size_sqm=normalized_chunk_size,
                    geometry=payload["geometry"],
                    centroid_lat=Decimal(str(payload["centroid_lat"])),
                    centroid_lon=Decimal(str(payload["centroid_lon"])),
                )
            )
        _update_grid_summary_metadata(
            location=location,
            block_code=resolved_block_code,
            chunk_size_sqm=normalized_chunk_size,
            total_count=len(created_cells),
            block_subdivision=block_subdivision,
        )

    return {
        "created_count": len(created_cells),
        "existing_count": 0,
        "total_count": len(created_cells),
        "chunk_size_sqm": normalized_chunk_size,
        "block_code": resolved_block_code,
        "created": True,
    }


def build_analysis_grid_payload(
    *,
    polygon: list[GeoPoint],
    location: SoilLocation,
    block_code: str,
    chunk_size_sqm: int,
) -> list[dict]:
    projected_polygon = project_polygon_to_local_meters(polygon)
    step_m = math.sqrt(chunk_size_sqm)
    min_x, max_x, min_y, max_y = bounds(projected_polygon)

    payloads: list[dict] = []
    row_index = 0
    y = min_y
    while y < max_y:
        col_index = 0
        x = min_x
        while x < max_x:
            cell_polygon = [
                (x, y),
                (x + step_m, y),
                (x + step_m, y + step_m),
                (x, y + step_m),
            ]
            if _cell_intersects_polygon(cell_polygon, projected_polygon):
                payloads.append(
                    _build_cell_payload(
                        location=location,
                        block_code=block_code,
                        chunk_size_sqm=chunk_size_sqm,
                        polygon=polygon,
                        cell_polygon=cell_polygon,
                        row_index=row_index,
                        col_index=col_index,
                    )
                )
            col_index += 1
            x += step_m
        row_index += 1
        y += step_m
    return payloads


def _build_cell_payload(
    *,
    location: SoilLocation,
    block_code: str,
    chunk_size_sqm: int,
    polygon: list[GeoPoint],
    cell_polygon: list[tuple[float, float]],
    row_index: int,
    col_index: int,
) -> dict:
    closed_polygon = cell_polygon + [cell_polygon[0]]
    geometry_coordinates = []
    for x, y in closed_polygon:
        lat, lon = unproject_point(x, y, polygon)
        geometry_coordinates.append(
            [quantize_coordinate(lon), quantize_coordinate(lat)]
        )

    centroid_x = sum(point[0] for point in cell_polygon) / len(cell_polygon)
    centroid_y = sum(point[1] for point in cell_polygon) / len(cell_polygon)
    centroid_lat, centroid_lon = unproject_point(centroid_x, centroid_y, polygon)
    return {
        "cell_code": build_analysis_cell_code(
            location_id=location.id,
            block_code=block_code,
            chunk_size_sqm=chunk_size_sqm,
            row_index=row_index,
            col_index=col_index,
        ),
        "geometry": {
            "type": "Polygon",
            "coordinates": [geometry_coordinates],
        },
        "centroid_lat": quantize_coordinate(centroid_lat),
        "centroid_lon": quantize_coordinate(centroid_lon),
    }


def build_analysis_cell_code(
    *,
    location_id: int | None,
    block_code: str,
    chunk_size_sqm: int,
    row_index: int,
    col_index: int,
) -> str:
    block_segment = block_code or "farm"
    location_segment = location_id if location_id is not None else "new"
    return (
        f"loc-{location_segment}__"
        f"block-{block_segment}__"
        f"chunk-{chunk_size_sqm}__"
        f"r{row_index:04d}c{col_index:04d}"
    )


def _resolve_boundary(
    *,
    location: SoilLocation,
    boundary: dict | list | None,
    block_subdivision: BlockSubdivision | None,
) -> dict | list:
    if boundary:
        return boundary
    if block_subdivision is not None and block_subdivision.source_boundary:
        return block_subdivision.source_boundary
    if location.farm_boundary:
        return location.farm_boundary
    raise ValueError("No valid boundary was found to build the analysis grid.")


def _cell_intersects_polygon(
    cell_polygon: list[tuple[float, float]],
    polygon: list[tuple[float, float]],
) -> bool:
    # A cell overlaps the block polygon if any cell corner is inside the
    # polygon, any polygon vertex is inside the cell, or any pair of edges
    # crosses.
    if any(point_in_polygon(point, polygon) for point in cell_polygon):
        return True

    for polygon_point in polygon:
        if _point_in_rect(polygon_point, cell_polygon):
            return True

    cell_edges = _polygon_edges(cell_polygon)
    polygon_edges = _polygon_edges(polygon)
    for edge_a in cell_edges:
        for edge_b in polygon_edges:
            if _segments_intersect(edge_a[0], edge_a[1], edge_b[0], edge_b[1]):
                return True
    return False


def _point_in_rect(point: tuple[float, float], rect: list[tuple[float, float]]) -> bool:
    xs = [vertex[0] for vertex in rect]
    ys = [vertex[1] for vertex in rect]
    return min(xs) <= point[0] <= max(xs) and min(ys) <= point[1] <= max(ys)


def _polygon_edges(points: list[tuple[float, float]]) -> list[tuple[tuple[float, float], tuple[float, float]]]:
    closed = points + [points[0]]
    return [
        (closed[index], closed[index + 1])
        for index in range(len(points))
    ]


def _segments_intersect(
    p1: tuple[float, float],
    p2: tuple[float, float],
    q1: tuple[float, float],
    q2: tuple[float, float],
) -> bool:
    # Classic orientation-based segment intersection test, including the
    # collinear edge cases.
    o1 = _orientation(p1, p2, q1)
    o2 = _orientation(p1, p2, q2)
    o3 = _orientation(q1, q2, p1)
    o4 = _orientation(q1, q2, p2)

    if o1 != o2 and o3 != o4:
        return True
    if o1 == 0 and _on_segment(p1, q1, p2):
        return True
    if o2 == 0 and _on_segment(p1, q2, p2):
        return True
    if o3 == 0 and _on_segment(q1, p1, q2):
        return True
    if o4 == 0 and _on_segment(q1, p2, q2):
        return True
    return False


def _orientation(a: tuple[float, float], b: tuple[float, float], c: tuple[float, float]) -> int:
    value = ((b[1] - a[1]) * (c[0] - b[0])) - ((b[0] - a[0]) * (c[1] - b[1]))
    if abs(value) < 1e-9:
        return 0  # collinear
    return 1 if value > 0 else 2  # 1 = clockwise, 2 = counterclockwise


def _on_segment(a: tuple[float, float], b: tuple[float, float], c: tuple[float, float]) -> bool:
    return (
        min(a[0], c[0]) <= b[0] <= max(a[0], c[0])
        and min(a[1], c[1]) <= b[1] <= max(a[1], c[1])
    )


def _update_grid_summary_metadata(
    *,
    location: SoilLocation,
    block_code: str,
    chunk_size_sqm: int,
    total_count: int,
    block_subdivision: BlockSubdivision | None,
) -> None:
    if block_subdivision is not None:
        metadata = dict(block_subdivision.metadata or {})
        metadata["analysis_grid"] = {
            "chunk_size_sqm": chunk_size_sqm,
            "cell_count": total_count,
        }
        block_subdivision.metadata = metadata
        block_subdivision.save(update_fields=["metadata", "updated_at"])

    layout = dict(location.block_layout or {})
    blocks = list(layout.get("blocks") or [])
    for block in blocks:
        if block.get("block_code") == block_code:
            block["analysis_grid_summary"] = {
                "chunk_size_sqm": chunk_size_sqm,
                "cell_count": total_count,
            }
            break
    else:
        # Farm-level grid (no block code): store the summary at the layout root.
        if not block_code:
            layout["analysis_grid_summary"] = {
                "chunk_size_sqm": chunk_size_sqm,
                "cell_count": total_count,
            }

    if blocks:
        layout["blocks"] = blocks
    location.block_layout = layout
    location.save(update_fields=["block_layout", "updated_at"])
@@ -0,0 +1,32 @@
"""
Management command: one-off rename of the app label from soil_data to
location_data in the database. Run it once before running migrate:

    python manage.py rename_soil_data_label
    python manage.py migrate
"""
from django.core.management.base import BaseCommand
from django.db import connection


class Command(BaseCommand):
    help = "Rename app label from soil_data to location_data in django_migrations and django_content_type"

    def handle(self, *args, **options):
        with connection.cursor() as cursor:
            cursor.execute(
                "UPDATE django_migrations SET app = %s WHERE app = %s",
                ["location_data", "soil_data"],
            )
            migrations_updated = cursor.rowcount
            cursor.execute(
                "UPDATE django_content_type SET app_label = %s WHERE app_label = %s",
                ["location_data", "soil_data"],
            )
            content_types_updated = cursor.rowcount

        self.stdout.write(
            self.style.SUCCESS(
                f"Done. django_migrations rows updated: {migrations_updated}, "
                f"django_content_type rows updated: {content_types_updated}"
            )
        )
@@ -0,0 +1,35 @@
from django.core.management.base import BaseCommand
from django.db import connection


class Command(BaseCommand):
    help = "Rename legacy soil_data tables to location_data tables when needed"

    def handle(self, *args, **options):
        table_map = {
            "soil_data_soillocation": "location_data_soillocation",
            "soil_data_soildepthdata": "location_data_soildepthdata",
        }

        existing_tables = set(connection.introspection.table_names())
        renamed: list[str] = []

        with connection.cursor() as cursor:
            for old_name, new_name in table_map.items():
                if new_name in existing_tables:
                    continue
                if old_name not in existing_tables:
                    continue

                # RENAME TABLE with backticks is MySQL/MariaDB syntax; the
                # names come from the fixed map above, not from user input.
                cursor.execute(f"RENAME TABLE `{old_name}` TO `{new_name}`")
                renamed.append(f"{old_name} -> {new_name}")
                existing_tables.discard(old_name)
                existing_tables.add(new_name)

        if renamed:
            self.stdout.write(
                self.style.SUCCESS("Renamed legacy tables: " + ", ".join(renamed))
            )
            return

        self.stdout.write("No legacy location_data tables needed repair.")
@@ -0,0 +1,34 @@
# Generated manually for location_data

from django.db import migrations, models


class Migration(migrations.Migration):

    initial = True

    dependencies = []

    operations = [
        migrations.CreateModel(
            name="SoilLocation",
            fields=[
                ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
                ("latitude", models.DecimalField(db_index=True, decimal_places=6, help_text="Latitude (lat)", max_digits=9)),
                ("longitude", models.DecimalField(db_index=True, decimal_places=6, help_text="Longitude (lon)", max_digits=9)),
                ("depth_0_5cm", models.JSONField(blank=True, help_text="Data for the 0–5 cm layer from the SoilGrids API", null=True)),
                ("depth_5_15cm", models.JSONField(blank=True, help_text="Data for the 5–15 cm layer from the SoilGrids API", null=True)),
                ("depth_15_30cm", models.JSONField(blank=True, help_text="Data for the 15–30 cm layer from the SoilGrids API", null=True)),
                ("task_id", models.CharField(blank=True, help_text="Id of the Celery task currently processing", max_length=255)),
                ("created_at", models.DateTimeField(auto_now_add=True)),
                ("updated_at", models.DateTimeField(auto_now=True)),
            ],
            options={
                "ordering": ["-updated_at"],
            },
        ),
        migrations.AddConstraint(
            model_name="soillocation",
            constraint=models.UniqueConstraint(fields=("latitude", "longitude"), name="soil_location_unique_lat_lon"),
        ),
    ]
@@ -0,0 +1,77 @@
# Generated manually: refactor to SoilDepthData table

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ("location_data", "0001_initial"),
    ]

    operations = [
        migrations.CreateModel(
            name="SoilDepthData",
            fields=[
                ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
                (
                    "depth_label",
                    models.CharField(
                        choices=[
                            ("0-5cm", "۰–۵ سانتیمتر"),
                            ("5-15cm", "۵–۱۵ سانتیمتر"),
                            ("15-30cm", "۱۵–۳۰ سانتیمتر"),
                        ],
                        db_index=True,
                        max_length=10,
                    ),
                ),
                ("bdod", models.FloatField(blank=True, null=True)),
                ("cec", models.FloatField(blank=True, null=True)),
                ("cfvo", models.FloatField(blank=True, null=True)),
                ("clay", models.FloatField(blank=True, null=True)),
                ("nitrogen", models.FloatField(blank=True, null=True)),
                ("ocd", models.FloatField(blank=True, null=True)),
                ("ocs", models.FloatField(blank=True, null=True)),
                ("phh2o", models.FloatField(blank=True, null=True)),
                ("sand", models.FloatField(blank=True, null=True)),
                ("silt", models.FloatField(blank=True, null=True)),
                ("soc", models.FloatField(blank=True, null=True)),
                ("wv0010", models.FloatField(blank=True, null=True)),
                ("wv0033", models.FloatField(blank=True, null=True)),
                ("wv1500", models.FloatField(blank=True, null=True)),
                ("created_at", models.DateTimeField(auto_now_add=True)),
                (
                    "soil_location",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="depths",
                        to="location_data.soillocation",
                    ),
                ),
            ],
            options={
                "ordering": ["soil_location", "depth_label"],
            },
        ),
        migrations.AddConstraint(
            model_name="soildepthdata",
            constraint=models.UniqueConstraint(
                fields=("soil_location", "depth_label"),
                name="soil_depth_unique_location_depth",
            ),
        ),
        migrations.RemoveField(
            model_name="soillocation",
            name="depth_0_5cm",
        ),
        migrations.RemoveField(
            model_name="soillocation",
            name="depth_5_15cm",
        ),
        migrations.RemoveField(
            model_name="soillocation",
            name="depth_15_30cm",
        ),
    ]
@@ -0,0 +1,23 @@
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("location_data", "0001_initial"),
    ]

    operations = [
        migrations.AddField(
            model_name="soillocation",
            name="ideal_sensor_profile",
            field=models.JSONField(
                blank=True,
                default=dict,
                help_text=(
                    "پروفایل ایدهآل سنسورها برای این مزرعه/لوکیشن. "
                    'نمونه: {"moisture": {"ideal": 0.65, "min": 0.50, "max": 0.80}}'
                ),
            ),
        ),
    ]
@@ -0,0 +1,17 @@
from django.db import migrations


class Migration(migrations.Migration):
    """
    Marker for the app label change from soil_data to location_data.

    Run the following command before applying this migration:

        python manage.py rename_soil_data_label
    """

    dependencies = [
        ("location_data", "0002_soildepthdata_refactor"),
    ]

    operations = []
@@ -0,0 +1,23 @@
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("location_data", "0003_rename_app_label"),
    ]

    operations = [
        migrations.AddField(
            model_name="soillocation",
            name="farm_boundary",
            field=models.JSONField(
                blank=True,
                default=dict,
                help_text=(
                    "مرز مزرعه برای درخواستهای سنجشازدور. "
                    'میتواند GeoJSON polygon یا bbox مثل {"type": "Polygon", "coordinates": [...]} باشد.'
                ),
            ),
        ),
    ]
@@ -0,0 +1,14 @@
# Generated by Django 5.1.15 on 2026-03-27 08:40

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('location_data', '0002_soillocation_ideal_sensor_profile'),
        ('location_data', '0004_soillocation_farm_boundary'),
    ]

    operations = [
    ]
@@ -0,0 +1,15 @@
from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ("location_data", "0005_merge_20260327_0840"),
    ]

    operations = [
        migrations.RemoveField(
            model_name="soillocation",
            name="ideal_sensor_profile",
        ),
    ]
@@ -0,0 +1,46 @@
from django.db import migrations, models
import django.db.models.deletion  # required by the ForeignKey below; missing in the original hunk


class Migration(migrations.Migration):

    dependencies = [
        ("location_data", "0006_remove_soillocation_ideal_sensor_profile"),
    ]

    operations = [
        migrations.CreateModel(
            name="NdviObservation",
            fields=[
                ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
                ("observation_date", models.DateField(db_index=True)),
                ("mean_ndvi", models.FloatField()),
                ("ndvi_map", models.JSONField(blank=True, default=dict)),
                ("vegetation_health_class", models.CharField(max_length=64)),
                ("satellite_source", models.CharField(default="sentinel-2", max_length=64)),
                ("cloud_cover", models.FloatField(blank=True, null=True)),
                ("metadata", models.JSONField(blank=True, default=dict)),
                ("created_at", models.DateTimeField(auto_now_add=True, db_index=True)),
                (
                    "location",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="ndvi_observations",
                        to="location_data.soillocation",
                    ),
                ),
            ],
            options={
                "verbose_name": "NDVI Observation",
                "verbose_name_plural": "NDVI Observations",
                "db_table": "dashboard_data_ndviobservation",
                "ordering": ["-observation_date", "-created_at"],
                "constraints": [
                    models.UniqueConstraint(
                        fields=("location", "observation_date", "satellite_source"),
                        name="ndvi_unique_location_date_source",
                    ),
                ],
            },
        ),
    ]
@@ -0,0 +1,45 @@
from django.db import migrations, models


def build_default_layout():
    return {
        "input_block_count": 1,
        "default_full_farm": True,
        "algorithm_status": "pending",
        "blocks": [
            {
                "block_code": "block-1",
                "order": 1,
                "source": "default",
                "needs_subdivision": None,
                "sub_blocks": [],
            }
        ],
    }


class Migration(migrations.Migration):

    dependencies = [
        ("location_data", "0007_ndviobservation"),
    ]

    operations = [
        migrations.AddField(
            model_name="soillocation",
            name="block_layout",
            field=models.JSONField(
                blank=True,
                default=build_default_layout,
                help_text="ساختار بلوکهای زمین. بهصورت پیشفرض کل زمین یک بلوک است و بعداً الگوریتم میتواند برای هر بلوک زیربلوک تعریف کند.",
            ),
        ),
        migrations.AddField(
            model_name="soillocation",
            name="input_block_count",
            field=models.PositiveIntegerField(
                default=1,
                help_text="تعداد بلوکهای اولیهای که کشاورز برای زمین ثبت میکند.",
            ),
        ),
    ]
@@ -0,0 +1,38 @@
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("location_data", "0008_soillocation_block_layout"),
    ]

    operations = [
        migrations.CreateModel(
            name="BlockSubdivision",
            fields=[
                ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
                ("block_code", models.CharField(help_text="شناسه بلوکی که این خردسازی برای آن انجام شده است.", max_length=64)),
                ("source_boundary", models.JSONField(blank=True, default=dict, help_text="مرز همان بلوکی که به سرویس subdivision داده شده است.")),
                ("chunk_size_sqm", models.PositiveIntegerField(default=100, help_text="اندازه هر chunk به متر مربع.")),
                ("grid_points", models.JSONField(blank=True, default=list, help_text="نقاط اولیه شبکه داخل مرز بلوک.")),
                ("centroid_points", models.JSONField(blank=True, default=list, help_text="مراکز نهایی بخشهای خردشده.")),
                ("grid_point_count", models.PositiveIntegerField(default=0)),
                ("centroid_count", models.PositiveIntegerField(default=0)),
                ("status", models.CharField(default="created", help_text="وضعیت تولید subdivision برای این بلوک.", max_length=32)),
                ("metadata", models.JSONField(blank=True, default=dict)),
                ("created_at", models.DateTimeField(auto_now_add=True)),
                ("updated_at", models.DateTimeField(auto_now=True)),
                ("soil_location", models.ForeignKey(on_delete=models.deletion.CASCADE, related_name="block_subdivisions", to="location_data.soillocation")),
            ],
            options={
                "ordering": ["soil_location", "block_code", "-updated_at"],
                "verbose_name": "خردسازی بلوک",
                "verbose_name_plural": "خردسازی بلوکها",
            },
        ),
        migrations.AddConstraint(
            model_name="blocksubdivision",
            constraint=models.UniqueConstraint(fields=("soil_location", "block_code"), name="location_block_subdivision_unique_location_block_code"),
        ),
    ]
@@ -0,0 +1,21 @@
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("location_data", "0009_blocksubdivision"),
    ]

    operations = [
        migrations.AddField(
            model_name="blocksubdivision",
            name="elbow_plot",
            field=models.ImageField(
                blank=True,
                help_text="تصویر نمودار elbow برای انتخاب تعداد بهینه خوشهها.",
                null=True,
                upload_to="location_data/elbow_plots/",
            ),
        ),
    ]
@@ -0,0 +1,110 @@
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("location_data", "0010_blocksubdivision_elbow_plot"),
    ]

    operations = [
        migrations.CreateModel(
            name="AnalysisGridCell",
            fields=[
                ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
                ("block_code", models.CharField(blank=True, db_index=True, default="", help_text="شناسه بلوکی که این سلول به آن تعلق دارد.", max_length=64)),
                ("cell_code", models.CharField(help_text="شناسه یکتای سلول تحلیل.", max_length=128, unique=True)),
                ("chunk_size_sqm", models.PositiveIntegerField(db_index=True, default=900, help_text="اندازه سلول تحلیل به متر مربع.")),
                ("geometry", models.JSONField(blank=True, default=dict, help_text="هندسه سلول به صورت GeoJSON polygon یا ساختار مشابه.")),
                ("centroid_lat", models.DecimalField(db_index=True, decimal_places=6, help_text="عرض جغرافیایی مرکز سلول.", max_digits=9)),
                ("centroid_lon", models.DecimalField(db_index=True, decimal_places=6, help_text="طول جغرافیایی مرکز سلول.", max_digits=9)),
                ("created_at", models.DateTimeField(auto_now_add=True, db_index=True)),
                ("updated_at", models.DateTimeField(auto_now=True)),
                ("block_subdivision", models.ForeignKey(blank=True, null=True, on_delete=models.deletion.SET_NULL, related_name="analysis_grid_cells", to="location_data.blocksubdivision")),
                ("soil_location", models.ForeignKey(on_delete=models.deletion.CASCADE, related_name="analysis_grid_cells", to="location_data.soillocation")),
            ],
            options={
                "verbose_name": "analysis grid cell",
                "verbose_name_plural": "analysis grid cells",
                "ordering": ["soil_location", "block_code", "cell_code"],
            },
        ),
        migrations.CreateModel(
            name="RemoteSensingRun",
            fields=[
                ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
                ("block_code", models.CharField(blank=True, db_index=True, default="", help_text="شناسه بلوکی که این run برای آن اجرا شده است.", max_length=64)),
                ("provider", models.CharField(default="openeo", help_text="ارائهدهنده داده سنجشازدور.", max_length=64)),
                ("chunk_size_sqm", models.PositiveIntegerField(default=900, help_text="اندازه هر سلول تحلیل به متر مربع.")),
                ("temporal_start", models.DateField(blank=True, null=True)),
                ("temporal_end", models.DateField(blank=True, null=True)),
                ("status", models.CharField(choices=[("pending", "Pending"), ("running", "Running"), ("success", "Success"), ("failure", "Failure")], db_index=True, default="pending", max_length=16)),
                ("metadata", models.JSONField(blank=True, default=dict)),
                ("error_message", models.TextField(blank=True, default="")),
                ("started_at", models.DateTimeField(blank=True, null=True)),
                ("finished_at", models.DateTimeField(blank=True, null=True)),
                ("created_at", models.DateTimeField(auto_now_add=True, db_index=True)),
                ("updated_at", models.DateTimeField(auto_now=True)),
                ("block_subdivision", models.ForeignKey(blank=True, null=True, on_delete=models.deletion.SET_NULL, related_name="remote_sensing_runs", to="location_data.blocksubdivision")),
                ("soil_location", models.ForeignKey(on_delete=models.deletion.CASCADE, related_name="remote_sensing_runs", to="location_data.soillocation")),
            ],
            options={
                "verbose_name": "remote sensing run",
                "verbose_name_plural": "remote sensing runs",
                "ordering": ["-created_at", "-id"],
            },
        ),
        migrations.CreateModel(
            name="AnalysisGridObservation",
            fields=[
                ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
                ("temporal_start", models.DateField(db_index=True)),
                ("temporal_end", models.DateField(db_index=True)),
                ("ndvi", models.FloatField(blank=True, null=True)),
                ("ndwi", models.FloatField(blank=True, null=True)),
                ("lst_c", models.FloatField(blank=True, null=True)),
                ("soil_vv", models.FloatField(blank=True, null=True)),
                ("soil_vv_db", models.FloatField(blank=True, null=True)),
                ("dem_m", models.FloatField(blank=True, null=True)),
                ("slope_deg", models.FloatField(blank=True, null=True)),
                ("metadata", models.JSONField(blank=True, default=dict)),
                ("created_at", models.DateTimeField(auto_now_add=True, db_index=True)),
                ("updated_at", models.DateTimeField(auto_now=True)),
                ("cell", models.ForeignKey(on_delete=models.deletion.CASCADE, related_name="observations", to="location_data.analysisgridcell")),
                ("run", models.ForeignKey(blank=True, null=True, on_delete=models.deletion.SET_NULL, related_name="observations", to="location_data.remotesensingrun")),
            ],
            options={
                "verbose_name": "analysis grid observation",
                "verbose_name_plural": "analysis grid observations",
                "ordering": ["-temporal_start", "-temporal_end", "-id"],
            },
        ),
        migrations.AddIndex(
            model_name="analysisgridcell",
            index=models.Index(fields=["soil_location", "block_code"], name="grid_cell_loc_block_idx"),
        ),
        migrations.AddIndex(
            model_name="analysisgridcell",
            index=models.Index(fields=["soil_location", "chunk_size_sqm"], name="grid_cell_loc_chunk_idx"),
        ),
        migrations.AddIndex(
            model_name="remotesensingrun",
            index=models.Index(fields=["soil_location", "status", "created_at"], name="rs_run_loc_status_created_idx"),
        ),
        migrations.AddIndex(
            model_name="remotesensingrun",
            index=models.Index(fields=["block_code", "created_at"], name="rs_run_block_created_idx"),
        ),
        migrations.AddConstraint(
            model_name="analysisgridobservation",
            constraint=models.UniqueConstraint(fields=("cell", "temporal_start", "temporal_end"), name="grid_obs_unique_cell_temporal_range"),
        ),
        migrations.AddIndex(
            model_name="analysisgridobservation",
            index=models.Index(fields=["cell", "temporal_start", "temporal_end"], name="grid_obs_cell_temporal_idx"),
        ),
        migrations.AddIndex(
            model_name="analysisgridobservation",
            index=models.Index(fields=["temporal_start", "temporal_end"], name="grid_obs_temporal_idx"),
        ),
    ]
@@ -0,0 +1,65 @@
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("location_data", "0011_remote_sensing_models"),
    ]

    operations = [
        migrations.CreateModel(
            name="RemoteSensingSubdivisionResult",
            fields=[
                ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
                ("block_code", models.CharField(blank=True, db_index=True, default="", max_length=64)),
                ("chunk_size_sqm", models.PositiveIntegerField(default=900)),
                ("temporal_start", models.DateField(db_index=True)),
                ("temporal_end", models.DateField(db_index=True)),
                ("cluster_count", models.PositiveIntegerField(default=0)),
                ("selected_features", models.JSONField(blank=True, default=list)),
                ("skipped_cell_codes", models.JSONField(blank=True, default=list)),
                ("metadata", models.JSONField(blank=True, default=dict)),
                ("created_at", models.DateTimeField(auto_now_add=True, db_index=True)),
                ("updated_at", models.DateTimeField(auto_now=True)),
                ("block_subdivision", models.ForeignKey(blank=True, null=True, on_delete=models.deletion.SET_NULL, related_name="remote_sensing_subdivision_results", to="location_data.blocksubdivision")),
                ("run", models.OneToOneField(on_delete=models.deletion.CASCADE, related_name="subdivision_result", to="location_data.remotesensingrun")),
                ("soil_location", models.ForeignKey(on_delete=models.deletion.CASCADE, related_name="remote_sensing_subdivision_results", to="location_data.soillocation")),
            ],
            options={
                "verbose_name": "remote sensing subdivision result",
                "verbose_name_plural": "remote sensing subdivision results",
                "ordering": ["-created_at", "-id"],
            },
        ),
        migrations.CreateModel(
            name="RemoteSensingClusterAssignment",
            fields=[
                ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
                ("cluster_label", models.PositiveIntegerField(db_index=True)),
                ("raw_feature_values", models.JSONField(blank=True, default=dict)),
                ("scaled_feature_values", models.JSONField(blank=True, default=dict)),
                ("created_at", models.DateTimeField(auto_now_add=True, db_index=True)),
                ("updated_at", models.DateTimeField(auto_now=True)),
                ("cell", models.ForeignKey(on_delete=models.deletion.CASCADE, related_name="cluster_assignments", to="location_data.analysisgridcell")),
                ("result", models.ForeignKey(on_delete=models.deletion.CASCADE, related_name="assignments", to="location_data.remotesensingsubdivisionresult")),
            ],
            options={
                "verbose_name": "remote sensing cluster assignment",
                "verbose_name_plural": "remote sensing cluster assignments",
                "ordering": ["cluster_label", "cell__cell_code"],
            },
        ),
        migrations.AddIndex(
            model_name="remotesensingsubdivisionresult",
            index=models.Index(fields=["soil_location", "block_code", "temporal_start", "temporal_end"], name="rs_subdiv_result_lookup_idx"),
        ),
        migrations.AddConstraint(
            model_name="remotesensingclusterassignment",
            constraint=models.UniqueConstraint(fields=("result", "cell"), name="rs_cluster_assign_unique_result_cell"),
        ),
        migrations.AddIndex(
            model_name="remotesensingclusterassignment",
            # Shortened from "rs_cluster_assign_result_label_idx": Django rejects
            # index names longer than 30 characters.
            index=models.Index(fields=["result", "cluster_label"], name="rs_cluster_result_label_idx"),
        ),
    ]
@@ -0,0 +1,14 @@
from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ("location_data", "0012_remote_sensing_subdivision_models"),
    ]

    operations = [
        migrations.DeleteModel(
            name="SoilDepthData",
        ),
    ]
@@ -0,0 +1,523 @@
from django.db import models


def build_block_layout(block_count: int = 1, blocks: list[dict] | None = None) -> dict:
    normalized_blocks = []
    if blocks:
        for index, block in enumerate(blocks):
            normalized_blocks.append(
                {
                    "block_code": str(block.get("block_code") or f"block-{index + 1}").strip(),
                    "order": int(block.get("order") or index + 1),
                    "source": "input",
                    "boundary": block.get("boundary") or {},
                    "needs_subdivision": None,
                    "sub_blocks": [],
                }
            )
    else:
        normalized_count = max(int(block_count or 1), 1)
        for index in range(normalized_count):
            normalized_blocks.append(
                {
                    "block_code": f"block-{index + 1}",
                    "order": index + 1,
                    "source": "input" if normalized_count > 1 else "default",
                    "boundary": {},
                    "needs_subdivision": None,
                    "sub_blocks": [],
                }
            )

    normalized_count = len(normalized_blocks) if normalized_blocks else max(int(block_count or 1), 1)

    return {
        "input_block_count": normalized_count,
        "default_full_farm": normalized_count == 1,
        "algorithm_status": "pending",
        "blocks": normalized_blocks,
    }
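As a reference for callers, a minimal sketch of what `build_block_layout` returns when a farmer registers two explicit blocks (the block code `"north"` and the empty boundaries are illustrative):

```python
layout = build_block_layout(
    blocks=[
        {"block_code": "north", "boundary": {"type": "Polygon", "coordinates": []}},
        {},  # no block_code given, so it falls back to "block-2"
    ]
)
assert layout["input_block_count"] == 2
assert layout["default_full_farm"] is False  # only a single block counts as "full farm"
assert layout["blocks"][0]["block_code"] == "north"
assert layout["blocks"][1]["block_code"] == "block-2"
assert layout["algorithm_status"] == "pending"
```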
class SoilLocation(models.Model):
    """
    Farm center point plus the farm/block boundaries defined by the farmer.
    """

    latitude = models.DecimalField(
        max_digits=9,
        decimal_places=6,
        db_index=True,
        help_text="عرض جغرافیایی مرکز زمین (lat)",
    )
    longitude = models.DecimalField(
        max_digits=9,
        decimal_places=6,
        db_index=True,
        help_text="طول جغرافیایی مرکز زمین (lon)",
    )
    task_id = models.CharField(
        max_length=255,
        blank=True,
        help_text="شناسه تسک Celery در حال پردازش",
    )

    farm_boundary = models.JSONField(
        default=dict,
        blank=True,
        help_text=(
            "مرز مزرعه برای درخواستهای سنجشازدور. "
            'میتواند GeoJSON polygon یا bbox مثل {"type": "Polygon", "coordinates": [...]} باشد.'
        ),
    )
    input_block_count = models.PositiveIntegerField(
        default=1,
        help_text="تعداد بلوکهای اولیهای که کشاورز برای زمین ثبت میکند.",
    )
    block_layout = models.JSONField(
        default=build_block_layout,
        blank=True,
        help_text=(
            "ساختار بلوکهای زمین. بهصورت پیشفرض کل زمین یک بلوک است و "
            "بعداً الگوریتم میتواند برای هر بلوک زیربلوک تعریف کند."
        ),
    )
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)

    class Meta:
        constraints = [
            models.UniqueConstraint(
                fields=["latitude", "longitude"],
                name="soil_location_unique_lat_lon",
            )
        ]
        ordering = ["-updated_at"]
        verbose_name = "مرکز زمین"
        verbose_name_plural = "مراکز زمین"

    def __str__(self):
        return f"SoilLocation({self.latitude}, {self.longitude})"

    @property
    def center_latitude(self):
        return self.latitude

    @property
    def center_longitude(self):
        return self.longitude

    @property
    def is_complete(self):
        """Does at least one successful remote sensing run exist for this location?"""
        return self.remote_sensing_runs.filter(status="success").exists()

    def set_input_block_count(self, block_count: int = 1, blocks: list[dict] | None = None):
        normalized_count = len(blocks) if blocks else max(int(block_count or 1), 1)
        self.input_block_count = normalized_count
        self.block_layout = build_block_layout(normalized_count, blocks=blocks)

    def save(self, *args, **kwargs):
        if not self.input_block_count:
            self.input_block_count = 1
        if not self.block_layout:
            self.block_layout = build_block_layout(self.input_block_count)
        super().save(*args, **kwargs)
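A short sketch of the step-1 registration flow on top of this model — creating the location and recording the farmer's blocks. The coordinates and boundaries are placeholders:

```python
location = SoilLocation.objects.create(
    latitude="35.689200",
    longitude="51.389700",
    farm_boundary={"type": "Polygon", "coordinates": []},  # placeholder boundary
)

# Register the farmer-defined blocks; this rebuilds block_layout via build_block_layout().
location.set_input_block_count(
    blocks=[
        {"block_code": "block-1", "boundary": {"type": "Polygon", "coordinates": []}},
        {"block_code": "block-2", "boundary": {"type": "Polygon", "coordinates": []}},
    ]
)
location.save(update_fields=["input_block_count", "block_layout", "updated_at"])
assert location.input_block_count == 2
```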
class BlockSubdivision(models.Model):
    """
    Subdivision result of a single block for a SoilLocation.
    grid_points holds the initial grid points and centroid_points the final section centers.
    """

    soil_location = models.ForeignKey(
        SoilLocation,
        on_delete=models.CASCADE,
        related_name="block_subdivisions",
    )
    block_code = models.CharField(
        max_length=64,
        help_text="شناسه بلوکی که این خردسازی برای آن انجام شده است.",
    )
    source_boundary = models.JSONField(
        default=dict,
        blank=True,
        help_text="مرز همان بلوکی که به سرویس subdivision داده شده است.",
    )
    chunk_size_sqm = models.PositiveIntegerField(
        default=100,
        help_text="اندازه هر chunk به متر مربع.",
    )
    grid_points = models.JSONField(
        default=list,
        blank=True,
        help_text="نقاط اولیه شبکه داخل مرز بلوک.",
    )
    centroid_points = models.JSONField(
        default=list,
        blank=True,
        help_text="مراکز نهایی بخشهای خردشده.",
    )
    grid_point_count = models.PositiveIntegerField(default=0)
    centroid_count = models.PositiveIntegerField(default=0)
    elbow_plot = models.ImageField(
        upload_to="location_data/elbow_plots/",
        null=True,
        blank=True,
        help_text="تصویر نمودار elbow برای انتخاب تعداد بهینه خوشهها.",
    )
    status = models.CharField(
        max_length=32,
        default="created",
        help_text="وضعیت تولید subdivision برای این بلوک.",
    )
    metadata = models.JSONField(default=dict, blank=True)
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)

    class Meta:
        constraints = [
            models.UniqueConstraint(
                fields=["soil_location", "block_code"],
                name="location_block_subdivision_unique_location_block_code",
            )
        ]
        ordering = ["soil_location", "block_code", "-updated_at"]
        verbose_name = "خردسازی بلوک"
        verbose_name_plural = "خردسازی بلوکها"

    def __str__(self):
        return f"BlockSubdivision({self.soil_location_id}, {self.block_code})"
class RemoteSensingRun(models.Model):
    STATUS_PENDING = "pending"
    STATUS_RUNNING = "running"
    STATUS_SUCCESS = "success"
    STATUS_FAILURE = "failure"
    STATUS_CHOICES = [
        (STATUS_PENDING, "Pending"),
        (STATUS_RUNNING, "Running"),
        (STATUS_SUCCESS, "Success"),
        (STATUS_FAILURE, "Failure"),
    ]

    soil_location = models.ForeignKey(
        SoilLocation,
        on_delete=models.CASCADE,
        related_name="remote_sensing_runs",
    )
    block_subdivision = models.ForeignKey(
        BlockSubdivision,
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        related_name="remote_sensing_runs",
    )
    block_code = models.CharField(
        max_length=64,
        blank=True,
        default="",
        db_index=True,
        help_text="شناسه بلوکی که این run برای آن اجرا شده است.",
    )
    provider = models.CharField(
        max_length=64,
        default="openeo",
        help_text="ارائهدهنده داده سنجشازدور.",
    )
    chunk_size_sqm = models.PositiveIntegerField(
        default=900,
        help_text="اندازه هر سلول تحلیل به متر مربع.",
    )
    temporal_start = models.DateField(null=True, blank=True)
    temporal_end = models.DateField(null=True, blank=True)
    status = models.CharField(
        max_length=16,
        choices=STATUS_CHOICES,
        default=STATUS_PENDING,
        db_index=True,
    )
    metadata = models.JSONField(default=dict, blank=True)
    error_message = models.TextField(blank=True, default="")
    started_at = models.DateTimeField(null=True, blank=True)
    finished_at = models.DateTimeField(null=True, blank=True)
    created_at = models.DateTimeField(auto_now_add=True, db_index=True)
    updated_at = models.DateTimeField(auto_now=True)

    class Meta:
        ordering = ["-created_at", "-id"]
        indexes = [
            models.Index(
                fields=["soil_location", "status", "created_at"],
                name="rs_run_loc_status_created_idx",
            ),
            models.Index(
                fields=["block_code", "created_at"],
                name="rs_run_block_created_idx",
            ),
        ]
        verbose_name = "remote sensing run"
        verbose_name_plural = "remote sensing runs"

    def __str__(self):
        block_text = self.block_code or "farm"
        return f"RemoteSensingRun({self.soil_location_id}, {block_text}, {self.status})"

    @property
    def normalized_status(self) -> str:
        """
        Return the client-facing lifecycle status while keeping legacy DB values stable.
        """
        if self.status == self.STATUS_SUCCESS:
            return "completed"
        if self.status == self.STATUS_FAILURE:
            return "failed"
        return self.status
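A tiny sketch of the mapping `normalized_status` applies (legacy values stay in the DB; clients see the normalized lifecycle):

```python
run = RemoteSensingRun(status=RemoteSensingRun.STATUS_SUCCESS)
assert run.normalized_status == "completed"

run.status = RemoteSensingRun.STATUS_FAILURE
assert run.normalized_status == "failed"

run.status = RemoteSensingRun.STATUS_RUNNING
assert run.normalized_status == "running"  # pending/running pass through unchanged
```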
class AnalysisGridCell(models.Model):
    soil_location = models.ForeignKey(
        SoilLocation,
        on_delete=models.CASCADE,
        related_name="analysis_grid_cells",
    )
    block_subdivision = models.ForeignKey(
        BlockSubdivision,
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        related_name="analysis_grid_cells",
    )
    block_code = models.CharField(
        max_length=64,
        blank=True,
        default="",
        db_index=True,
        help_text="شناسه بلوکی که این سلول به آن تعلق دارد.",
    )
    cell_code = models.CharField(
        max_length=128,
        unique=True,
        help_text="شناسه یکتای سلول تحلیل.",
    )
    chunk_size_sqm = models.PositiveIntegerField(
        default=900,
        db_index=True,
        help_text="اندازه سلول تحلیل به متر مربع.",
    )
    geometry = models.JSONField(
        default=dict,
        blank=True,
        help_text="هندسه سلول به صورت GeoJSON polygon یا ساختار مشابه.",
    )
    centroid_lat = models.DecimalField(
        max_digits=9,
        decimal_places=6,
        db_index=True,
        help_text="عرض جغرافیایی مرکز سلول.",
    )
    centroid_lon = models.DecimalField(
        max_digits=9,
        decimal_places=6,
        db_index=True,
        help_text="طول جغرافیایی مرکز سلول.",
    )
    created_at = models.DateTimeField(auto_now_add=True, db_index=True)
    updated_at = models.DateTimeField(auto_now=True)

    class Meta:
        ordering = ["soil_location", "block_code", "cell_code"]
        indexes = [
            models.Index(
                fields=["soil_location", "block_code"],
                name="grid_cell_loc_block_idx",
            ),
            models.Index(
                fields=["soil_location", "chunk_size_sqm"],
                name="grid_cell_loc_chunk_idx",
            ),
        ]
        verbose_name = "analysis grid cell"
        verbose_name_plural = "analysis grid cells"

    def __str__(self):
        return f"AnalysisGridCell({self.cell_code})"
class AnalysisGridObservation(models.Model):
    cell = models.ForeignKey(
        AnalysisGridCell,
        on_delete=models.CASCADE,
        related_name="observations",
    )
    run = models.ForeignKey(
        RemoteSensingRun,
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        related_name="observations",
    )
    temporal_start = models.DateField(db_index=True)
    temporal_end = models.DateField(db_index=True)
    ndvi = models.FloatField(null=True, blank=True)
    ndwi = models.FloatField(null=True, blank=True)
    lst_c = models.FloatField(null=True, blank=True)
    soil_vv = models.FloatField(null=True, blank=True)
    soil_vv_db = models.FloatField(null=True, blank=True)
    dem_m = models.FloatField(null=True, blank=True)
    slope_deg = models.FloatField(null=True, blank=True)
    metadata = models.JSONField(default=dict, blank=True)
    created_at = models.DateTimeField(auto_now_add=True, db_index=True)
    updated_at = models.DateTimeField(auto_now=True)

    class Meta:
        ordering = ["-temporal_start", "-temporal_end", "-id"]
        constraints = [
            models.UniqueConstraint(
                fields=["cell", "temporal_start", "temporal_end"],
                name="grid_obs_unique_cell_temporal_range",
            )
        ]
        indexes = [
            models.Index(
                fields=["cell", "temporal_start", "temporal_end"],
                name="grid_obs_cell_temporal_idx",
            ),
            models.Index(
                fields=["temporal_start", "temporal_end"],
                name="grid_obs_temporal_idx",
            ),
        ]
        verbose_name = "analysis grid observation"
        verbose_name_plural = "analysis grid observations"

    def __str__(self):
        return (
            f"AnalysisGridObservation({self.cell_id}, "
            f"{self.temporal_start}, {self.temporal_end})"
        )
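Because `grid_obs_unique_cell_temporal_range` makes `(cell, temporal_start, temporal_end)` unique, writers can safely upsert one row per cell and time window. A minimal sketch — the metric values and the variables `cell` and `run` are illustrative:

```python
from datetime import date

observation, created = AnalysisGridObservation.objects.update_or_create(
    cell=cell,  # an existing AnalysisGridCell
    temporal_start=date(2026, 3, 1),
    temporal_end=date(2026, 3, 31),
    defaults={"ndvi": 0.42, "ndwi": -0.10, "run": run},  # run may be None
)
```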
class RemoteSensingSubdivisionResult(models.Model):
    soil_location = models.ForeignKey(
        SoilLocation,
        on_delete=models.CASCADE,
        related_name="remote_sensing_subdivision_results",
    )
    run = models.OneToOneField(
        RemoteSensingRun,
        on_delete=models.CASCADE,
        related_name="subdivision_result",
    )
    block_subdivision = models.ForeignKey(
        BlockSubdivision,
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        related_name="remote_sensing_subdivision_results",
    )
    block_code = models.CharField(
        max_length=64,
        blank=True,
        default="",
        db_index=True,
    )
    chunk_size_sqm = models.PositiveIntegerField(default=900)
    temporal_start = models.DateField(db_index=True)
    temporal_end = models.DateField(db_index=True)
    cluster_count = models.PositiveIntegerField(default=0)
    selected_features = models.JSONField(default=list, blank=True)
    skipped_cell_codes = models.JSONField(default=list, blank=True)
    metadata = models.JSONField(default=dict, blank=True)
    created_at = models.DateTimeField(auto_now_add=True, db_index=True)
    updated_at = models.DateTimeField(auto_now=True)

    class Meta:
        ordering = ["-created_at", "-id"]
        indexes = [
            models.Index(
                fields=["soil_location", "block_code", "temporal_start", "temporal_end"],
                name="rs_subdiv_result_lookup_idx",
            )
        ]
        verbose_name = "remote sensing subdivision result"
        verbose_name_plural = "remote sensing subdivision results"

    def __str__(self):
        return (
            f"RemoteSensingSubdivisionResult({self.soil_location_id}, "
            f"{self.block_code or 'farm'}, clusters={self.cluster_count})"
        )
class RemoteSensingClusterAssignment(models.Model):
    result = models.ForeignKey(
        RemoteSensingSubdivisionResult,
        on_delete=models.CASCADE,
        related_name="assignments",
    )
    cell = models.ForeignKey(
        AnalysisGridCell,
        on_delete=models.CASCADE,
        related_name="cluster_assignments",
    )
    cluster_label = models.PositiveIntegerField(db_index=True)
    raw_feature_values = models.JSONField(default=dict, blank=True)
    scaled_feature_values = models.JSONField(default=dict, blank=True)
    created_at = models.DateTimeField(auto_now_add=True, db_index=True)
    updated_at = models.DateTimeField(auto_now=True)

    class Meta:
        ordering = ["cluster_label", "cell__cell_code"]
        constraints = [
            models.UniqueConstraint(
                fields=["result", "cell"],
                name="rs_cluster_assign_unique_result_cell",
            )
        ]
        indexes = [
            models.Index(
                fields=["result", "cluster_label"],
                # Shortened from "rs_cluster_assign_result_label_idx": Django
                # rejects index names longer than 30 characters.
                name="rs_cluster_result_label_idx",
            )
        ]
        verbose_name = "remote sensing cluster assignment"
        verbose_name_plural = "remote sensing cluster assignments"

    def __str__(self):
        return f"RemoteSensingClusterAssignment({self.result_id}, {self.cell_id}, {self.cluster_label})"
class NdviObservation(models.Model):
    location = models.ForeignKey(
        SoilLocation,
        on_delete=models.CASCADE,
        related_name="ndvi_observations",
    )
    observation_date = models.DateField(db_index=True)
    mean_ndvi = models.FloatField()
    ndvi_map = models.JSONField(default=dict, blank=True)
    vegetation_health_class = models.CharField(max_length=64)
    satellite_source = models.CharField(max_length=64, default="sentinel-2")
    cloud_cover = models.FloatField(null=True, blank=True)
    metadata = models.JSONField(default=dict, blank=True)
    created_at = models.DateTimeField(auto_now_add=True, db_index=True)

    class Meta:
        db_table = "dashboard_data_ndviobservation"
        ordering = ["-observation_date", "-created_at"]
        constraints = [
            models.UniqueConstraint(
                fields=["location", "observation_date", "satellite_source"],
                name="ndvi_unique_location_date_source",
            )
        ]
        verbose_name = "NDVI Observation"
        verbose_name_plural = "NDVI Observations"

    def __str__(self):
        return f"NDVI {self.location_id} {self.observation_date} {self.satellite_source}"
@@ -0,0 +1,92 @@
from __future__ import annotations

from typing import Any

from farm_data.models import SensorData
from .remote_sensing import fetch_or_get_ndvi_observation


def _ndvi_explanation(observation, ai_bundle: dict | None = None) -> str:
    ai_bundle = ai_bundle or {}
    ai_payload = ai_bundle.get("ndviHealthCard", {}) if isinstance(ai_bundle, dict) else {}
    explanation = ai_payload.get("explanation")
    if isinstance(explanation, str) and explanation.strip():
        return explanation.strip()
    return (
        f"میانگین NDVI مزرعه {observation.mean_ndvi} ثبت شده و کلاس سلامت پوشش گیاهی "
        f"در وضعیت {observation.vegetation_health_class} قرار دارد."
    )


def _build_ndvi_health_card(location: Any, ai_bundle: dict | None = None) -> dict[str, Any]:
    if location is None:
        return {
            "mean_ndvi": None,
            "ndvi_map": {},
            "vegetation_health_class": None,
            "observation_date": None,
            "satellite_source": None,
            "healthData": [],
        }

    observation = fetch_or_get_ndvi_observation(location)
    if observation is None:
        return {
            "mean_ndvi": None,
            "ndvi_map": {},
            "vegetation_health_class": "Unavailable",
            "observation_date": None,
            "satellite_source": None,
            "healthData": [
                {
                    "title": "وضعیت NDVI",
                    "value": "داده ماهوارهای موجود نیست",
                    "color": "warning",
                    "icon": "tabler-satellite-off",
                },
            ],
        }

    mean_value = round(observation.mean_ndvi, 2)
    vegetation_class = observation.vegetation_health_class
    return {
        "ndviIndex": mean_value,
        "mean_ndvi": mean_value,
        "ndvi_map": observation.ndvi_map,
        "vegetation_health_class": vegetation_class,
        "observation_date": observation.observation_date.isoformat(),
        "satellite_source": observation.satellite_source,
        "healthData": [
            {
                "title": "سلامت پوشش گیاهی",
                "value": vegetation_class,
                "color": "success" if mean_value > 0.6 else "warning" if mean_value >= 0.4 else "error",
                "icon": "tabler-plant",
            },
            {
                "title": "تاریخ مشاهده",
                "value": observation.observation_date.isoformat(),
                "color": "info",
                "icon": "tabler-calendar",
            },
            {
                "title": "تفسیر",
                "value": _ndvi_explanation(observation, ai_bundle=ai_bundle),
                "color": "primary",
                "icon": "tabler-message-2",
            },
        ],
    }
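The card color follows the chained conditional on `mean_value` above; restated as explicit thresholds in a small sketch:

```python
def ndvi_color(mean_value: float) -> str:
    # Same thresholds as the inline expression in _build_ndvi_health_card.
    if mean_value > 0.6:
        return "success"
    if mean_value >= 0.4:
        return "warning"
    return "error"


assert ndvi_color(0.72) == "success"
assert ndvi_color(0.55) == "warning"
assert ndvi_color(0.31) == "error"
```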
class NdviHealthService:
    def get_ndvi_health(self, *, farm_uuid: str) -> dict[str, Any]:
        sensor = (
            SensorData.objects.select_related("center_location")
            .filter(farm_uuid=farm_uuid)
            .first()
        )
        if sensor is None:
            raise ValueError("Farm not found.")

        return _build_ndvi_health_card(sensor.center_location, ai_bundle=None)
@@ -0,0 +1,476 @@
from __future__ import annotations

import math
import os
from dataclasses import dataclass
from datetime import date
from decimal import Decimal
from typing import Any

from .models import AnalysisGridCell


DEFAULT_OPENEO_BACKEND_URL = "https://openeofed.dataspace.copernicus.eu"
DEFAULT_OPENEO_PROVIDER = "openeo"

SENTINEL2_COLLECTION = "SENTINEL2_L2A"
SENTINEL3_LST_COLLECTION = "SENTINEL3_SLSTR_L2_LST"
SENTINEL1_COLLECTION = "SENTINEL1_GRD"
COPERNICUS_DEM_COLLECTION = "COPERNICUS_30"

VALID_SCL_CLASSES = (4, 5, 6)
METRIC_NAMES = (
    "ndvi",
    "ndwi",
    "lst_c",
    "soil_vv",
    "soil_vv_db",
    "dem_m",
    "slope_deg",
)


class OpenEOServiceError(Exception):
    """Base exception for openEO service failures."""


class OpenEOAuthenticationError(OpenEOServiceError):
    """Raised when authentication with the openEO backend fails."""


class OpenEOExecutionError(OpenEOServiceError):
    """Raised when a metric process graph cannot be executed successfully."""


@dataclass(frozen=True)
class OpenEOConnectionSettings:
    backend_url: str = DEFAULT_OPENEO_BACKEND_URL
    auth_method: str = "client_credentials"
    client_id: str = ""
    client_secret: str = ""
    provider_id: str = ""
    username: str = ""
    password: str = ""
    allow_interactive_oidc: bool = False

    @classmethod
    def from_env(cls) -> "OpenEOConnectionSettings":
        return cls(
            backend_url=os.environ.get("OPENEO_BACKEND_URL", DEFAULT_OPENEO_BACKEND_URL).strip(),
            auth_method=os.environ.get("OPENEO_AUTH_METHOD", "client_credentials").strip().lower(),
            client_id=os.environ.get("OPENEO_AUTH_CLIENT_ID", "").strip(),
            client_secret=os.environ.get("OPENEO_AUTH_CLIENT_SECRET", "").strip(),
            provider_id=os.environ.get("OPENEO_AUTH_PROVIDER_ID", "").strip(),
            username=os.environ.get("OPENEO_USERNAME", "").strip(),
            password=os.environ.get("OPENEO_PASSWORD", "").strip(),
            allow_interactive_oidc=os.environ.get("OPENEO_ALLOW_INTERACTIVE_OIDC", "0").strip().lower()
            in {"1", "true", "yes", "on"},
        )
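A minimal sketch of the environment-driven configuration consumed by `OpenEOConnectionSettings.from_env` — the credential values are placeholders only:

```python
import os

os.environ["OPENEO_BACKEND_URL"] = "https://openeofed.dataspace.copernicus.eu"
os.environ["OPENEO_AUTH_METHOD"] = "client_credentials"
os.environ["OPENEO_AUTH_CLIENT_ID"] = "my-client-id"          # placeholder
os.environ["OPENEO_AUTH_CLIENT_SECRET"] = "my-client-secret"  # placeholder

settings = OpenEOConnectionSettings.from_env()
assert settings.auth_method == "client_credentials"
assert settings.allow_interactive_oidc is False  # interactive OIDC stays off for workers
```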
def connect_openeo(settings: OpenEOConnectionSettings | None = None):
    """
    Build an authenticated openEO connection using environment-driven configuration.

    Preferred authentication mode in production is OIDC client credentials.
    """
    settings = settings or OpenEOConnectionSettings.from_env()
    try:
        import openeo
    except ImportError as exc:  # pragma: no cover - runtime dependency guard
        raise OpenEOServiceError("The `openeo` Python client is required for remote sensing jobs.") from exc

    connection = openeo.connect(settings.backend_url)
    try:
        if settings.auth_method == "client_credentials":
            if not settings.client_id or not settings.client_secret:
                raise OpenEOAuthenticationError(
                    "OPENEO_AUTH_CLIENT_ID and OPENEO_AUTH_CLIENT_SECRET must be configured."
                )
            auth_kwargs = {
                "client_id": settings.client_id,
                "client_secret": settings.client_secret,
            }
            if settings.provider_id:
                auth_kwargs["provider_id"] = settings.provider_id
            return connection.authenticate_oidc_client_credentials(**auth_kwargs)

        if settings.auth_method == "password":
            if not settings.username or not settings.password:
                raise OpenEOAuthenticationError(
                    "OPENEO_USERNAME and OPENEO_PASSWORD must be configured for password auth."
                )
            auth_kwargs = {
                "username": settings.username,
                "password": settings.password,
            }
            if settings.provider_id:
                auth_kwargs["provider_id"] = settings.provider_id
            return connection.authenticate_oidc_resource_owner_password_credentials(**auth_kwargs)

        if settings.auth_method == "oidc":
            if not settings.allow_interactive_oidc:
                raise OpenEOAuthenticationError(
                    "Interactive OIDC auth is disabled. Use client credentials in Celery workers."
                )
            auth_kwargs = {}
            if settings.provider_id:
                auth_kwargs["provider_id"] = settings.provider_id
            return connection.authenticate_oidc(**auth_kwargs)

        raise OpenEOAuthenticationError(f"Unsupported OPENEO_AUTH_METHOD: {settings.auth_method}")
    except Exception as exc:
        if isinstance(exc, OpenEOServiceError):
            raise
        raise OpenEOAuthenticationError(f"Failed to authenticate with openEO backend: {exc}") from exc
def build_feature_collection(cells: list[AnalysisGridCell]) -> dict[str, Any]:
    features = []
    for cell in cells:
        features.append(
            {
                "type": "Feature",
                "id": cell.cell_code,
                "properties": {
                    "cell_code": cell.cell_code,
                    "block_code": cell.block_code,
                    "soil_location_id": cell.soil_location_id,
                },
                "geometry": cell.geometry,
            }
        )
    return {"type": "FeatureCollection", "features": features}


def build_spatial_extent(cells: list[AnalysisGridCell]) -> dict[str, float]:
    if not cells:
        raise ValueError("At least one analysis grid cell is required.")

    west = None
    east = None
    south = None
    north = None
    for cell in cells:
        coordinates = ((cell.geometry or {}).get("coordinates") or [[]])[0]
        for lon, lat in coordinates:
            west = lon if west is None else min(west, lon)
            east = lon if east is None else max(east, lon)
            south = lat if south is None else min(south, lat)
            north = lat if north is None else max(north, lat)

    # Guard against cells whose geometry carries no coordinates at all, which
    # would otherwise surface below as a confusing float(None) TypeError.
    if west is None or east is None or south is None or north is None:
        raise ValueError("Analysis grid cells do not contain any polygon coordinates.")

    return {
        "west": float(west),
        "south": float(south),
        "east": float(east),
        "north": float(north),
    }


def build_empty_metric_payload() -> dict[str, Any]:
    return {metric_name: None for metric_name in METRIC_NAMES}


def initialize_metric_result_map(cells: list[AnalysisGridCell]) -> dict[str, dict[str, Any]]:
    return {cell.cell_code: build_empty_metric_payload() for cell in cells}
def compute_remote_sensing_metrics(
    cells: list[AnalysisGridCell],
    *,
    temporal_start: date | str,
    temporal_end: date | str,
    connection=None,
) -> dict[str, Any]:
    """
    Compute all requested remote sensing metrics in batch mode per metric.

    Returns a normalized structure keyed by `cell_code`, plus execution metadata
    that can be stored by Celery tasks and Django models.
    """
    if not cells:
        return {
            "results": {},
            "metadata": {
                "backend": DEFAULT_OPENEO_PROVIDER,
                "collections_used": [],
                "slope_supported": False,
                "job_refs": {},
                "failed_metrics": [],
            },
        }

    connection = connection or connect_openeo()
    feature_collection = build_feature_collection(cells)
    spatial_extent = build_spatial_extent(cells)
    results = initialize_metric_result_map(cells)
    metadata = {
        "backend": DEFAULT_OPENEO_PROVIDER,
        "backend_url": DEFAULT_OPENEO_BACKEND_URL,
        "collections_used": [
            SENTINEL2_COLLECTION,
            SENTINEL3_LST_COLLECTION,
            SENTINEL1_COLLECTION,
            COPERNICUS_DEM_COLLECTION,
        ],
        "slope_supported": True,
        "job_refs": {},
        "failed_metrics": [],
    }

    metric_runners = [
        ("ndvi", compute_ndvi),
        ("ndwi", compute_ndwi),
        ("lst_c", compute_lst_c),
        ("soil_vv", compute_soil_vv),
        ("dem_m", compute_dem_m),
        ("slope_deg", compute_slope_deg),
    ]
    for metric_name, runner in metric_runners:
        try:
            metric_payload = runner(
                connection=connection,
                feature_collection=feature_collection,
                spatial_extent=spatial_extent,
                temporal_start=temporal_start,
                temporal_end=temporal_end,
            )
            merge_metric_results(results, metric_payload["results"])
            metadata["job_refs"][metric_name] = metric_payload.get("job_ref")
            if metric_name == "slope_deg" and not metric_payload.get("supported", True):
                metadata["slope_supported"] = False
        except Exception as exc:
            if metric_name == "slope_deg":
                metadata["slope_supported"] = False
                metadata["failed_metrics"].append(
                    {"metric": metric_name, "error": str(exc), "non_fatal": True}
                )
                continue
            raise OpenEOExecutionError(f"Failed to compute metric `{metric_name}`: {exc}") from exc

    for cell_code, payload in results.items():
        soil_vv = payload.get("soil_vv")
        payload["soil_vv_db"] = linear_to_db(soil_vv)

    return {"results": results, "metadata": metadata}
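For orientation, a sketch of the shape this function returns. The cell code and all numeric values are illustrative; the keys come from `METRIC_NAMES` and the metadata dict above:

```python
example_return_value = {
    "results": {
        "block-1-cell-0001": {  # hypothetical cell_code
            "ndvi": 0.41, "ndwi": -0.07, "lst_c": 28.4,
            "soil_vv": 0.052, "soil_vv_db": -12.84,  # dB derived via linear_to_db
            "dem_m": 1180.2, "slope_deg": 2.7,
        },
    },
    "metadata": {
        "backend": "openeo",
        "backend_url": "https://openeofed.dataspace.copernicus.eu",
        "collections_used": ["SENTINEL2_L2A", "SENTINEL3_SLSTR_L2_LST", "SENTINEL1_GRD", "COPERNICUS_30"],
        "slope_supported": True,
        "job_refs": {},
        "failed_metrics": [],
    },
}
```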
def compute_ndvi(*, connection, feature_collection, spatial_extent, temporal_start, temporal_end) -> dict[str, Any]:
    cube = connection.load_collection(
        SENTINEL2_COLLECTION,
        spatial_extent=spatial_extent,
        temporal_extent=[_normalize_date(temporal_start), _normalize_date(temporal_end)],
        bands=["B03", "B04", "B08", "SCL"],
    )
    scl = cube.band("SCL")
    invalid_mask = (scl != VALID_SCL_CLASSES[0]) & (scl != VALID_SCL_CLASSES[1]) & (scl != VALID_SCL_CLASSES[2])
    red = cube.band("B04") * 0.0001
    nir = cube.band("B08") * 0.0001
    ndvi = ((nir - red) / (nir + red)).mask(invalid_mask.resample_cube_spatial(red))
    aggregated = ndvi.mean_time().aggregate_spatial(geometries=feature_collection, reducer="mean").execute()
    return {"results": parse_aggregate_spatial_response(aggregated, "ndvi")}
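The index itself is just `(NIR − RED) / (NIR + RED)` on reflectances scaled by 0.0001, exactly as in the cube arithmetic above. A scalar sketch with made-up digital numbers:

```python
red = 1200 * 0.0001  # Sentinel-2 B04 digital number -> reflectance (illustrative)
nir = 3600 * 0.0001  # Sentinel-2 B08

ndvi = (nir - red) / (nir + red)
assert round(ndvi, 2) == 0.5  # (0.36 - 0.12) / (0.36 + 0.12)
```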
def compute_ndwi(*, connection, feature_collection, spatial_extent, temporal_start, temporal_end) -> dict[str, Any]:
    cube = connection.load_collection(
        SENTINEL2_COLLECTION,
        spatial_extent=spatial_extent,
        temporal_extent=[_normalize_date(temporal_start), _normalize_date(temporal_end)],
        bands=["B03", "B08", "SCL"],
    )
    scl = cube.band("SCL")
    invalid_mask = (scl != VALID_SCL_CLASSES[0]) & (scl != VALID_SCL_CLASSES[1]) & (scl != VALID_SCL_CLASSES[2])
    green = cube.band("B03") * 0.0001
    nir = cube.band("B08") * 0.0001
    ndwi = ((green - nir) / (green + nir)).mask(invalid_mask.resample_cube_spatial(green))
    aggregated = ndwi.mean_time().aggregate_spatial(geometries=feature_collection, reducer="mean").execute()
    return {"results": parse_aggregate_spatial_response(aggregated, "ndwi")}


def compute_lst_c(*, connection, feature_collection, spatial_extent, temporal_start, temporal_end) -> dict[str, Any]:
    cube = connection.load_collection(
        SENTINEL3_LST_COLLECTION,
        spatial_extent=spatial_extent,
        temporal_extent=[_normalize_date(temporal_start), _normalize_date(temporal_end)],
    )
    band_name = infer_band_name(cube, preferred=("LST", "LST_in", "band_0"))
    lst_k = cube.band(band_name) if band_name else cube
    lst_c = lst_k - 273.15
    aggregated = lst_c.mean_time().aggregate_spatial(geometries=feature_collection, reducer="mean").execute()
    return {"results": parse_aggregate_spatial_response(aggregated, "lst_c")}


def compute_soil_vv(*, connection, feature_collection, spatial_extent, temporal_start, temporal_end) -> dict[str, Any]:
    cube = connection.load_collection(
        SENTINEL1_COLLECTION,
        spatial_extent=spatial_extent,
        temporal_extent=[_normalize_date(temporal_start), _normalize_date(temporal_end)],
        bands=["VV"],
    )
    vv = cube.band("VV")
    aggregated = vv.mean_time().aggregate_spatial(geometries=feature_collection, reducer="mean").execute()
    return {"results": parse_aggregate_spatial_response(aggregated, "soil_vv")}


def compute_dem_m(*, connection, feature_collection, spatial_extent, temporal_start, temporal_end) -> dict[str, Any]:
    cube = connection.load_collection(
        COPERNICUS_DEM_COLLECTION,
        spatial_extent=spatial_extent,
        temporal_extent=[_normalize_date(temporal_start), _normalize_date(temporal_end)],
    )
    band_name = infer_band_name(cube, preferred=("DEM", "elevation", "band_0"))
    dem = cube.band(band_name) if band_name else cube
    aggregated = dem.aggregate_spatial(geometries=feature_collection, reducer="mean").execute()
    return {"results": parse_aggregate_spatial_response(aggregated, "dem_m")}


def compute_slope_deg(*, connection, feature_collection, spatial_extent, temporal_start, temporal_end) -> dict[str, Any]:
    cube = connection.load_collection(
        COPERNICUS_DEM_COLLECTION,
        spatial_extent=spatial_extent,
        temporal_extent=[_normalize_date(temporal_start), _normalize_date(temporal_end)],
    )
    band_name = infer_band_name(cube, preferred=("DEM", "elevation", "band_0"))
    dem = cube.band(band_name) if band_name else cube
    try:
        slope_rad = dem.slope()
        slope_deg = slope_rad * (180.0 / math.pi)
        aggregated = slope_deg.aggregate_spatial(geometries=feature_collection, reducer="mean").execute()
        return {
            "results": parse_aggregate_spatial_response(aggregated, "slope_deg"),
            "supported": True,
        }
    except Exception:
        return {
            "results": {feature["id"]: {"slope_deg": None} for feature in feature_collection.get("features", [])},
            "supported": False,
        }
def parse_aggregate_spatial_response(payload: Any, metric_name: str) -> dict[str, dict[str, Any]]:
    """
    Parse different JSON shapes returned by openEO aggregate_spatial executions.
    """
    if payload is None:
        return {}

    if isinstance(payload, dict) and payload.get("type") == "FeatureCollection":
        return _parse_feature_collection_results(payload, metric_name)

    if isinstance(payload, dict) and "features" in payload:
        return _parse_feature_collection_results(payload, metric_name)

    if isinstance(payload, dict):
        return _parse_mapping_results(payload, metric_name)

    if isinstance(payload, list):
        return _parse_list_results(payload, metric_name)

    raise OpenEOExecutionError(f"Unsupported openEO aggregate_spatial response type: {type(payload)!r}")
def _parse_feature_collection_results(payload: dict[str, Any], metric_name: str) -> dict[str, dict[str, Any]]:
|
||||
results: dict[str, dict[str, Any]] = {}
|
||||
for feature in payload.get("features", []):
|
||||
feature_id = str(
|
||||
feature.get("id")
|
||||
or (feature.get("properties") or {}).get("cell_code")
|
||||
or (feature.get("properties") or {}).get("id")
|
||||
)
|
||||
if not feature_id:
|
||||
continue
|
||||
properties = feature.get("properties") or {}
|
||||
value = _extract_aggregate_value(properties)
|
||||
results[feature_id] = {metric_name: _coerce_float(value)}
|
||||
return results
|
||||
|
||||
|
||||
def _parse_mapping_results(payload: dict[str, Any], metric_name: str) -> dict[str, dict[str, Any]]:
|
||||
if "data" in payload and isinstance(payload["data"], (dict, list)):
|
||||
return parse_aggregate_spatial_response(payload["data"], metric_name)
|
||||
|
||||
results: dict[str, dict[str, Any]] = {}
|
||||
for feature_id, value in payload.items():
|
||||
if feature_id in {"type", "links", "meta"}:
|
||||
continue
|
||||
results[str(feature_id)] = {metric_name: _coerce_float(_extract_aggregate_value(value))}
|
||||
return results
|
||||
|
||||
|
||||
def _parse_list_results(payload: list[Any], metric_name: str) -> dict[str, dict[str, Any]]:
|
||||
results: dict[str, dict[str, Any]] = {}
|
||||
for index, item in enumerate(payload):
|
||||
if isinstance(item, dict):
|
||||
feature_id = str(item.get("id") or item.get("cell_code") or item.get("feature_id") or index)
|
||||
value = _extract_aggregate_value(item)
|
||||
else:
|
||||
feature_id = str(index)
|
||||
value = item
|
||||
results[feature_id] = {metric_name: _coerce_float(value)}
|
||||
return results
|
||||
|
||||
|
||||
def _extract_aggregate_value(value: Any) -> Any:
|
||||
if isinstance(value, dict):
|
||||
for key in ("mean", "value", "result", "average"):
|
||||
if key in value:
|
||||
return _extract_aggregate_value(value[key])
|
||||
if len(value) == 1:
|
||||
return _extract_aggregate_value(next(iter(value.values())))
|
||||
return None
|
||||
if isinstance(value, list):
|
||||
if not value:
|
||||
return None
|
||||
return _extract_aggregate_value(value[0])
|
||||
return value
|
||||
|
||||
|
||||
def merge_metric_results(target: dict[str, dict[str, Any]], updates: dict[str, dict[str, Any]]) -> None:
|
||||
for cell_code, values in updates.items():
|
||||
target.setdefault(cell_code, build_empty_metric_payload())
|
||||
target[cell_code].update(values)
|
||||
|
||||
|
||||
def linear_to_db(value: Any) -> float | None:
|
||||
numeric = _coerce_float(value)
|
||||
if numeric is None or numeric <= 0:
|
||||
return None
|
||||
return round(10.0 * math.log10(numeric), 6)
|
||||
|
||||
|
||||
def infer_band_name(cube, preferred: tuple[str, ...]) -> str | None:
|
||||
"""
|
||||
Best-effort band name selection for collections with backend-specific naming.
|
||||
"""
|
||||
metadata = getattr(cube, "metadata", None)
|
||||
if metadata is None:
|
||||
return None
|
||||
band_dimension = getattr(metadata, "band_dimension", None)
|
||||
bands = getattr(band_dimension, "bands", None)
|
||||
if not bands:
|
||||
return None
|
||||
available = []
|
||||
for band in bands:
|
||||
name = getattr(band, "name", None) or str(band)
|
||||
available.append(name)
|
||||
for candidate in preferred:
|
||||
if candidate in available:
|
||||
return candidate
|
||||
return available[0] if available else None
|
||||
|
||||
|
||||
def _coerce_float(value: Any) -> float | None:
|
||||
if value is None:
|
||||
return None
|
||||
if isinstance(value, Decimal):
|
||||
return float(value)
|
||||
try:
|
||||
return float(value)
|
||||
except (TypeError, ValueError):
|
||||
return None
|
||||
|
||||
|
||||
def _normalize_date(value: date | str) -> str:
|
||||
if isinstance(value, date):
|
||||
return value.isoformat()
|
||||
return str(value)
|
||||
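The parsing helpers above are pure functions, so they can be exercised without an openEO connection. A minimal sketch of how one metric response is parsed and merged into a per-cell feature vector, assuming the module is importable as `location_data.openeo_service`; the payload is illustrative, not a recorded backend response, and deriving `soil_vv_db` via `linear_to_db` mirrors what the pipeline presumably stores:

```python
# Minimal sketch; the aggregated payload below is invented for illustration.
from location_data.openeo_service import (
    build_empty_metric_payload,
    linear_to_db,
    merge_metric_results,
    parse_aggregate_spatial_response,
)

aggregated = {
    "type": "FeatureCollection",
    "features": [{"type": "Feature", "id": "cell-1", "properties": {"mean": 0.35}}],
}

target = {"cell-1": build_empty_metric_payload()}
merge_metric_results(target, parse_aggregate_spatial_response(aggregated, "soil_vv"))

# Derive the dB value from the linear VV backscatter.
target["cell-1"]["soil_vv_db"] = linear_to_db(target["cell-1"]["soil_vv"])
print(target["cell-1"]["soil_vv"], target["cell-1"]["soil_vv_db"])  # 0.35 -4.55932
```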
@@ -0,0 +1,93 @@
{
  "info": {
    "name": "Soil Data",
    "description": "Soil data API (SoilGrids) based on geographic coordinates",
    "schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json"
  },
  "variable": [
    {
      "key": "baseUrl",
      "value": "http://localhost:8020"
    },
    {
      "key": "task_id",
      "value": ""
    }
  ],
  "item": [
    {
      "name": "Get Soil Data (query)",
      "request": {
        "method": "GET",
        "header": [
          {
            "key": "Accept",
            "value": "application/json"
          }
        ],
        "url": {
          "raw": "{{baseUrl}}/api/soil-data/?lon=52.42&lat=36.38",
          "host": ["{{baseUrl}}"],
          "path": ["api", "soil-data", ""],
          "query": [
            {
              "key": "lon",
              "value": "52.42",
              "description": "Longitude"
            },
            {
              "key": "lat",
              "value": "36.38",
              "description": "Latitude"
            }
          ]
        },
        "description": "Fetch soil data with lon and lat in the query string. Returns 200 if the data is already in the DB, otherwise 202 with a task_id."
      }
    },
    {
      "name": "Get Soil Data (POST)",
      "request": {
        "method": "POST",
        "header": [
          {
            "key": "Content-Type",
            "value": "application/json"
          },
          {
            "key": "Accept",
            "value": "application/json"
          }
        ],
        "body": {
          "mode": "raw",
          "raw": "{\n \"lon\": 52.42,\n \"lat\": 36.38\n}"
        },
        "url": {
          "raw": "{{baseUrl}}/api/soil-data/",
          "host": ["{{baseUrl}}"],
          "path": ["api", "soil-data", ""]
        },
        "description": "Fetch soil data with lon and lat in the request body. Returns 200 if the data is already in the DB, otherwise 202 with a task_id."
      }
    },
    {
      "name": "Task Status",
      "request": {
        "method": "GET",
        "header": [
          {
            "key": "Accept",
            "value": "application/json"
          }
        ],
        "url": {
          "raw": "{{baseUrl}}/api/soil-data/tasks/{{task_id}}/status/",
          "host": ["{{baseUrl}}"],
          "path": ["api", "soil-data", "tasks", "{{task_id}}", "status", ""]
        },
        "description": "Check the status of the soil fetch task. The task_id comes from the 202 response."
      }
    }
  ]
}
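Outside Postman, the same flow can be smoke-tested with `requests`. A sketch, assuming the service is running on `localhost:8020` as the collection's `baseUrl` suggests; the exact shape of the 202 body and the task status values depend on the views, so the field names below are assumptions to adjust:

```python
# Sketch of the soil-data request/poll flow from the collection above.
import time

import requests

BASE = "http://localhost:8020"

resp = requests.get(f"{BASE}/api/soil-data/", params={"lon": 52.42, "lat": 36.38}, timeout=30)
if resp.status_code == 202:
    # Data not in the DB yet; poll the status endpoint with the returned task_id.
    # "task_id" at the top level and Celery-style states are assumptions here.
    task_id = resp.json()["task_id"]
    while True:
        status = requests.get(f"{BASE}/api/soil-data/tasks/{task_id}/status/", timeout=30).json()
        if status.get("status") in {"SUCCESS", "FAILURE"}:
            break
        time.sleep(2)
print(resp.status_code)
```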
@@ -0,0 +1,155 @@
from __future__ import annotations

import os
from dataclasses import dataclass
from datetime import date, timedelta
from typing import Any

import requests

from .models import NdviObservation


DEFAULT_SATELLITE_SOURCE = "sentinel-2"
DEFAULT_CLOUD_COVER = 20.0


def classify_ndvi(mean_ndvi: float) -> str:
    if mean_ndvi < 0.2:
        return "Bare soil"
    if mean_ndvi < 0.4:
        return "Weak vegetation"
    if mean_ndvi < 0.6:
        return "Moderate vegetation"
    return "Healthy vegetation"


def calculate_ndvi(red: float, nir: float) -> float | None:
    denominator = nir + red
    if denominator == 0:
        return None
    return round((nir - red) / denominator, 4)


def calculate_ndvi_grid(red_band: list[list[float]], nir_band: list[list[float]]) -> list[list[float | None]]:
    grid: list[list[float | None]] = []
    for red_row, nir_row in zip(red_band, nir_band):
        row: list[float | None] = []
        for red, nir in zip(red_row, nir_row):
            row.append(calculate_ndvi(float(red), float(nir)))
        grid.append(row)
    return grid


def mean_ndvi(grid: list[list[float | None]]) -> float:
    values = [value for row in grid for value in row if value is not None]
    if not values:
        return 0.0
    return round(sum(values) / len(values), 4)


def _default_bbox(location: Any, delta: float = 0.001) -> list[float]:
    lat = float(location.latitude)
    lon = float(location.longitude)
    return [lon - delta, lat - delta, lon + delta, lat + delta]


def _geometry_payload(location: Any) -> dict:
    boundary = getattr(location, "farm_boundary", None) or {}
    if boundary:
        return boundary
    return {"bbox": _default_bbox(location)}


@dataclass
class SatelliteNdviResult:
    observation_date: str
    mean_ndvi: float
    ndvi_map: list[list[float | None]]
    vegetation_health_class: str
    satellite_source: str
    cloud_cover: float | None
    metadata: dict[str, Any]


class SentinelCompatibleNdviClient:
    def __init__(self) -> None:
        self.endpoint = os.environ.get("SATELLITE_NDVI_ENDPOINT")
        self.api_key = os.environ.get("SATELLITE_NDVI_API_KEY")
        self.source = os.environ.get("SATELLITE_SOURCE", DEFAULT_SATELLITE_SOURCE)

    @property
    def is_configured(self) -> bool:
        return bool(self.endpoint and self.api_key)

    def fetch_red_nir(
        self,
        geometry: dict,
        date_from: date,
        date_to: date,
        cloud_cover: float,
    ) -> dict[str, Any] | None:
        if not self.is_configured:
            return None

        response = requests.post(
            self.endpoint,
            json={
                "geometry": geometry,
                "date_from": date_from.isoformat(),
                "date_to": date_to.isoformat(),
                "cloud_cover_max": cloud_cover,
                "source": self.source,
                "bands": ["B04", "B08"],
            },
            headers={
                "Authorization": f"Bearer {self.api_key}",
                "Content-Type": "application/json",
            },
            timeout=30,
        )
        response.raise_for_status()
        return response.json()


def fetch_or_get_ndvi_observation(
    location: Any,
    days_back: int = 7,
    cloud_cover: float = DEFAULT_CLOUD_COVER,
) -> NdviObservation | None:
    observation = location.ndvi_observations.order_by("-observation_date", "-created_at").first()
    if observation is not None:
        return observation

    client = SentinelCompatibleNdviClient()
    payload = client.fetch_red_nir(
        geometry=_geometry_payload(location),
        date_from=date.today() - timedelta(days=days_back),
        date_to=date.today(),
        cloud_cover=cloud_cover,
    )
    if not payload:
        return None

    red_band = payload.get("red_band") or []
    nir_band = payload.get("nir_band") or []
    observation_date = payload.get("observation_date") or date.today().isoformat()
    ndvi_grid = calculate_ndvi_grid(red_band=red_band, nir_band=nir_band)
    ndvi_mean = mean_ndvi(ndvi_grid)
    return NdviObservation.objects.create(
        location=location,
        observation_date=date.fromisoformat(observation_date),
        mean_ndvi=ndvi_mean,
        ndvi_map={
            "grid": ndvi_grid,
            "red_band_source": "B04",
            "nir_band_source": "B08",
        },
        vegetation_health_class=classify_ndvi(ndvi_mean),
        satellite_source=payload.get("satellite_source", client.source),
        cloud_cover=payload.get("cloud_cover"),
        metadata={
            "geometry": _geometry_payload(location),
            "raw_payload_meta": payload.get("metadata", {}),
        },
    )
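As a worked example of the arithmetic above: for a pixel with red (B04) reflectance 0.08 and NIR (B08) reflectance 0.42, NDVI = (0.42 - 0.08) / (0.42 + 0.08) = 0.68, which `classify_ndvi` labels "Healthy vegetation". A sketch using the pure helpers (the module path `location_data.ndvi` is assumed, since the file name is not shown in this diff):

```python
# Worked example for the pure helpers above; no network or DB required.
from location_data.ndvi import calculate_ndvi, classify_ndvi, mean_ndvi  # path assumed

value = calculate_ndvi(red=0.08, nir=0.42)
print(value)                 # 0.68
print(classify_ndvi(value))  # Healthy vegetation

grid = [[0.68, None], [0.61, 0.7]]
print(mean_ndvi(grid))       # 0.6633; None cells (zero denominator) are skipped
```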
@@ -0,0 +1,116 @@
from __future__ import annotations

from typing import Any

from django.db.models import Avg, QuerySet

from .models import AnalysisGridObservation, RemoteSensingRun, SoilLocation


SATELLITE_METRIC_FIELDS = (
    "ndvi",
    "ndwi",
    "lst_c",
    "soil_vv_db",
    "dem_m",
    "slope_deg",
)


def build_location_satellite_snapshot(
    location: SoilLocation,
    *,
    block_code: str = "",
) -> dict[str, Any]:
    run = get_latest_completed_remote_sensing_run(location, block_code=block_code)
    if run is None:
        return {
            "status": "missing",
            "block_code": block_code,
            "run_id": None,
            "temporal_extent": None,
            "cell_count": 0,
            "resolved_metrics": {},
            "metric_sources": {},
        }

    observations = get_run_observations(run)
    summary = summarize_observations(observations)
    return {
        "status": "completed",
        "block_code": run.block_code,
        "run_id": run.id,
        "temporal_extent": {
            "start_date": run.temporal_start.isoformat() if run.temporal_start else None,
            "end_date": run.temporal_end.isoformat() if run.temporal_end else None,
        },
        "cell_count": observations.count(),
        "resolved_metrics": summary,
        "metric_sources": {
            metric_name: "remote_sensing"
            for metric_name in summary
        },
    }


def build_location_block_satellite_snapshots(location: SoilLocation) -> list[dict[str, Any]]:
    block_layout = location.block_layout or {}
    blocks = block_layout.get("blocks") or []
    if not blocks:
        return [build_location_satellite_snapshot(location)]
    snapshots = []
    for block in blocks:
        snapshots.append(
            build_location_satellite_snapshot(
                location,
                block_code=str(block.get("block_code") or "").strip(),
            )
        )
    return snapshots


def get_latest_completed_remote_sensing_run(
    location: SoilLocation,
    *,
    block_code: str = "",
) -> RemoteSensingRun | None:
    return (
        RemoteSensingRun.objects.filter(
            soil_location=location,
            block_code=block_code or "",
            status=RemoteSensingRun.STATUS_SUCCESS,
        )
        .order_by("-temporal_end", "-created_at", "-id")
        .first()
    )


def get_run_observations(run: RemoteSensingRun) -> QuerySet[AnalysisGridObservation]:
    return (
        AnalysisGridObservation.objects.select_related("cell", "run")
        .filter(
            cell__soil_location=run.soil_location,
            cell__block_code=run.block_code or "",
            temporal_start=run.temporal_start,
            temporal_end=run.temporal_end,
        )
        .order_by("cell__cell_code")
    )


def summarize_observations(
    observations: QuerySet[AnalysisGridObservation],
) -> dict[str, float]:
    aggregates = observations.aggregate(
        **{
            f"{metric_name}_mean": Avg(metric_name)
            for metric_name in SATELLITE_METRIC_FIELDS
        }
    )
    summary: dict[str, float] = {}
    for metric_name in SATELLITE_METRIC_FIELDS:
        value = aggregates.get(f"{metric_name}_mean")
        if value is None:
            continue
        summary[metric_name] = round(float(value), 6)
    return summary
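The snapshot builder only reads persisted observations, so the expensive openEO work never runs inside a request. A usage sketch, assuming a Django shell and at least one location with a completed run in the database:

```python
# Usage sketch for the snapshot helpers above (run inside `manage.py shell`).
from location_data.models import SoilLocation
from location_data.satellite_snapshot import build_location_block_satellite_snapshots

location = SoilLocation.objects.first()  # illustrative; pick a real record
for snapshot in build_location_block_satellite_snapshots(location):
    # "missing" means no successful RemoteSensingRun exists for that block yet.
    print(snapshot["block_code"], snapshot["status"], snapshot["resolved_metrics"])
```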
@@ -0,0 +1,340 @@
from rest_framework import serializers

from .data_driven_subdivision import SUPPORTED_CLUSTER_FEATURES
from .models import (
    AnalysisGridObservation,
    BlockSubdivision,
    RemoteSensingRun,
    RemoteSensingClusterAssignment,
    RemoteSensingSubdivisionResult,
    SoilLocation,
)
from .satellite_snapshot import build_location_block_satellite_snapshots


class SoilDataRequestSerializer(serializers.Serializer):
    """Input for registering a farm and its farmer-defined blocks."""

    class BlockInputSerializer(serializers.Serializer):
        block_code = serializers.CharField(max_length=64)
        boundary = serializers.JSONField()
        order = serializers.IntegerField(required=False, min_value=1)

    lon = serializers.DecimalField(max_digits=9, decimal_places=6, required=True)
    lat = serializers.DecimalField(max_digits=9, decimal_places=6, required=True)
    block_count = serializers.IntegerField(required=False, min_value=1, default=1)
    block_code = serializers.CharField(required=False, default="block-1", max_length=64)
    farm_boundary = serializers.JSONField(required=False)
    blocks = BlockInputSerializer(many=True, required=False)

    def validate(self, attrs):
        blocks = attrs.get("blocks") or []
        if self.context.get("require_farm_boundary") and not attrs.get("farm_boundary"):
            raise serializers.ValidationError(
                {"farm_boundary": ["مختصات گوشههای کل زمین باید ارسال شود."]}
            )
        if self.context.get("require_farm_boundary") and not blocks:
            raise serializers.ValidationError(
                {"blocks": ["مختصات بلوکهای تعریفشده توسط کشاورز باید ارسال شود."]}
            )
        if blocks:
            attrs["block_count"] = len(blocks)
        return attrs


class SoilLocationResponseSerializer(serializers.ModelSerializer):
    """Output serializer for SoilLocation together with its remote-sensing summary."""

    lon = serializers.DecimalField(
        source="longitude",
        max_digits=9,
        decimal_places=6,
        read_only=True,
    )
    lat = serializers.DecimalField(
        source="latitude",
        max_digits=9,
        decimal_places=6,
        read_only=True,
    )
    input_block_count = serializers.IntegerField(read_only=True)
    farm_boundary = serializers.JSONField(read_only=True)
    block_layout = serializers.JSONField(read_only=True)
    block_subdivisions = serializers.SerializerMethodField()
    satellite_snapshots = serializers.SerializerMethodField()

    class Meta:
        model = SoilLocation
        fields = [
            "id",
            "lon",
            "lat",
            "input_block_count",
            "farm_boundary",
            "block_layout",
            "block_subdivisions",
            "satellite_snapshots",
        ]

    def get_block_subdivisions(self, obj):
        subdivisions = obj.block_subdivisions.all().order_by("block_code", "id")
        return BlockSubdivisionSerializer(subdivisions, many=True).data

    def get_satellite_snapshots(self, obj):
        return build_location_block_satellite_snapshots(obj)


class BlockSubdivisionSerializer(serializers.ModelSerializer):
    elbow_plot = serializers.ImageField(read_only=True)

    class Meta:
        model = BlockSubdivision
        fields = [
            "block_code",
            "chunk_size_sqm",
            "grid_points",
            "centroid_points",
            "grid_point_count",
            "centroid_count",
            "elbow_plot",
            "status",
            "metadata",
            "created_at",
            "updated_at",
        ]


class SoilDataTaskResponseSerializer(serializers.Serializer):
    """Output serializer for the 202 response when the task has been queued."""

    source = serializers.CharField(default="task")
    task_id = serializers.CharField()
    lon = serializers.FloatField(source="longitude")
    lat = serializers.FloatField(source="latitude")
    status_url = serializers.CharField(required=False)


class NdviHealthRequestSerializer(serializers.Serializer):
    farm_uuid = serializers.UUIDField(required=True, help_text="Unique farm identifier")


class NdviHealthDataItemSerializer(serializers.Serializer):
    title = serializers.CharField()
    value = serializers.JSONField()
    color = serializers.CharField()
    icon = serializers.CharField()


class NdviHealthResponseSerializer(serializers.Serializer):
    ndviIndex = serializers.FloatField(allow_null=True, required=False)
    mean_ndvi = serializers.FloatField(allow_null=True)
    ndvi_map = serializers.JSONField()
    vegetation_health_class = serializers.CharField(allow_null=True)
    observation_date = serializers.CharField(allow_null=True)
    satellite_source = serializers.CharField(allow_null=True)
    healthData = NdviHealthDataItemSerializer(many=True)


class RemoteSensingTriggerSerializer(serializers.Serializer):
    lon = serializers.DecimalField(max_digits=9, decimal_places=6, required=True)
    lat = serializers.DecimalField(max_digits=9, decimal_places=6, required=True)
    block_code = serializers.CharField(required=False, allow_blank=True, default="", max_length=64)
    start_date = serializers.DateField(required=True)
    end_date = serializers.DateField(required=True)
    force_refresh = serializers.BooleanField(required=False, default=False)
    cluster_count = serializers.IntegerField(required=False, min_value=1, allow_null=True, default=None)
    selected_features = serializers.ListField(
        child=serializers.CharField(max_length=64),
        required=False,
        allow_empty=False,
    )

    def validate(self, attrs):
        if attrs["start_date"] > attrs["end_date"]:
            raise serializers.ValidationError("start_date نمیتواند بعد از end_date باشد.")
        selected_features = attrs.get("selected_features") or []
        invalid_features = sorted(
            feature_name
            for feature_name in selected_features
            if feature_name not in SUPPORTED_CLUSTER_FEATURES
        )
        if invalid_features:
            raise serializers.ValidationError(
                {
                    "selected_features": [
                        "ویژگیهای نامعتبر برای خوشهبندی: "
                        + ", ".join(invalid_features)
                    ]
                }
            )
        return attrs


class RemoteSensingResultQuerySerializer(RemoteSensingTriggerSerializer):
    page = serializers.IntegerField(required=False, min_value=1, default=1)
    page_size = serializers.IntegerField(required=False, min_value=1, max_value=200, default=100)


class RemoteSensingCellObservationSerializer(serializers.ModelSerializer):
    cell_code = serializers.CharField(source="cell.cell_code", read_only=True)
    block_code = serializers.CharField(source="cell.block_code", read_only=True)
    chunk_size_sqm = serializers.IntegerField(source="cell.chunk_size_sqm", read_only=True)
    centroid_lat = serializers.DecimalField(source="cell.centroid_lat", max_digits=9, decimal_places=6, read_only=True)
    centroid_lon = serializers.DecimalField(source="cell.centroid_lon", max_digits=9, decimal_places=6, read_only=True)
    geometry = serializers.JSONField(source="cell.geometry", read_only=True)

    class Meta:
        model = AnalysisGridObservation
        fields = [
            "cell_code",
            "block_code",
            "chunk_size_sqm",
            "centroid_lat",
            "centroid_lon",
            "geometry",
            "temporal_start",
            "temporal_end",
            "ndvi",
            "ndwi",
            "lst_c",
            "soil_vv",
            "soil_vv_db",
            "dem_m",
            "slope_deg",
            "metadata",
        ]


class RemoteSensingSummarySerializer(serializers.Serializer):
    cell_count = serializers.IntegerField()
    ndvi_mean = serializers.FloatField(allow_null=True)
    ndwi_mean = serializers.FloatField(allow_null=True)
    lst_c_mean = serializers.FloatField(allow_null=True)
    soil_vv_db_mean = serializers.FloatField(allow_null=True)
    dem_m_mean = serializers.FloatField(allow_null=True)
    slope_deg_mean = serializers.FloatField(allow_null=True)


class RemoteSensingRunSerializer(serializers.ModelSerializer):
    status_label = serializers.SerializerMethodField()
    pipeline_status = serializers.SerializerMethodField()
    stage = serializers.SerializerMethodField()
    selected_features = serializers.SerializerMethodField()
    requested_cluster_count = serializers.SerializerMethodField()

    def get_status_label(self, obj):
        return obj.normalized_status

    def get_pipeline_status(self, obj):
        return obj.normalized_status

    def get_stage(self, obj):
        return (obj.metadata or {}).get("stage")

    def get_selected_features(self, obj):
        return (obj.metadata or {}).get("selected_features", [])

    def get_requested_cluster_count(self, obj):
        return (obj.metadata or {}).get("requested_cluster_count")

    class Meta:
        model = RemoteSensingRun
        fields = [
            "id",
            "block_code",
            "chunk_size_sqm",
            "temporal_start",
            "temporal_end",
            "status",
            "status_label",
            "pipeline_status",
            "stage",
            "selected_features",
            "requested_cluster_count",
            "metadata",
            "error_message",
            "started_at",
            "finished_at",
            "created_at",
            "updated_at",
        ]


class RemoteSensingClusterAssignmentSerializer(serializers.ModelSerializer):
    cell_code = serializers.CharField(source="cell.cell_code", read_only=True)
    centroid_lat = serializers.DecimalField(source="cell.centroid_lat", max_digits=9, decimal_places=6, read_only=True)
    centroid_lon = serializers.DecimalField(source="cell.centroid_lon", max_digits=9, decimal_places=6, read_only=True)

    class Meta:
        model = RemoteSensingClusterAssignment
        fields = [
            "cell_code",
            "cluster_label",
            "centroid_lat",
            "centroid_lon",
            "raw_feature_values",
            "scaled_feature_values",
        ]


class RemoteSensingSubdivisionResultSerializer(serializers.ModelSerializer):
    assignments = serializers.SerializerMethodField()

    def get_assignments(self, obj):
        assignments = self.context.get("paginated_assignments")
        if assignments is None:
            assignments = obj.assignments.all().order_by("cluster_label", "cell__cell_code")
        return RemoteSensingClusterAssignmentSerializer(assignments, many=True).data

    class Meta:
        model = RemoteSensingSubdivisionResult
        fields = [
            "id",
            "block_code",
            "chunk_size_sqm",
            "temporal_start",
            "temporal_end",
            "cluster_count",
            "selected_features",
            "skipped_cell_codes",
            "metadata",
            "assignments",
            "created_at",
            "updated_at",
        ]


class RemoteSensingResponseSerializer(serializers.Serializer):
    status = serializers.CharField()
    source = serializers.CharField()
    location = SoilLocationResponseSerializer()
    block_code = serializers.CharField(allow_blank=True)
    chunk_size_sqm = serializers.IntegerField(allow_null=True)
    temporal_extent = serializers.JSONField()
    summary = RemoteSensingSummarySerializer()
    cells = RemoteSensingCellObservationSerializer(many=True)
    run = RemoteSensingRunSerializer(allow_null=True)
    subdivision_result = RemoteSensingSubdivisionResultSerializer(allow_null=True)
    pagination = serializers.JSONField(required=False)


class RemoteSensingRunStatusResponseSerializer(serializers.Serializer):
    status = serializers.CharField()
    source = serializers.CharField()
    run = RemoteSensingRunSerializer()
    task_id = serializers.CharField(allow_blank=True, allow_null=True, required=False)


class RemoteSensingRunResultResponseSerializer(serializers.Serializer):
    status = serializers.CharField()
    source = serializers.CharField()
    location = SoilLocationResponseSerializer()
    block_code = serializers.CharField(allow_blank=True)
    chunk_size_sqm = serializers.IntegerField(allow_null=True)
    temporal_extent = serializers.JSONField()
    summary = RemoteSensingSummarySerializer()
    cells = RemoteSensingCellObservationSerializer(many=True)
    run = RemoteSensingRunSerializer()
    subdivision_result = RemoteSensingSubdivisionResultSerializer(allow_null=True)
    pagination = serializers.JSONField(required=False)
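DRF serializers can be validated standalone, which makes these request contracts easy to probe. A sketch, assuming Django settings are already configured and that `"ndvi"` is one of the entries in `SUPPORTED_CLUSTER_FEATURES`:

```python
# Validation sketch for RemoteSensingTriggerSerializer (requires configured Django settings).
from location_data.serializers import RemoteSensingTriggerSerializer

serializer = RemoteSensingTriggerSerializer(
    data={
        "lon": "51.389000",
        "lat": "35.689200",
        "block_code": "block-1",
        "start_date": "2025-01-01",
        "end_date": "2025-01-31",
        "selected_features": ["ndvi"],  # assumes "ndvi" is in SUPPORTED_CLUSTER_FEATURES
    }
)
print(serializer.is_valid())      # True if "ndvi" is a supported cluster feature
print(serializer.validated_data)  # dates parsed, defaults (force_refresh=False, ...) filled in
```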
@@ -0,0 +1,615 @@
"""
Celery tasks for the remote sensing pipeline and the data-driven subdivision.
"""

import logging
from typing import Any

from config.celery import app
from django.conf import settings
from django.db import transaction
from django.utils import timezone
from django.utils.dateparse import parse_date

from .data_driven_subdivision import (
    DEFAULT_CLUSTER_FEATURES,
    DataDrivenSubdivisionError,
    create_remote_sensing_subdivision_result,
)
from .grid_analysis import create_or_get_analysis_grid_cells
from .models import (
    AnalysisGridCell,
    AnalysisGridObservation,
    BlockSubdivision,
    RemoteSensingRun,
    RemoteSensingSubdivisionResult,
    SoilLocation,
)
from .openeo_service import (
    OpenEOAuthenticationError,
    OpenEOExecutionError,
    OpenEOServiceError,
    compute_remote_sensing_metrics,
)

try:
    import requests
except ImportError:  # pragma: no cover - handled in stripped envs
    RequestException = Exception
else:
    RequestException = requests.RequestException


logger = logging.getLogger(__name__)


def run_remote_sensing_analysis(
    *,
    soil_location_id: int,
    block_code: str = "",
    temporal_start: Any,
    temporal_end: Any,
    force_refresh: bool = False,
    task_id: str = "",
    run_id: int | None = None,
    cluster_count: int | None = None,
    selected_features: list[str] | None = None,
) -> dict[str, Any]:
    """
    Run the remote sensing analysis synchronously for one location/block.
    This helper is usable from the Celery task and any other internal orchestration.
    """
    start_date = _normalize_temporal_date(temporal_start, "temporal_start")
    end_date = _normalize_temporal_date(temporal_end, "temporal_end")
    if start_date > end_date:
        raise ValueError("temporal_start نمیتواند بعد از temporal_end باشد.")

    location = SoilLocation.objects.filter(pk=soil_location_id).first()
    if location is None:
        raise ValueError(f"SoilLocation با id={soil_location_id} پیدا نشد.")

    resolved_block_code = str(block_code or "").strip()
    subdivision = _resolve_block_subdivision(location, resolved_block_code)
    run = _get_or_create_remote_sensing_run(
        run_id=run_id,
        location=location,
        subdivision=subdivision,
        block_code=resolved_block_code,
        temporal_start=start_date,
        temporal_end=end_date,
        task_id=task_id,
        cluster_count=cluster_count,
        selected_features=selected_features or list(DEFAULT_CLUSTER_FEATURES),
    )
    _mark_run_running(run)

    try:
        _record_run_stage(
            run,
            "preparing_analysis_grid",
            {
                "block_code": resolved_block_code,
                "temporal_extent": {
                    "start_date": start_date.isoformat(),
                    "end_date": end_date.isoformat(),
                },
            },
        )
        grid_summary = create_or_get_analysis_grid_cells(
            location,
            block_code=resolved_block_code,
            block_subdivision=subdivision,
        )
        _record_run_stage(run, "analysis_grid_ready", {"grid_summary": grid_summary})
        all_cells = _load_grid_cells(location, resolved_block_code)
        cells_to_process = _select_cells_for_processing(
            all_cells=all_cells,
            temporal_start=start_date,
            temporal_end=end_date,
            force_refresh=force_refresh,
        )
        _record_run_stage(
            run,
            "analysis_cells_selected",
            {
                "cell_selection": {
                    "total_cell_count": len(all_cells),
                    "cell_count_to_process": len(cells_to_process),
                    "existing_cell_count": len(all_cells) - len(cells_to_process),
                    "force_refresh": force_refresh,
                }
            },
        )

        if not cells_to_process:
            # All observations for this temporal extent already exist; reuse them.
            _record_run_stage(
                run,
                "using_cached_observations",
                {"source": "database"},
            )
            observations = _load_observations(
                location=location,
                block_code=resolved_block_code,
                temporal_start=start_date,
                temporal_end=end_date,
            )
            subdivision_result = _ensure_subdivision_result(
                location=location,
                run=run,
                subdivision=subdivision,
                block_code=resolved_block_code,
                observations=observations,
                cluster_count=cluster_count,
                selected_features=selected_features,
            )
            _record_run_stage(
                run,
                "clustering_completed",
                _build_clustering_stage_metadata(subdivision_result),
            )
            summary = {
                "status": "completed",
                "source": "database",
                "run_id": run.id,
                "processed_cell_count": 0,
                "created_observation_count": 0,
                "updated_observation_count": 0,
                "existing_observation_count": len(all_cells),
                "failed_metric_count": 0,
                "chunk_size_sqm": grid_summary["chunk_size_sqm"],
                "block_code": resolved_block_code,
                "cell_count": len(all_cells),
                "subdivision_result_id": getattr(subdivision_result, "id", None),
                "cluster_count": getattr(subdivision_result, "cluster_count", 0),
            }
            _mark_run_success(run, summary)
            return summary

        _record_run_stage(
            run,
            "fetching_remote_metrics",
            {"requested_cell_count": len(cells_to_process)},
        )
        remote_payload = compute_remote_sensing_metrics(
            cells_to_process,
            temporal_start=start_date,
            temporal_end=end_date,
        )
        _record_run_stage(
            run,
            "remote_metrics_fetched",
            {
                "failed_metric_count": len(remote_payload["metadata"].get("failed_metrics", [])),
                "service_metadata": remote_payload["metadata"],
            },
        )
        upsert_summary = _upsert_grid_observations(
            cells=cells_to_process,
            run=run,
            temporal_start=start_date,
            temporal_end=end_date,
            metric_payload=remote_payload,
        )
        _record_run_stage(run, "observations_persisted", upsert_summary)
        observations = _load_observations(
            location=location,
            block_code=resolved_block_code,
            temporal_start=start_date,
            temporal_end=end_date,
        )
        subdivision_result = _ensure_subdivision_result(
            location=location,
            run=run,
            subdivision=subdivision,
            block_code=resolved_block_code,
            observations=observations,
            cluster_count=cluster_count,
            selected_features=selected_features,
        )
        _record_run_stage(
            run,
            "clustering_completed",
            _build_clustering_stage_metadata(subdivision_result),
        )
        summary = {
            "status": "completed",
            "source": "openeo",
            "run_id": run.id,
            "processed_cell_count": len(cells_to_process),
            "created_observation_count": upsert_summary["created_count"],
            "updated_observation_count": upsert_summary["updated_count"],
            "existing_observation_count": len(all_cells) - len(cells_to_process),
            "failed_metric_count": len(remote_payload["metadata"].get("failed_metrics", [])),
            "chunk_size_sqm": grid_summary["chunk_size_sqm"],
            "block_code": resolved_block_code,
            "cell_count": len(all_cells),
            "subdivision_result_id": subdivision_result.id,
            "cluster_count": subdivision_result.cluster_count,
        }
        _mark_run_success(run, summary, remote_payload["metadata"])
        logger.info(
            "Remote sensing analysis completed",
            extra={
                "run_id": run.id,
                "soil_location_id": location.id,
                "block_code": resolved_block_code,
                "processed_cell_count": summary["processed_cell_count"],
            },
        )
        return summary
    except Exception as exc:
        _mark_run_failure(run, str(exc))
        raise


@app.task(bind=True, max_retries=3, default_retry_delay=60)
def run_remote_sensing_analysis_task(
    self,
    soil_location_id: int,
    block_code: str = "",
    temporal_start: Any = "",
    temporal_end: Any = "",
    force_refresh: bool = False,
    run_id: int | None = None,
    cluster_count: int | None = None,
    selected_features: list[str] | None = None,
):
    """
    Run the remote sensing analysis asynchronously for a location/block and persist the results.
    """
    logger.info(
        "Starting remote sensing analysis task",
        extra={
            "task_id": self.request.id,
            "soil_location_id": soil_location_id,
            "block_code": block_code,
            "temporal_start": temporal_start,
            "temporal_end": temporal_end,
            "force_refresh": force_refresh,
        },
    )
    try:
        return run_remote_sensing_analysis(
            soil_location_id=soil_location_id,
            block_code=block_code,
            temporal_start=temporal_start,
            temporal_end=temporal_end,
            force_refresh=force_refresh,
            task_id=self.request.id,
            run_id=run_id,
            cluster_count=cluster_count,
            selected_features=selected_features,
        )
    except OpenEOAuthenticationError:
        logger.exception(
            "Remote sensing auth failure",
            extra={"task_id": self.request.id, "soil_location_id": soil_location_id},
        )
        raise
    except (OpenEOExecutionError, OpenEOServiceError, RequestException, DataDrivenSubdivisionError) as exc:
        logger.warning(
            "Transient remote sensing failure, retrying task",
            extra={
                "task_id": self.request.id,
                "soil_location_id": soil_location_id,
                "block_code": block_code,
                "retry_count": self.request.retries,
                "error": str(exc),
            },
        )
        raise self.retry(exc=exc)


def _normalize_temporal_date(value: Any, field_name: str):
    if hasattr(value, "isoformat") and not isinstance(value, str):
        return value
    parsed = parse_date(str(value))
    if parsed is None:
        raise ValueError(f"{field_name} نامعتبر است.")
    return parsed


def _resolve_block_subdivision(location: SoilLocation, block_code: str) -> BlockSubdivision | None:
    if not block_code:
        return None
    return (
        BlockSubdivision.objects.filter(
            soil_location=location,
            block_code=block_code,
        )
        .order_by("-updated_at", "-id")
        .first()
    )


def _get_or_create_remote_sensing_run(
    *,
    run_id: int | None,
    location: SoilLocation,
    subdivision: BlockSubdivision | None,
    block_code: str,
    temporal_start,
    temporal_end,
    task_id: str,
    cluster_count: int | None,
    selected_features: list[str],
) -> RemoteSensingRun:
    queued_at = timezone.now().isoformat()
    if run_id is not None:
        run = RemoteSensingRun.objects.filter(pk=run_id, soil_location=location).first()
        if run is not None:
            metadata = dict(run.metadata or {})
            if task_id:
                metadata["task_id"] = task_id
            metadata.setdefault("status_label", "pending")
            metadata["stage"] = "queued"
            metadata["selected_features"] = selected_features
            metadata["requested_cluster_count"] = cluster_count
            metadata["pipeline"] = {
                "name": "remote_sensing_subdivision",
                "version": 2,
            }
            metadata["timestamps"] = {
                **dict(metadata.get("timestamps") or {}),
                "queued_at": queued_at,
            }
            run.block_subdivision = subdivision
            run.block_code = block_code
            run.chunk_size_sqm = int(getattr(settings, "SUBDIVISION_CHUNK_SQM", 900) or 900)
            run.temporal_start = temporal_start
            run.temporal_end = temporal_end
            run.metadata = metadata
            run.save(
                update_fields=[
                    "block_subdivision",
                    "block_code",
                    "chunk_size_sqm",
                    "temporal_start",
                    "temporal_end",
                    "metadata",
                    "updated_at",
                ]
            )
            return run
    metadata = {
        "status_label": "pending",
        "stage": "queued",
        "selected_features": selected_features,
        "requested_cluster_count": cluster_count,
        "pipeline": {
            "name": "remote_sensing_subdivision",
            "version": 2,
        },
        "timestamps": {"queued_at": queued_at},
    }
    if task_id:
        metadata["task_id"] = task_id
    return RemoteSensingRun.objects.create(
        soil_location=location,
        block_subdivision=subdivision,
        block_code=block_code,
        chunk_size_sqm=int(getattr(settings, "SUBDIVISION_CHUNK_SQM", 900) or 900),
        temporal_start=temporal_start,
        temporal_end=temporal_end,
        status=RemoteSensingRun.STATUS_PENDING,
        metadata=metadata,
    )


def _mark_run_running(run: RemoteSensingRun) -> None:
    metadata = dict(run.metadata or {})
    metadata["status_label"] = "running"
    metadata["stage"] = "running"
    metadata["timestamps"] = {
        **dict(metadata.get("timestamps") or {}),
        "started_at": timezone.now().isoformat(),
    }
    run.status = RemoteSensingRun.STATUS_RUNNING
    run.started_at = timezone.now()
    run.metadata = metadata
    run.save(update_fields=["status", "started_at", "metadata", "updated_at"])


def _mark_run_success(
    run: RemoteSensingRun,
    summary: dict[str, Any],
    service_metadata: dict[str, Any] | None = None,
) -> None:
    metadata = dict(run.metadata or {})
    metadata["summary"] = summary
    metadata["status_label"] = "completed"
    metadata["stage"] = "completed"
    metadata["timestamps"] = {
        **dict(metadata.get("timestamps") or {}),
        "completed_at": timezone.now().isoformat(),
    }
    if service_metadata:
        metadata["service"] = service_metadata
    run.status = RemoteSensingRun.STATUS_SUCCESS
    run.finished_at = timezone.now()
    run.error_message = ""
    run.metadata = metadata
    run.save(
        update_fields=[
            "status",
            "finished_at",
            "error_message",
            "metadata",
            "updated_at",
        ]
    )


def _mark_run_failure(run: RemoteSensingRun, error_message: str) -> None:
    metadata = dict(run.metadata or {})
    metadata["status_label"] = "failed"
    metadata["failure_reason"] = error_message[:4000]
    metadata["timestamps"] = {
        **dict(metadata.get("timestamps") or {}),
        "failed_at": timezone.now().isoformat(),
    }
    run.status = RemoteSensingRun.STATUS_FAILURE
    run.finished_at = timezone.now()
    run.error_message = error_message[:4000]
    run.metadata = metadata
    run.save(
        update_fields=[
            "status",
            "finished_at",
            "error_message",
            "metadata",
            "updated_at",
        ]
    )
    logger.exception(
        "Remote sensing analysis failed",
        extra={"run_id": run.id, "soil_location_id": run.soil_location_id, "block_code": run.block_code},
    )


def _load_grid_cells(location: SoilLocation, block_code: str) -> list[AnalysisGridCell]:
    queryset = AnalysisGridCell.objects.filter(soil_location=location)
    queryset = queryset.filter(block_code=block_code or "")
    return list(queryset.order_by("cell_code"))


def _load_observations(
    *,
    location: SoilLocation,
    block_code: str,
    temporal_start,
    temporal_end,
) -> list[AnalysisGridObservation]:
    queryset = (
        AnalysisGridObservation.objects.select_related("cell", "run")
        .filter(
            cell__soil_location=location,
            cell__block_code=block_code or "",
            temporal_start=temporal_start,
            temporal_end=temporal_end,
        )
        .order_by("cell__cell_code")
    )
    return list(queryset)


def _select_cells_for_processing(
    *,
    all_cells: list[AnalysisGridCell],
    temporal_start,
    temporal_end,
    force_refresh: bool,
) -> list[AnalysisGridCell]:
    if force_refresh:
        return all_cells

    existing_ids = set(
        AnalysisGridObservation.objects.filter(
            cell__in=all_cells,
            temporal_start=temporal_start,
            temporal_end=temporal_end,
        ).values_list("cell_id", flat=True)
    )
    return [cell for cell in all_cells if cell.id not in existing_ids]


def _upsert_grid_observations(
    *,
    cells: list[AnalysisGridCell],
    run: RemoteSensingRun,
    temporal_start,
    temporal_end,
    metric_payload: dict[str, Any],
) -> dict[str, int]:
    metadata_template = {
        "backend_name": metric_payload["metadata"].get("backend"),
        "backend_url": metric_payload["metadata"].get("backend_url"),
        "collections_used": metric_payload["metadata"].get("collections_used", []),
        "slope_supported": metric_payload["metadata"].get("slope_supported", False),
        "job_refs": metric_payload["metadata"].get("job_refs", {}),
        "failed_metrics": metric_payload["metadata"].get("failed_metrics", []),
        "run_id": run.id,
    }
    result_by_cell = metric_payload.get("results", {})

    created_count = 0
    updated_count = 0
    with transaction.atomic():
        for cell in cells:
            values = result_by_cell.get(cell.cell_code, {})
            defaults = {
                "run": run,
                "ndvi": values.get("ndvi"),
                "ndwi": values.get("ndwi"),
                "lst_c": values.get("lst_c"),
                "soil_vv": values.get("soil_vv"),
                "soil_vv_db": values.get("soil_vv_db"),
                "dem_m": values.get("dem_m"),
                "slope_deg": values.get("slope_deg"),
                "metadata": metadata_template,
            }
            observation, created = AnalysisGridObservation.objects.update_or_create(
                cell=cell,
                temporal_start=temporal_start,
                temporal_end=temporal_end,
                defaults=defaults,
            )
            if created:
                created_count += 1
            else:
                updated_count += 1
    return {"created_count": created_count, "updated_count": updated_count}


def _ensure_subdivision_result(
    *,
    location: SoilLocation,
    run: RemoteSensingRun,
    subdivision: BlockSubdivision | None,
    block_code: str,
    observations: list[AnalysisGridObservation],
    cluster_count: int | None,
    selected_features: list[str] | None,
) -> RemoteSensingSubdivisionResult:
    if not observations:
        raise DataDrivenSubdivisionError("هیچ observation برای ساخت subdivision دادهمحور پیدا نشد.")
    result = create_remote_sensing_subdivision_result(
        location=location,
        run=run,
        observations=observations,
        block_subdivision=subdivision,
        block_code=block_code,
        selected_features=selected_features or list(DEFAULT_CLUSTER_FEATURES),
        explicit_k=cluster_count,
    )
    return result


def _record_run_stage(run: RemoteSensingRun, stage: str, details: dict[str, Any] | None = None) -> None:
    metadata = dict(run.metadata or {})
    metadata["stage"] = stage
    metadata["stage_details"] = {
        **dict(metadata.get("stage_details") or {}),
        stage: details or {},
    }
    metadata["timestamps"] = {
        **dict(metadata.get("timestamps") or {}),
        f"{stage}_at": timezone.now().isoformat(),
    }
    run.metadata = metadata
    run.save(update_fields=["metadata", "updated_at"])


def _build_clustering_stage_metadata(
    result: RemoteSensingSubdivisionResult,
) -> dict[str, Any]:
    metadata = dict(result.metadata or {})
    return {
        "subdivision_result_id": result.id,
        "cluster_count": result.cluster_count,
        "selected_features": result.selected_features,
        "used_cell_count": metadata.get("used_cell_count", 0),
        "skipped_cell_count": metadata.get("skipped_cell_count", 0),
        "skipped_cell_codes": result.skipped_cell_codes,
        "kmeans_params": metadata.get("kmeans_params", {}),
    }
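The synchronous helper and the Celery task share one code path, so callers can choose between blocking execution and queueing. A sketch of both entry points; the IDs and dates are illustrative:

```python
# Sketch of the two entry points defined above (values are illustrative).
from location_data.tasks import (
    run_remote_sensing_analysis,
    run_remote_sensing_analysis_task,
)

# Blocking call, e.g. from a management command:
summary = run_remote_sensing_analysis(
    soil_location_id=1,
    block_code="block-1",
    temporal_start="2025-01-01",
    temporal_end="2025-01-31",
)

# Queued call, e.g. from a view; the worker retries transient openEO failures.
async_result = run_remote_sensing_analysis_task.delay(
    soil_location_id=1,
    block_code="block-1",
    temporal_start="2025-01-01",
    temporal_end="2025-01-31",
)
print(async_result.id)
```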
@@ -0,0 +1,44 @@
from django.test import SimpleTestCase, override_settings

from location_data.block_subdivision import (
    build_block_subdivision_payload,
    detect_elbow_point,
)


@override_settings(SUBDIVISION_CHUNK_SQM=100)
class BlockSubdivisionServiceTests(SimpleTestCase):
    def test_detect_elbow_point_from_sse_curve(self):
        inertia_curve = [
            {"k": 1, "sse": 1000.0},
            {"k": 2, "sse": 400.0},
            {"k": 3, "sse": 220.0},
            {"k": 4, "sse": 180.0},
        ]

        optimal_k = detect_elbow_point(inertia_curve)

        self.assertEqual(optimal_k, 2)

    def test_build_block_subdivision_payload_returns_grid_and_centroids(self):
        boundary = {
            "type": "Polygon",
            "coordinates": [
                [
                    [51.3890, 35.6890],
                    [51.3902, 35.6890],
                    [51.3902, 35.6900],
                    [51.3890, 35.6900],
                    [51.3890, 35.6890],
                ]
            ],
        }

        result = build_block_subdivision_payload(boundary, block_code="block-1")

        self.assertEqual(result["block_code"], "block-1")
        self.assertEqual(result["chunk_size_sqm"], 100)
        self.assertGreater(result["grid_point_count"], 0)
        self.assertGreater(result["centroid_count"], 0)
        self.assertIn("optimal_k", result["metadata"])
        self.assertTrue(result["metadata"]["inertia_curve"])
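The implementation of `detect_elbow_point` is not part of this diff. One heuristic consistent with the test above is to pick the `k` where the SSE drop shrinks the most between consecutive `k` values, i.e. the sharpest flattening of the K-SSE curve. A minimal sketch under that assumption, not necessarily the shipped implementation:

```python
# Hypothetical elbow heuristic; the real detect_elbow_point may differ.
def detect_elbow_point_sketch(inertia_curve: list[dict]) -> int:
    points = sorted(inertia_curve, key=lambda p: p["k"])
    # SSE drop gained by each increment of k.
    drops = [
        (points[i]["k"], points[i - 1]["sse"] - points[i]["sse"])
        for i in range(1, len(points))
    ]
    if not drops:
        return points[0]["k"] if points else 1
    if len(drops) == 1:
        return drops[0][0]
    # Elbow = the k whose drop exceeds the following drop by the largest margin.
    best_k, best_bend = drops[0][0], float("-inf")
    for (k, drop), (_, next_drop) in zip(drops, drops[1:]):
        if drop - next_drop > best_bend:
            best_k, best_bend = k, drop - next_drop
    return best_k


curve = [
    {"k": 1, "sse": 1000.0},
    {"k": 2, "sse": 400.0},
    {"k": 3, "sse": 220.0},
    {"k": 4, "sse": 180.0},
]
assert detect_elbow_point_sketch(curve) == 2  # drop falls 600 -> 180, the sharpest bend
```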
@@ -0,0 +1,114 @@
from django.test import TestCase, override_settings

from location_data.grid_analysis import create_or_get_analysis_grid_cells
from location_data.models import AnalysisGridCell, BlockSubdivision, SoilLocation


@override_settings(SUBDIVISION_CHUNK_SQM=900)
class AnalysisGridServiceTests(TestCase):
    def setUp(self):
        self.boundary = {
            "type": "Polygon",
            "coordinates": [
                [
                    [51.389000, 35.689000],
                    [51.389760, 35.689000],
                    [51.389760, 35.689620],
                    [51.389000, 35.689620],
                    [51.389000, 35.689000],
                ]
            ],
        }
        self.location = SoilLocation.objects.create(
            latitude="35.689310",
            longitude="51.389380",
            farm_boundary=self.boundary,
        )
        self.location.set_input_block_count(1)
        self.location.save(update_fields=["input_block_count", "block_layout", "updated_at"])
        self.subdivision = BlockSubdivision.objects.create(
            soil_location=self.location,
            block_code="block-1",
            source_boundary=self.boundary,
            chunk_size_sqm=900,
            status="created",
        )

    def test_create_analysis_grid_cells_persists_30x30_cells(self):
        result = create_or_get_analysis_grid_cells(
            self.location,
            block_code="block-1",
            block_subdivision=self.subdivision,
        )

        self.assertTrue(result["created"])
        self.assertEqual(result["chunk_size_sqm"], 900)
        self.assertGreater(result["created_count"], 0)
        self.assertEqual(result["created_count"], result["total_count"])

        cells = list(
            AnalysisGridCell.objects.filter(
                soil_location=self.location,
                block_code="block-1",
                chunk_size_sqm=900,
            ).order_by("cell_code")
        )
        self.assertEqual(len(cells), result["total_count"])
        self.assertTrue(all(cell.block_subdivision_id == self.subdivision.id for cell in cells))
        self.assertTrue(all(cell.geometry.get("type") == "Polygon" for cell in cells))
        self.assertTrue(all(len(cell.geometry.get("coordinates", [[]])[0]) == 5 for cell in cells))

        self.subdivision.refresh_from_db()
        self.location.refresh_from_db()
        self.assertEqual(
            self.subdivision.metadata["analysis_grid"]["chunk_size_sqm"],
            900,
        )
        self.assertEqual(
            self.subdivision.metadata["analysis_grid"]["cell_count"],
            result["total_count"],
        )
        self.assertEqual(
            self.location.block_layout["blocks"][0]["analysis_grid_summary"]["chunk_size_sqm"],
            900,
        )

    def test_create_analysis_grid_cells_is_idempotent(self):
        first = create_or_get_analysis_grid_cells(
            self.location,
            block_code="block-1",
            block_subdivision=self.subdivision,
        )
        second = create_or_get_analysis_grid_cells(
            self.location,
            block_code="block-1",
            block_subdivision=self.subdivision,
        )

        self.assertTrue(first["created"])
        self.assertFalse(second["created"])
        self.assertEqual(second["created_count"], 0)
        self.assertEqual(second["existing_count"], first["total_count"])
        self.assertEqual(
            AnalysisGridCell.objects.filter(
                soil_location=self.location,
                block_code="block-1",
                chunk_size_sqm=900,
            ).count(),
            first["total_count"],
        )

    def test_create_analysis_grid_cells_uses_location_boundary_without_subdivision(self):
        result = create_or_get_analysis_grid_cells(
            self.location,
            block_code="",
        )

        self.assertGreater(result["total_count"], 0)
        self.assertTrue(
            AnalysisGridCell.objects.filter(
                soil_location=self.location,
                block_code="",
                chunk_size_sqm=900,
            ).exists()
        )
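A rough sanity check for the cell counts these tests expect: at latitude about 35.69° the test boundary spans roughly 69 m in each direction, so a 30 m grid yields a 3 x 3 layout if partial edge cells are kept. A back-of-the-envelope sketch; the real gridding lives in `grid_analysis` and may treat the edges differently:

```python
# Back-of-the-envelope cell-count estimate for the test boundary above.
import math

lat = 35.6893
dlon = 51.389760 - 51.389000  # degrees of longitude
dlat = 35.689620 - 35.689000  # degrees of latitude

width_m = dlon * 111_320 * math.cos(math.radians(lat))  # ~68.7 m
height_m = dlat * 110_574                                # ~68.6 m

cell_side = math.sqrt(900)  # chunk_size_sqm=900 -> 30 m cells
cols = math.ceil(width_m / cell_side)
rows = math.ceil(height_m / cell_side)
print(cols, rows, cols * rows)  # 3 3 9, if the gridder keeps partial edge cells
```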
@@ -0,0 +1,66 @@
from __future__ import annotations

from types import SimpleNamespace
from unittest.mock import patch

from django.test import TestCase, override_settings
from rest_framework.test import APIClient


@override_settings(ROOT_URLCONF="location_data.urls")
class NdviHealthApiTests(TestCase):
    def setUp(self):
        self.client = APIClient()

    @patch("location_data.views.apps.get_app_config")
    def test_ndvi_health_api_returns_payload(self, mock_get_app_config):
        mock_service = SimpleNamespace(
            get_ndvi_health=lambda **_kwargs: {
                "ndviIndex": 0.68,
                "mean_ndvi": 0.68,
                "ndvi_map": {"grid": [[0.61, 0.7]]},
                "vegetation_health_class": "Healthy vegetation",
                "observation_date": "2026-04-02",
                "satellite_source": "sentinel-2",
                "healthData": [
                    {
                        "title": "سلامت پوشش گیاهی",
                        "value": "Healthy vegetation",
                        "color": "success",
                        "icon": "tabler-plant",
                    }
                ],
            }
        )
        mock_get_app_config.return_value = SimpleNamespace(
            get_ndvi_health_service=lambda: mock_service
        )

        response = self.client.post(
            "/ndvi-health/",
            data={"farm_uuid": "550e8400-e29b-41d4-a716-446655440000"},
            format="json",
        )

        self.assertEqual(response.status_code, 200)
        payload = response.json()["data"]
        self.assertEqual(payload["mean_ndvi"], 0.68)
        self.assertEqual(payload["vegetation_health_class"], "Healthy vegetation")

    @patch("location_data.views.apps.get_app_config")
    def test_ndvi_health_api_returns_404_for_missing_farm(self, mock_get_app_config):
        mock_service = SimpleNamespace(
            get_ndvi_health=lambda **_kwargs: (_ for _ in ()).throw(ValueError("Farm not found."))
        )
        mock_get_app_config.return_value = SimpleNamespace(
            get_ndvi_health_service=lambda: mock_service
        )

        response = self.client.post(
            "/ndvi-health/",
            data={"farm_uuid": "550e8400-e29b-41d4-a716-446655440000"},
            format="json",
        )

        self.assertEqual(response.status_code, 404)
        self.assertEqual(response.json()["msg"], "Farm not found.")
@@ -0,0 +1,66 @@
from decimal import Decimal

from django.test import SimpleTestCase

from location_data.openeo_service import (
    build_empty_metric_payload,
    linear_to_db,
    merge_metric_results,
    parse_aggregate_spatial_response,
)


class OpenEOServiceParsingTests(SimpleTestCase):
    def test_parse_feature_collection_results(self):
        payload = {
            "type": "FeatureCollection",
            "features": [
                {
                    "type": "Feature",
                    "id": "cell-1",
                    "properties": {"mean": 0.61},
                },
                {
                    "type": "Feature",
                    "id": "cell-2",
                    "properties": {"mean": 0.47},
                },
            ],
        }

        result = parse_aggregate_spatial_response(payload, "ndvi")

        self.assertEqual(result["cell-1"]["ndvi"], 0.61)
        self.assertEqual(result["cell-2"]["ndvi"], 0.47)

    def test_parse_mapping_results(self):
        payload = {
            "cell-1": {"mean": 12.4},
            "cell-2": {"mean": 15.1},
        }

        result = parse_aggregate_spatial_response(payload, "lst_c")

        self.assertEqual(result["cell-1"]["lst_c"], 12.4)
        self.assertEqual(result["cell-2"]["lst_c"], 15.1)

    def test_linear_to_db(self):
        self.assertEqual(linear_to_db(10.0), 10.0)
        self.assertEqual(linear_to_db(Decimal("1.0")), 0.0)
        self.assertIsNone(linear_to_db(0))
        self.assertIsNone(linear_to_db(-1))

    def test_merge_metric_results(self):
        target = {"cell-1": build_empty_metric_payload()}

        merge_metric_results(
            target,
            {
                "cell-1": {"ndvi": 0.5},
                "cell-2": {"ndwi": 0.2},
            },
        )

        self.assertEqual(target["cell-1"]["ndvi"], 0.5)
        self.assertEqual(target["cell-2"]["ndwi"], 0.2)
        self.assertIn("soil_vv_db", target["cell-2"])
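The `test_linear_to_db` assertions pin down the expected conversion: `10 * log10(value)`, with non-positive inputs mapping to `None`. A minimal sketch consistent with those tests (the real implementation lives in `location_data/openeo_service.py` and may differ in details):

```python
import math


def linear_to_db(value):
    """Convert a linear SAR backscatter value to decibels (10 * log10)."""
    if value is None:
        return None
    value = float(value)  # accepts float, int, or Decimal inputs
    if value <= 0:
        return None  # non-positive values have no dB representation
    return 10.0 * math.log10(value)
```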
@@ -0,0 +1,265 @@
from datetime import date
from types import SimpleNamespace
from unittest.mock import patch

from django.test import TestCase, override_settings
from rest_framework.test import APIClient

from location_data.models import (
    AnalysisGridCell,
    AnalysisGridObservation,
    BlockSubdivision,
    RemoteSensingClusterAssignment,
    RemoteSensingRun,
    RemoteSensingSubdivisionResult,
    SoilLocation,
)


@override_settings(ROOT_URLCONF="location_data.urls")
class RemoteSensingApiTests(TestCase):
    def setUp(self):
        self.client = APIClient()
        self.boundary = {
            "type": "Polygon",
            "coordinates": [
                [
                    [51.3890, 35.6890],
                    [51.3900, 35.6890],
                    [51.3900, 35.6900],
                    [51.3890, 35.6900],
                    [51.3890, 35.6890],
                ]
            ],
        }
        self.location = SoilLocation.objects.create(
            latitude="35.689200",
            longitude="51.389000",
            farm_boundary=self.boundary,
        )
        self.location.set_input_block_count(1)
        self.location.save(update_fields=["input_block_count", "block_layout", "updated_at"])
        self.subdivision = BlockSubdivision.objects.create(
            soil_location=self.location,
            block_code="block-1",
            source_boundary=self.boundary,
            chunk_size_sqm=900,
            status="created",
        )

    def test_post_remote_sensing_returns_404_when_location_missing(self):
        response = self.client.post(
            "/remote-sensing/",
            data={
                "lat": 35.7000,
                "lon": 51.4000,
                "start_date": "2025-01-01",
                "end_date": "2025-01-31",
            },
            format="json",
        )

        self.assertEqual(response.status_code, 404)
        self.assertEqual(response.json()["msg"], "location پیدا نشد.")

    @patch("location_data.views.run_remote_sensing_analysis_task.delay")
    def test_post_remote_sensing_enqueues_task_and_returns_processing(self, mock_delay):
        mock_delay.return_value = SimpleNamespace(id="task-123")

        response = self.client.post(
            "/remote-sensing/",
            data={
                "lat": 35.6892,
                "lon": 51.3890,
                "block_code": "block-1",
                "start_date": "2025-01-01",
                "end_date": "2025-01-31",
                "force_refresh": False,
            },
            format="json",
        )

        self.assertEqual(response.status_code, 202)
        payload = response.json()["data"]
        self.assertEqual(payload["status"], "processing")
        self.assertEqual(payload["source"], "processing")
        self.assertEqual(payload["task_id"], "task-123")
        self.assertEqual(payload["block_code"], "block-1")
        self.assertEqual(payload["summary"]["cell_count"], 0)
        run = RemoteSensingRun.objects.get(id=payload["run"]["id"])
        self.assertEqual(run.block_code, "block-1")
        self.assertEqual(run.status, RemoteSensingRun.STATUS_PENDING)
        self.assertEqual(run.metadata["stage"], "queued")
        self.assertEqual(run.metadata["selected_features"], [])
        mock_delay.assert_called_once()

    def test_get_remote_sensing_returns_processing_when_run_exists_without_results(self):
        RemoteSensingRun.objects.create(
            soil_location=self.location,
            block_subdivision=self.subdivision,
            block_code="block-1",
            chunk_size_sqm=900,
            temporal_start=date(2025, 1, 1),
            temporal_end=date(2025, 1, 31),
            status=RemoteSensingRun.STATUS_RUNNING,
            metadata={"task_id": "task-123"},
        )

        response = self.client.get(
            "/remote-sensing/",
            data={
                "lat": 35.6892,
                "lon": 51.3890,
                "block_code": "block-1",
                "start_date": "2025-01-01",
                "end_date": "2025-01-31",
            },
        )

        self.assertEqual(response.status_code, 200)
        payload = response.json()["data"]
        self.assertEqual(payload["status"], "processing")
        self.assertEqual(payload["source"], "processing")
        self.assertEqual(payload["cells"], [])
        self.assertEqual(payload["run"]["status"], RemoteSensingRun.STATUS_RUNNING)

    def test_get_remote_sensing_returns_cached_results(self):
        run = RemoteSensingRun.objects.create(
            soil_location=self.location,
            block_subdivision=self.subdivision,
            block_code="block-1",
            chunk_size_sqm=900,
            temporal_start=date(2025, 1, 1),
            temporal_end=date(2025, 1, 31),
            status=RemoteSensingRun.STATUS_SUCCESS,
        )
        cell = AnalysisGridCell.objects.create(
            soil_location=self.location,
            block_subdivision=self.subdivision,
            block_code="block-1",
            cell_code="cell-1",
            chunk_size_sqm=900,
            geometry=self.boundary,
            centroid_lat="35.689500",
            centroid_lon="51.389500",
        )
        AnalysisGridObservation.objects.create(
            cell=cell,
            run=run,
            temporal_start=date(2025, 1, 1),
            temporal_end=date(2025, 1, 31),
            ndvi=0.61,
            ndwi=0.22,
            lst_c=24.5,
            soil_vv=0.13,
            soil_vv_db=-8.860566,
            dem_m=1550.0,
            slope_deg=4.2,
            metadata={"backend_name": "openeo"},
        )

        response = self.client.get(
            "/remote-sensing/",
            data={
                "lat": 35.6892,
                "lon": 51.3890,
                "block_code": "block-1",
                "start_date": "2025-01-01",
                "end_date": "2025-01-31",
            },
        )

        self.assertEqual(response.status_code, 200)
        payload = response.json()["data"]
        self.assertEqual(payload["status"], "success")
        self.assertEqual(payload["source"], "database")
        self.assertEqual(payload["summary"]["cell_count"], 1)
        self.assertEqual(payload["summary"]["ndvi_mean"], 0.61)
        self.assertEqual(payload["summary"]["soil_vv_db_mean"], -8.860566)
        self.assertEqual(len(payload["cells"]), 1)
        self.assertEqual(payload["cells"][0]["cell_code"], "cell-1")

    def test_run_status_endpoint_returns_normalized_status(self):
        run = RemoteSensingRun.objects.create(
            soil_location=self.location,
            block_subdivision=self.subdivision,
            block_code="block-1",
            chunk_size_sqm=900,
            temporal_start=date(2025, 1, 1),
            temporal_end=date(2025, 1, 31),
            status=RemoteSensingRun.STATUS_SUCCESS,
            metadata={"stage": "completed", "selected_features": ["ndvi"]},
        )

        response = self.client.get(f"/remote-sensing/runs/{run.id}/status/")

        self.assertEqual(response.status_code, 200)
        payload = response.json()["data"]
        self.assertEqual(payload["status"], "completed")
        self.assertEqual(payload["run"]["pipeline_status"], "completed")
        self.assertEqual(payload["run"]["stage"], "completed")
        self.assertEqual(payload["run"]["selected_features"], ["ndvi"])

    def test_run_result_endpoint_returns_paginated_assignments(self):
        run = RemoteSensingRun.objects.create(
            soil_location=self.location,
            block_subdivision=self.subdivision,
            block_code="block-1",
            chunk_size_sqm=900,
            temporal_start=date(2025, 1, 1),
            temporal_end=date(2025, 1, 31),
            status=RemoteSensingRun.STATUS_SUCCESS,
            metadata={"stage": "completed"},
        )
        cell = AnalysisGridCell.objects.create(
            soil_location=self.location,
            block_subdivision=self.subdivision,
            block_code="block-1",
            cell_code="cell-1",
            chunk_size_sqm=900,
            geometry=self.boundary,
            centroid_lat="35.689500",
            centroid_lon="51.389500",
        )
        AnalysisGridObservation.objects.create(
            cell=cell,
            run=run,
            temporal_start=date(2025, 1, 1),
            temporal_end=date(2025, 1, 31),
            ndvi=0.61,
            ndwi=0.22,
            lst_c=24.5,
            soil_vv=0.13,
            soil_vv_db=-8.860566,
            dem_m=1550.0,
            slope_deg=4.2,
            metadata={"backend_name": "openeo"},
        )
        result = RemoteSensingSubdivisionResult.objects.create(
            soil_location=self.location,
            run=run,
            block_subdivision=self.subdivision,
            block_code="block-1",
            chunk_size_sqm=900,
            temporal_start=date(2025, 1, 1),
            temporal_end=date(2025, 1, 31),
            cluster_count=1,
            selected_features=["ndvi"],
            metadata={"used_cell_count": 1, "skipped_cell_count": 0},
        )
        RemoteSensingClusterAssignment.objects.create(
            result=result,
            cell=cell,
            cluster_label=0,
            raw_feature_values={"ndvi": 0.61},
            scaled_feature_values={"ndvi": 0.0},
        )

        response = self.client.get(f"/remote-sensing/runs/{run.id}/result/", data={"page": 1, "page_size": 10})

        self.assertEqual(response.status_code, 200)
        payload = response.json()["data"]
        self.assertEqual(payload["status"], "completed")
        self.assertEqual(payload["subdivision_result"]["cluster_count"], 1)
        self.assertEqual(len(payload["subdivision_result"]["assignments"]), 1)
        self.assertEqual(payload["pagination"]["assignments"]["total_items"], 1)
@@ -0,0 +1,126 @@
from django.test import TestCase, override_settings
from rest_framework.test import APIClient

from location_data.models import BlockSubdivision, SoilLocation


@override_settings(ROOT_URLCONF="location_data.urls")
class SoilDataApiTests(TestCase):
    def setUp(self):
        self.client = APIClient()
        self.boundary = {
            "type": "Polygon",
            "coordinates": [
                [
                    [51.3890, 35.6890],
                    [51.3902, 35.6890],
                    [51.3902, 35.6900],
                    [51.3890, 35.6900],
                    [51.3890, 35.6890],
                ]
            ],
        }
        self.block_boundary = {
            "type": "Polygon",
            "coordinates": [
                [
                    [51.3890, 35.6890],
                    [51.3896, 35.6890],
                    [51.3896, 35.6900],
                    [51.3890, 35.6900],
                    [51.3890, 35.6890],
                ]
            ],
        }

    def test_post_creates_default_single_block_layout(self):
        response = self.client.post(
            "/",
            data={
                "lat": 35.6892,
                "lon": 51.3890,
                "farm_boundary": self.boundary,
                "blocks": [
                    {
                        "block_code": "block-1",
                        "boundary": self.block_boundary,
                    }
                ],
            },
            format="json",
        )

        self.assertEqual(response.status_code, 200)
        payload = response.json()["data"]
        self.assertEqual(payload["source"], "created")
        self.assertEqual(payload["input_block_count"], 1)
        self.assertEqual(len(payload["block_layout"]["blocks"]), 1)
        self.assertEqual(payload["block_layout"]["blocks"][0]["boundary"], self.block_boundary)
        self.assertEqual(payload["block_layout"]["algorithm_status"], "pending")
        self.assertEqual(len(payload["block_subdivisions"]), 1)
        self.assertEqual(payload["block_subdivisions"][0]["status"], "defined")
        self.assertEqual(payload["satellite_snapshots"][0]["status"], "missing")

    def test_post_updates_block_layout_from_input(self):
        SoilLocation.objects.create(
            latitude="35.689200",
            longitude="51.389000",
        )

        response = self.client.post(
            "/",
            data={
                "lat": 35.6892,
                "lon": 51.3890,
                "farm_boundary": self.boundary,
                "blocks": [
                    {"block_code": "block-a", "boundary": self.block_boundary},
                    {"block_code": "block-b", "boundary": self.block_boundary},
                ],
            },
            format="json",
        )

        self.assertEqual(response.status_code, 200)
        payload = response.json()["data"]
        self.assertEqual(payload["input_block_count"], 2)
        self.assertEqual(len(payload["block_layout"]["blocks"]), 2)
        self.assertEqual(len(payload["block_subdivisions"]), 2)

        location = SoilLocation.objects.get(latitude="35.689200", longitude="51.389000")
        self.assertEqual(location.input_block_count, 2)
        self.assertEqual(len(location.block_layout["blocks"]), 2)
        self.assertEqual(location.block_layout["algorithm_status"], "pending")
        self.assertTrue(
            BlockSubdivision.objects.filter(
                soil_location=location,
                block_code="block-a",
                status="defined",
            ).exists()
        )

    def test_get_returns_stored_subdivisions_without_processing(self):
        self.client.post(
            "/",
            data={
                "lat": 35.6892,
                "lon": 51.3890,
                "farm_boundary": self.boundary,
                "blocks": [
                    {
                        "block_code": "block-1",
                        "boundary": self.block_boundary,
                    }
                ],
            },
            format="json",
        )

        response = self.client.get(
            "/",
            data={"lat": 35.6892, "lon": 51.3890},
        )

        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.json()["data"]["source"], "database")
        self.assertEqual(len(response.json()["data"]["block_subdivisions"]), 1)
@@ -0,0 +1,17 @@
from django.urls import path

from .views import (
    NdviHealthView,
    RemoteSensingAnalysisView,
    RemoteSensingRunResultView,
    RemoteSensingRunStatusView,
    SoilDataView,
)

urlpatterns = [
    path("", SoilDataView.as_view(), name="soil-data"),
    path("remote-sensing/", RemoteSensingAnalysisView.as_view(), name="remote-sensing"),
    path("remote-sensing/runs/<int:run_id>/status/", RemoteSensingRunStatusView.as_view(), name="remote-sensing-run-status"),
    path("remote-sensing/runs/<int:run_id>/result/", RemoteSensingRunResultView.as_view(), name="remote-sensing-run-result"),
    path("ndvi-health/", NdviHealthView.as_view(), name="ndvi-health"),
]
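The tests mount this URLConf at the root via `override_settings(ROOT_URLCONF="location_data.urls")`, which is why they call paths like `/remote-sensing/` directly. In a real deployment the app would normally be included under a prefix; the `api/location/` prefix below is only an illustrative assumption, not something defined in this commit:

```python
# Project-level urls.py (sketch; the mount prefix is an assumption).
from django.urls import include, path

urlpatterns = [
    path("api/location/", include("location_data.urls")),
]
```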
@@ -0,0 +1,938 @@
from django.apps import apps
from django.core.paginator import EmptyPage, Paginator
from django.db import transaction
from django.db.models import Avg
from drf_spectacular.utils import (
    OpenApiExample,
    OpenApiResponse,
    extend_schema,
    inline_serializer,
)
from rest_framework import serializers as drf_serializers
from rest_framework import status
from rest_framework.response import Response
from rest_framework.views import APIView

from config.openapi import (
    build_envelope_serializer,
    build_response,
)
from .models import (
    AnalysisGridObservation,
    BlockSubdivision,
    RemoteSensingRun,
    RemoteSensingSubdivisionResult,
    SoilLocation,
)
from .serializers import (
    BlockSubdivisionSerializer,
    NdviHealthRequestSerializer,
    NdviHealthResponseSerializer,
    RemoteSensingCellObservationSerializer,
    RemoteSensingResponseSerializer,
    RemoteSensingResultQuerySerializer,
    RemoteSensingRunResultResponseSerializer,
    RemoteSensingRunSerializer,
    RemoteSensingRunStatusResponseSerializer,
    RemoteSensingSubdivisionResultSerializer,
    RemoteSensingSummarySerializer,
    RemoteSensingTriggerSerializer,
    SoilDataRequestSerializer,
    SoilLocationResponseSerializer,
)
from .tasks import run_remote_sensing_analysis_task

MAX_REMOTE_SENSING_PAGE_SIZE = 200

SoilLocationPayloadSerializer = inline_serializer(
    name="SoilLocationPayloadSerializer",
    fields={
        "source": drf_serializers.CharField(),
        "id": drf_serializers.IntegerField(),
        "lon": drf_serializers.DecimalField(max_digits=9, decimal_places=6),
        "lat": drf_serializers.DecimalField(max_digits=9, decimal_places=6),
        "input_block_count": drf_serializers.IntegerField(),
        "farm_boundary": drf_serializers.JSONField(),
        "block_layout": drf_serializers.JSONField(),
        "block_subdivisions": BlockSubdivisionSerializer(many=True),
        "satellite_snapshots": drf_serializers.JSONField(),
    },
)
SoilDataResponseSerializer = build_envelope_serializer(
    "SoilDataResponseSerializer",
    SoilLocationPayloadSerializer,
)
SoilErrorResponseSerializer = build_envelope_serializer(
    "SoilErrorResponseSerializer",
    data_required=False,
    allow_null=True,
)
NdviHealthEnvelopeSerializer = build_envelope_serializer(
    "NdviHealthEnvelopeSerializer",
    NdviHealthResponseSerializer,
)
RemoteSensingEnvelopeSerializer = build_envelope_serializer(
    "RemoteSensingEnvelopeSerializer",
    RemoteSensingResponseSerializer,
)
RemoteSensingQueuedEnvelopeSerializer = build_envelope_serializer(
    "RemoteSensingQueuedEnvelopeSerializer",
    inline_serializer(
        name="RemoteSensingQueuedPayloadSerializer",
        fields={
            "status": drf_serializers.CharField(),
            "source": drf_serializers.CharField(),
            "location": drf_serializers.JSONField(),
            "block_code": drf_serializers.CharField(),
            "chunk_size_sqm": drf_serializers.IntegerField(allow_null=True),
            "temporal_extent": drf_serializers.JSONField(),
            "summary": RemoteSensingSummarySerializer(),
            "cells": drf_serializers.JSONField(),
            "run": drf_serializers.JSONField(allow_null=True),
            "task_id": drf_serializers.CharField(),
        },
    ),
)
RemoteSensingRunStatusEnvelopeSerializer = build_envelope_serializer(
    "RemoteSensingRunStatusEnvelopeSerializer",
    RemoteSensingRunStatusResponseSerializer,
)
RemoteSensingRunResultEnvelopeSerializer = build_envelope_serializer(
    "RemoteSensingRunResultEnvelopeSerializer",
    RemoteSensingRunResultResponseSerializer,
)

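Every response in this module is wrapped in a `{"code", "msg", "data"}` envelope, and `build_envelope_serializer` (imported from `config.openapi`, which is not part of this commit) is what declares that envelope for the OpenAPI schema. A hedged sketch of what such a helper could look like, inferred only from its call sites here; the real helper may differ:

```python
# Sketch only: the real build_envelope_serializer lives in config/openapi.py.
from drf_spectacular.utils import inline_serializer
from rest_framework import serializers


def build_envelope_serializer(name, data_serializer=None, *, data_required=True, allow_null=False):
    # Mirrors the literal {"code", "msg", "data"} dicts returned by the views.
    if data_serializer is None:
        data_serializer = serializers.JSONField(required=data_required, allow_null=allow_null)
    return inline_serializer(
        name=name,
        fields={
            "code": serializers.IntegerField(),
            "msg": serializers.CharField(),
            "data": data_serializer,
        },
    )
```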
class SoilDataView(APIView):
    """
    ثبت مختصات گوشه‌های مزرعه و بلوک‌های تعریف‌شده توسط کشاورز.
    """

    @extend_schema(
        tags=["Soil Data"],
        summary="خواندن ساختار مزرعه و بلوک‌ها (GET)",
        description="با ارسال lat و lon، ساختار ذخیره‌شده مزرعه، بلوک‌ها و آخرین خلاصه سنجش‌ازدور هر بلوک بازگردانده می‌شود.",
        parameters=[
            {
                "name": "lat",
                "in": "query",
                "required": True,
                "schema": {"type": "number"},
                "description": "عرض جغرافیایی",
            },
            {
                "name": "lon",
                "in": "query",
                "required": True,
                "schema": {"type": "number"},
                "description": "طول جغرافیایی",
            },
            {
                "name": "block_code",
                "in": "query",
                "required": False,
                "schema": {"type": "string", "default": "block-1"},
                "description": "در GET فقط برای فیلتر کلاینتی است و الگوریتمی اجرا نمی‌کند.",
            },
        ],
        responses={
            200: build_response(
                SoilDataResponseSerializer,
                "ساختار بلوک‌های زمین از دیتابیس بازگردانده شد.",
            ),
            404: build_response(
                SoilErrorResponseSerializer,
                "location موردنظر پیدا نشد.",
            ),
            400: build_response(
                SoilErrorResponseSerializer,
                "پارامترهای ورودی نامعتبر هستند.",
            ),
        },
    )
    def get(self, request):
        serializer = SoilDataRequestSerializer(data=request.query_params)
        if not serializer.is_valid():
            return Response(
                {"code": 400, "msg": "داده نامعتبر.", "data": serializer.errors},
                status=status.HTTP_400_BAD_REQUEST,
            )

        lat = serializer.validated_data["lat"]
        lon = serializer.validated_data["lon"]
        location = _get_location_by_lat_lon(lat, lon, prefetch=True)
        if location is None:
            return Response(
                {"code": 404, "msg": "location پیدا نشد.", "data": None},
                status=status.HTTP_404_NOT_FOUND,
            )

        data_serializer = SoilLocationResponseSerializer(location)
        return Response(
            {"code": 200, "msg": "success", "data": {"source": "database", **data_serializer.data}},
            status=status.HTTP_200_OK,
        )

    @extend_schema(
        tags=["Soil Data"],
        summary="ثبت مزرعه و بلوک‌های کشاورز (POST)",
        description="مختصات گوشه‌های مزرعه و boundary هر بلوک کشاورز ذخیره می‌شود. هیچ subdivision سنکرونی اجرا نمی‌شود.",
        request=SoilDataRequestSerializer,
        responses={
            200: build_response(
                SoilDataResponseSerializer,
                "اطلاعات location ذخیره یا به‌روزرسانی شد.",
            ),
            400: build_response(
                SoilErrorResponseSerializer,
                "پارامترهای ورودی نامعتبر هستند.",
            ),
        },
        examples=[
            OpenApiExample(
                "نمونه درخواست",
                value={
                    "lat": 35.6892,
                    "lon": 51.3890,
                    "farm_boundary": {
                        "type": "Polygon",
                        "coordinates": [
                            [
                                [51.3890, 35.6890],
                                [51.3902, 35.6890],
                                [51.3902, 35.6900],
                                [51.3890, 35.6900],
                                [51.3890, 35.6890],
                            ]
                        ],
                    },
                    "blocks": [
                        {
                            "block_code": "block-1",
                            "boundary": {
                                "type": "Polygon",
                                "coordinates": [
                                    [
                                        [51.3890, 35.6890],
                                        [51.3896, 35.6890],
                                        [51.3896, 35.6900],
                                        [51.3890, 35.6900],
                                        [51.3890, 35.6890],
                                    ]
                                ],
                            },
                        }
                    ],
                },
                request_only=True,
            ),
        ],
    )
    def post(self, request):
        serializer = SoilDataRequestSerializer(
            data=request.data,
            context={"require_farm_boundary": True},
        )
        if not serializer.is_valid():
            return Response(
                {"code": 400, "msg": "داده نامعتبر.", "data": serializer.errors},
                status=status.HTTP_400_BAD_REQUEST,
            )

        lat = serializer.validated_data["lat"]
        lon = serializer.validated_data["lon"]
        block_count = serializer.validated_data.get("block_count", 1)
        farm_boundary = serializer.validated_data.get("farm_boundary")
        blocks = serializer.validated_data.get("blocks") or []
        lat_rounded = round(lat, 6)
        lon_rounded = round(lon, 6)

        location, created = SoilLocation.objects.get_or_create(
            latitude=lat_rounded,
            longitude=lon_rounded,
            defaults={
                "input_block_count": block_count,
                "farm_boundary": farm_boundary or {},
            },
        )
        if created:
            location.set_input_block_count(block_count, blocks=blocks or None)
            if farm_boundary is not None:
                location.farm_boundary = farm_boundary
            location.save(update_fields=["input_block_count", "farm_boundary", "block_layout", "updated_at"])
        else:
            changed_fields = []
            if block_count != location.input_block_count or blocks:
                location.set_input_block_count(block_count, blocks=blocks or None)
                changed_fields.extend(["input_block_count", "block_layout"])
            if farm_boundary is not None and location.farm_boundary != farm_boundary:
                location.farm_boundary = farm_boundary
                changed_fields.append("farm_boundary")
            if changed_fields:
                changed_fields.append("updated_at")
                location.save(update_fields=changed_fields)

        if not (farm_boundary or location.farm_boundary):
            return Response(
                {
                    "code": 400,
                    "msg": "داده نامعتبر.",
                    "data": {"farm_boundary": ["برای ثبت location باید گوشه‌های کل زمین ارسال یا قبلاً ذخیره شده باشد."]},
                },
                status=status.HTTP_400_BAD_REQUEST,
            )

        _sync_defined_blocks(location, blocks)

        location = _get_location_by_lat_lon(lat, lon, prefetch=True)
        data_serializer = SoilLocationResponseSerializer(location)
        return Response(
            {
                "code": 200,
                "msg": "success",
                "data": {
                    "source": "created" if created else "database",
                    **data_serializer.data,
                },
            },
            status=status.HTTP_200_OK,
        )


class NdviHealthView(APIView):
    @extend_schema(
        tags=["Soil Data"],
        summary="دریافت NDVI سلامت مزرعه",
        description="با دریافت farm_uuid، داده NDVI سلامت پوشش گیاهی مزرعه را به صورت مستقل از dashboard برمی‌گرداند.",
        request=NdviHealthRequestSerializer,
        responses={
            200: build_response(
                NdviHealthEnvelopeSerializer,
                "داده NDVI مزرعه با موفقیت بازگردانده شد.",
            ),
            400: build_response(
                SoilErrorResponseSerializer,
                "داده ورودی نامعتبر است.",
            ),
            404: build_response(
                SoilErrorResponseSerializer,
                "مزرعه یافت نشد.",
            ),
        },
        examples=[
            OpenApiExample(
                "نمونه درخواست NDVI",
                value={"farm_uuid": "11111111-1111-1111-1111-111111111111"},
                request_only=True,
            )
        ],
    )
    def post(self, request):
        serializer = NdviHealthRequestSerializer(data=request.data)
        if not serializer.is_valid():
            return Response(
                {"code": 400, "msg": "داده نامعتبر.", "data": serializer.errors},
                status=status.HTTP_400_BAD_REQUEST,
            )

        service = apps.get_app_config("location_data").get_ndvi_health_service()
        try:
            data = service.get_ndvi_health(
                farm_uuid=str(serializer.validated_data["farm_uuid"])
            )
        except ValueError as exc:
            return Response(
                {"code": 404, "msg": str(exc), "data": None},
                status=status.HTTP_404_NOT_FOUND,
            )

        return Response(
            {"code": 200, "msg": "success", "data": data},
            status=status.HTTP_200_OK,
        )


class RemoteSensingAnalysisView(APIView):
    @extend_schema(
        tags=["Soil Data"],
        summary="اجرای async تحلیل سنجش‌ازدور و subdivision داده‌محور",
        description="برای location موجود، pipeline کامل grid + openEO + observation persistence + KMeans clustering در Celery صف می‌شود و sync اجرا نمی‌شود.",
        request=RemoteSensingTriggerSerializer,
        responses={
            202: build_response(
                RemoteSensingQueuedEnvelopeSerializer,
                "درخواست تحلیل سنجش‌ازدور در صف قرار گرفت.",
            ),
            400: build_response(
                SoilErrorResponseSerializer,
                "داده ورودی نامعتبر است.",
            ),
            404: build_response(
                SoilErrorResponseSerializer,
                "location موردنظر پیدا نشد.",
            ),
        },
        examples=[
            OpenApiExample(
                "نمونه درخواست remote sensing",
                value={
                    "lat": 35.6892,
                    "lon": 51.3890,
                    "block_code": "block-1",
                    "start_date": "2025-01-01",
                    "end_date": "2025-01-31",
                    "force_refresh": False,
                    "cluster_count": 3,
                    "selected_features": ["ndvi", "ndwi", "soil_vv_db"],
                },
                request_only=True,
            ),
        ],
    )
    def post(self, request):
        serializer = RemoteSensingTriggerSerializer(data=request.data)
        if not serializer.is_valid():
            return Response(
                {"code": 400, "msg": "داده نامعتبر.", "data": serializer.errors},
                status=status.HTTP_400_BAD_REQUEST,
            )

        payload = serializer.validated_data
        location = _get_location_by_lat_lon(payload["lat"], payload["lon"], prefetch=True)
        if location is None:
            return Response(
                {"code": 404, "msg": "location پیدا نشد.", "data": None},
                status=status.HTTP_404_NOT_FOUND,
            )

        block_code = str(payload.get("block_code", "") or "").strip()
        run = RemoteSensingRun.objects.create(
            soil_location=location,
            block_code=block_code,
            chunk_size_sqm=_resolve_chunk_size_for_location(location, block_code),
            temporal_start=payload["start_date"],
            temporal_end=payload["end_date"],
            status=RemoteSensingRun.STATUS_PENDING,
            metadata={
                "requested_via": "api",
                "status_label": "pending",
                "cluster_count": payload.get("cluster_count"),
                "selected_features": payload.get("selected_features") or [],
            },
        )
        task_result = run_remote_sensing_analysis_task.delay(
            soil_location_id=location.id,
            block_code=block_code,
            temporal_start=payload["start_date"].isoformat(),
            temporal_end=payload["end_date"].isoformat(),
            force_refresh=payload.get("force_refresh", False),
            run_id=run.id,
            cluster_count=payload.get("cluster_count"),
            selected_features=payload.get("selected_features"),
        )
        run.metadata = {**(run.metadata or {}), "task_id": task_result.id}
        run.save(update_fields=["metadata", "updated_at"])

        location_data = SoilLocationResponseSerializer(location).data
        response_payload = {
            "status": "processing",
            "source": "processing",
            "location": location_data,
            "block_code": block_code,
            "chunk_size_sqm": run.chunk_size_sqm,
            "temporal_extent": {
                "start_date": payload["start_date"].isoformat(),
                "end_date": payload["end_date"].isoformat(),
            },
            "summary": _empty_remote_sensing_summary(),
            "cells": [],
            "run": RemoteSensingRunSerializer(run).data,
            "task_id": task_result.id,
        }
        return Response(
            {"code": 202, "msg": "تحلیل سنجش‌ازدور در صف قرار گرفت.", "data": response_payload},
            status=status.HTTP_202_ACCEPTED,
        )

    @extend_schema(
        tags=["Soil Data"],
        summary="خواندن نتایج cache شده سنجش‌ازدور و subdivision",
        description="فقط نتایج ذخیره‌شده remote sensing و clustering را برمی‌گرداند و هیچ پردازش sync اجرا نمی‌کند.",
        parameters=[
            {"name": "lat", "in": "query", "required": True, "schema": {"type": "number"}},
            {"name": "lon", "in": "query", "required": True, "schema": {"type": "number"}},
            {"name": "block_code", "in": "query", "required": False, "schema": {"type": "string"}},
            {"name": "start_date", "in": "query", "required": True, "schema": {"type": "string", "format": "date"}},
            {"name": "end_date", "in": "query", "required": True, "schema": {"type": "string", "format": "date"}},
            {"name": "page", "in": "query", "required": False, "schema": {"type": "integer", "default": 1}},
            {"name": "page_size", "in": "query", "required": False, "schema": {"type": "integer", "default": 100}},
        ],
        responses={
            200: build_response(
                RemoteSensingEnvelopeSerializer,
                "نتایج cache شده remote sensing بازگردانده شد.",
            ),
            404: build_response(
                SoilErrorResponseSerializer,
                "location موردنظر پیدا نشد.",
            ),
            400: build_response(
                SoilErrorResponseSerializer,
                "داده ورودی نامعتبر است.",
            ),
        },
    )
    def get(self, request):
        serializer = RemoteSensingResultQuerySerializer(data=request.query_params)
        if not serializer.is_valid():
            return Response(
                {"code": 400, "msg": "داده نامعتبر.", "data": serializer.errors},
                status=status.HTTP_400_BAD_REQUEST,
            )

        payload = serializer.validated_data
        location = _get_location_by_lat_lon(payload["lat"], payload["lon"], prefetch=True)
        if location is None:
            return Response(
                {"code": 404, "msg": "location پیدا نشد.", "data": None},
                status=status.HTTP_404_NOT_FOUND,
            )

        block_code = str(payload.get("block_code", "") or "").strip()
        observations = _get_remote_sensing_observations(
            location=location,
            block_code=block_code,
            start_date=payload["start_date"],
            end_date=payload["end_date"],
        )
        run = _get_latest_remote_sensing_run(
            location=location,
            block_code=block_code,
            start_date=payload["start_date"],
            end_date=payload["end_date"],
        )
        subdivision_result = _get_remote_sensing_subdivision_result(
            location=location,
            block_code=block_code,
            start_date=payload["start_date"],
            end_date=payload["end_date"],
        )

        if not observations.exists():
            processing = run is not None and run.status in {
                RemoteSensingRun.STATUS_PENDING,
                RemoteSensingRun.STATUS_RUNNING,
            }
            response_payload = {
                "status": "processing" if processing else "not_found",
                "source": "processing" if processing else "database",
                "location": SoilLocationResponseSerializer(location).data,
                "block_code": block_code,
                "chunk_size_sqm": getattr(run, "chunk_size_sqm", None),
                "temporal_extent": {
                    "start_date": payload["start_date"].isoformat(),
                    "end_date": payload["end_date"].isoformat(),
                },
                "summary": _empty_remote_sensing_summary(),
                "cells": [],
                "run": RemoteSensingRunSerializer(run).data if run else None,
                "subdivision_result": None,
            }
            return Response(
                {"code": 200, "msg": "success", "data": response_payload},
                status=status.HTTP_200_OK,
            )

        paginated_observations = _paginate_observations(
            observations,
            page=payload["page"],
            page_size=payload["page_size"],
        )
        paginated_assignments = []
        pagination = {"cells": paginated_observations["pagination"]}
        if subdivision_result is not None:
            paginated = _paginate_assignments(
                subdivision_result,
                page=payload["page"],
                page_size=payload["page_size"],
            )
            paginated_assignments = paginated["items"]
            pagination["assignments"] = paginated["pagination"]

        cells_data = RemoteSensingCellObservationSerializer(paginated_observations["items"], many=True).data
        subdivision_data = None
        if subdivision_result is not None:
            subdivision_data = RemoteSensingSubdivisionResultSerializer(
                subdivision_result,
                context={"paginated_assignments": paginated_assignments},
            ).data

        response_payload = {
            "status": "success",
            "source": "database",
            "location": SoilLocationResponseSerializer(location).data,
            "block_code": block_code,
            "chunk_size_sqm": observations.first().cell.chunk_size_sqm,
            "temporal_extent": {
                "start_date": payload["start_date"].isoformat(),
                "end_date": payload["end_date"].isoformat(),
            },
            "summary": _build_remote_sensing_summary(observations),
            "cells": cells_data,
            "run": RemoteSensingRunSerializer(run).data if run else None,
            "subdivision_result": subdivision_data,
            "pagination": pagination,
        }
        return Response(
            {"code": 200, "msg": "success", "data": response_payload},
            status=status.HTTP_200_OK,
        )


class RemoteSensingRunStatusView(APIView):
    @extend_schema(
        tags=["Soil Data"],
        summary="وضعیت run تحلیل سنجش‌ازدور",
        description="وضعیت async pipeline را با شناسه run برمی‌گرداند.",
        responses={
            200: build_response(
                RemoteSensingRunStatusEnvelopeSerializer,
                "وضعیت run بازگردانده شد.",
            ),
            404: build_response(
                SoilErrorResponseSerializer,
                "run موردنظر پیدا نشد.",
            ),
        },
    )
    def get(self, request, run_id):
        run = RemoteSensingRun.objects.filter(pk=run_id).select_related("soil_location").first()
        if run is None:
            return Response(
                {"code": 404, "msg": "run پیدا نشد.", "data": None},
                status=status.HTTP_404_NOT_FOUND,
            )

        run_data = RemoteSensingRunSerializer(run).data
        task_id = (run.metadata or {}).get("task_id")
        response_payload = {
            "status": run_data["status_label"],
            "source": "database",
            "run": run_data,
            "task_id": task_id,
        }
        return Response(
            {"code": 200, "msg": "success", "data": response_payload},
            status=status.HTTP_200_OK,
        )


class RemoteSensingRunResultView(APIView):
    @extend_schema(
        tags=["Soil Data"],
        summary="نتیجه نهایی run تحلیل سنجش‌ازدور",
        description="نتایج observation و subdivision داده‌محور را با شناسه run برمی‌گرداند.",
        parameters=[
            {"name": "page", "in": "query", "required": False, "schema": {"type": "integer", "default": 1}},
            {"name": "page_size", "in": "query", "required": False, "schema": {"type": "integer", "default": 100}},
        ],
        responses={
            200: build_response(
                RemoteSensingRunResultEnvelopeSerializer,
                "نتیجه run بازگردانده شد.",
            ),
            404: build_response(
                SoilErrorResponseSerializer,
                "run موردنظر پیدا نشد.",
            ),
        },
    )
    def get(self, request, run_id):
        page = _safe_positive_int(request.query_params.get("page"), default=1)
        page_size = min(_safe_positive_int(request.query_params.get("page_size"), default=100), MAX_REMOTE_SENSING_PAGE_SIZE)
        run = (
            RemoteSensingRun.objects.filter(pk=run_id)
            .select_related("soil_location")
            .first()
        )
        if run is None:
            return Response(
                {"code": 404, "msg": "run پیدا نشد.", "data": None},
                status=status.HTTP_404_NOT_FOUND,
            )

        run_data = RemoteSensingRunSerializer(run).data
        location = _get_location_by_lat_lon(run.soil_location.latitude, run.soil_location.longitude, prefetch=True)
        observations = _get_remote_sensing_observations(
            location=run.soil_location,
            block_code=run.block_code,
            start_date=run.temporal_start,
            end_date=run.temporal_end,
        )
        subdivision_result = getattr(run, "subdivision_result", None)

        if not observations.exists():
            response_payload = {
                "status": run_data["status_label"],
                "source": "processing" if run.status in {RemoteSensingRun.STATUS_PENDING, RemoteSensingRun.STATUS_RUNNING} else "database",
                "location": SoilLocationResponseSerializer(location).data,
                "block_code": run.block_code,
                "chunk_size_sqm": run.chunk_size_sqm,
                "temporal_extent": {
                    "start_date": run.temporal_start.isoformat() if run.temporal_start else None,
                    "end_date": run.temporal_end.isoformat() if run.temporal_end else None,
                },
                "summary": _empty_remote_sensing_summary(),
                "cells": [],
                "run": run_data,
                "subdivision_result": None,
            }
            return Response(
                {"code": 200, "msg": "success", "data": response_payload},
                status=status.HTTP_200_OK,
            )

        paginated_observations = _paginate_observations(
            observations,
            page=page,
            page_size=page_size,
        )
        paginated_assignments = []
        pagination = {"cells": paginated_observations["pagination"]}
        if subdivision_result is not None:
            paginated = _paginate_assignments(
                subdivision_result,
                page=page,
                page_size=page_size,
            )
            paginated_assignments = paginated["items"]
            pagination["assignments"] = paginated["pagination"]

        subdivision_data = None
        if subdivision_result is not None:
            subdivision_data = RemoteSensingSubdivisionResultSerializer(
                subdivision_result,
                context={"paginated_assignments": paginated_assignments},
            ).data

        response_payload = {
            "status": run_data["status_label"],
            "source": "database",
            "location": SoilLocationResponseSerializer(location).data,
            "block_code": run.block_code,
            "chunk_size_sqm": run.chunk_size_sqm,
            "temporal_extent": {
                "start_date": run.temporal_start.isoformat() if run.temporal_start else None,
                "end_date": run.temporal_end.isoformat() if run.temporal_end else None,
            },
            "summary": _build_remote_sensing_summary(observations),
            "cells": RemoteSensingCellObservationSerializer(paginated_observations["items"], many=True).data,
            "run": run_data,
            "subdivision_result": subdivision_data,
            "pagination": pagination,
        }
        return Response(
            {"code": 200, "msg": "success", "data": response_payload},
            status=status.HTTP_200_OK,
        )


def _get_location_by_lat_lon(lat, lon, *, prefetch: bool = False):
    lat_rounded = round(lat, 6)
    lon_rounded = round(lon, 6)
    queryset = SoilLocation.objects.filter(latitude=lat_rounded, longitude=lon_rounded)
    if prefetch:
        queryset = queryset.prefetch_related("block_subdivisions")
    return queryset.first()


def _sync_defined_blocks(location: SoilLocation, blocks: list[dict]) -> None:
    if not blocks:
        return

    with transaction.atomic():
        for index, block in enumerate(blocks):
            block_code = str(block.get("block_code") or f"block-{index + 1}").strip()
            boundary = block.get("boundary") or {}
            BlockSubdivision.objects.update_or_create(
                soil_location=location,
                block_code=block_code,
                defaults={
                    "source_boundary": boundary,
                    "chunk_size_sqm": 900,
                    "status": "defined",
                    "metadata": {
                        "definition_source": "farmer_input",
                        "order": int(block.get("order") or index + 1),
                    },
                },
            )


def _resolve_chunk_size_for_location(location: SoilLocation, block_code: str) -> int | None:
    if block_code:
        subdivision = location.block_subdivisions.filter(block_code=block_code).first()
        if subdivision is not None:
            return subdivision.chunk_size_sqm
    block_layout = location.block_layout or {}
    if not block_code:
        return block_layout.get("analysis_grid_summary", {}).get("chunk_size_sqm")
    for block in block_layout.get("blocks", []):
        if block.get("block_code") == block_code:
            return block.get("analysis_grid_summary", {}).get("chunk_size_sqm")
    return None


def _get_remote_sensing_observations(*, location, block_code: str, start_date, end_date):
    queryset = (
        AnalysisGridObservation.objects.select_related("cell", "run")
        .filter(
            cell__soil_location=location,
            temporal_start=start_date,
            temporal_end=end_date,
        )
        .order_by("cell__cell_code")
    )
    return queryset.filter(cell__block_code=block_code or "")


def _get_latest_remote_sensing_run(*, location, block_code: str, start_date, end_date):
    return (
        RemoteSensingRun.objects.filter(
            soil_location=location,
            block_code=block_code or "",
            temporal_start=start_date,
            temporal_end=end_date,
        )
        .order_by("-created_at", "-id")
        .first()
    )


def _get_remote_sensing_subdivision_result(*, location, block_code: str, start_date, end_date):
    return (
        RemoteSensingSubdivisionResult.objects.filter(
            soil_location=location,
            block_code=block_code or "",
            temporal_start=start_date,
            temporal_end=end_date,
        )
        .select_related("run")
        .prefetch_related("assignments__cell")
        .order_by("-created_at", "-id")
        .first()
    )


def _build_remote_sensing_summary(observations):
    # Only the metric means are aggregated; cell_count is a plain count.
    aggregates = observations.aggregate(
        ndvi_mean=Avg("ndvi"),
        ndwi_mean=Avg("ndwi"),
        lst_c_mean=Avg("lst_c"),
        soil_vv_db_mean=Avg("soil_vv_db"),
        dem_m_mean=Avg("dem_m"),
        slope_deg_mean=Avg("slope_deg"),
    )
    return {
        "cell_count": observations.count(),
        "ndvi_mean": _round_or_none(aggregates.get("ndvi_mean")),
        "ndwi_mean": _round_or_none(aggregates.get("ndwi_mean")),
        "lst_c_mean": _round_or_none(aggregates.get("lst_c_mean")),
        "soil_vv_db_mean": _round_or_none(aggregates.get("soil_vv_db_mean")),
        "dem_m_mean": _round_or_none(aggregates.get("dem_m_mean")),
        "slope_deg_mean": _round_or_none(aggregates.get("slope_deg_mean")),
    }


def _empty_remote_sensing_summary():
    return {
        "cell_count": 0,
        "ndvi_mean": None,
        "ndwi_mean": None,
        "lst_c_mean": None,
        "soil_vv_db_mean": None,
        "dem_m_mean": None,
        "slope_deg_mean": None,
    }


def _round_or_none(value):
    if value is None:
        return None
    return round(float(value), 6)


def _paginate_assignments(result: RemoteSensingSubdivisionResult, *, page: int, page_size: int) -> dict:
    page_size = min(max(page_size, 1), MAX_REMOTE_SENSING_PAGE_SIZE)
    assignments = result.assignments.select_related("cell").order_by("cluster_label", "cell__cell_code")
    paginator = Paginator(assignments, page_size)
    if paginator.count == 0:
        return {
            "items": [],
            "pagination": {
                "page": 1,
                "page_size": page_size,
                "total_items": 0,
                "total_pages": 0,
                "has_next": False,
                "has_previous": False,
            },
        }
    try:
        page_obj = paginator.page(page)
    except EmptyPage:
        page_obj = paginator.page(paginator.num_pages)
    return {
        "items": list(page_obj.object_list),
        "pagination": {
            "page": page_obj.number,
            "page_size": page_size,
            "total_items": paginator.count,
            "total_pages": paginator.num_pages,
            "has_next": page_obj.has_next(),
            "has_previous": page_obj.has_previous(),
        },
    }


def _safe_positive_int(value, *, default: int) -> int:
    try:
        parsed = int(value)
    except (TypeError, ValueError):
        return default
    return parsed if parsed > 0 else default


def _paginate_observations(observations, *, page: int, page_size: int) -> dict:
    page_size = min(max(page_size, 1), MAX_REMOTE_SENSING_PAGE_SIZE)
    paginator = Paginator(observations, page_size)
    if paginator.count == 0:
        return {
            "items": [],
            "pagination": {
                "page": 1,
                "page_size": page_size,
                "total_items": 0,
                "total_pages": 0,
                "has_next": False,
                "has_previous": False,
            },
        }
    try:
        page_obj = paginator.page(page)
    except EmptyPage:
        page_obj = paginator.page(paginator.num_pages)
    return {
        "items": list(page_obj.object_list),
        "pagination": {
            "page": page_obj.number,
            "page_size": page_size,
            "total_items": paginator.count,
            "total_pages": paginator.num_pages,
            "has_next": page_obj.has_next(),
            "has_previous": page_obj.has_previous(),
        },
    }
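`_paginate_assignments` and `_paginate_observations` build identical pagination envelopes; if the duplication ever becomes a maintenance burden, a shared core is straightforward. A sketch only, under the assumption that both call sites keep the same envelope shape (the `_paginate_queryset` name is hypothetical, not part of this commit):

```python
def _paginate_queryset(queryset, *, page: int, page_size: int) -> dict:
    # Shared core: clamp the page size, paginate, and fall back to the last
    # page when the requested page is out of range.
    page_size = min(max(page_size, 1), MAX_REMOTE_SENSING_PAGE_SIZE)
    paginator = Paginator(queryset, page_size)
    if paginator.count == 0:
        return {
            "items": [],
            "pagination": {
                "page": 1,
                "page_size": page_size,
                "total_items": 0,
                "total_pages": 0,
                "has_next": False,
                "has_previous": False,
            },
        }
    try:
        page_obj = paginator.page(page)
    except EmptyPage:
        page_obj = paginator.page(paginator.num_pages)
    return {
        "items": list(page_obj.object_list),
        "pagination": {
            "page": page_obj.number,
            "page_size": page_size,
            "total_items": paginator.count,
            "total_pages": paginator.num_pages,
            "has_next": page_obj.has_next(),
            "has_previous": page_obj.has_previous(),
        },
    }


def _paginate_assignments(result, *, page: int, page_size: int) -> dict:
    # Each wrapper would then only prepare its queryset.
    return _paginate_queryset(
        result.assignments.select_related("cell").order_by("cluster_label", "cell__cell_code"),
        page=page,
        page_size=page_size,
    )


def _paginate_observations(observations, *, page: int, page_size: int) -> dict:
    return _paginate_queryset(observations, page=page, page_size=page_size)
```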