This commit is contained in:
2026-05-09 16:55:06 +03:30
parent 1679825ae2
commit cead7dafe2
51 changed files with 7514 additions and 1221 deletions
+1
View File
@@ -193,6 +193,7 @@ WEATHER_TIMEOUT_SECONDS = float(os.environ.get("WEATHER_TIMEOUT_SECONDS", "60"))
SOIL_DATA_PROVIDER = os.environ.get("SOIL_DATA_PROVIDER", "soilgrids").strip().lower() SOIL_DATA_PROVIDER = os.environ.get("SOIL_DATA_PROVIDER", "soilgrids").strip().lower()
SOIL_MOCK_DELAY_SECONDS = float(os.environ.get("SOIL_MOCK_DELAY_SECONDS", "0.8")) SOIL_MOCK_DELAY_SECONDS = float(os.environ.get("SOIL_MOCK_DELAY_SECONDS", "0.8"))
SOILGRIDS_TIMEOUT_SECONDS = float(os.environ.get("SOILGRIDS_TIMEOUT_SECONDS", "60")) SOILGRIDS_TIMEOUT_SECONDS = float(os.environ.get("SOILGRIDS_TIMEOUT_SECONDS", "60"))
SUBDIVISION_CHUNK_SQM = int(os.environ.get("SUBDIVISION_CHUNK_SQM", "900"))
BACKEND_PLANT_SYNC_BASE_URL = os.environ.get("BACKEND_PLANT_SYNC_BASE_URL", "") BACKEND_PLANT_SYNC_BASE_URL = os.environ.get("BACKEND_PLANT_SYNC_BASE_URL", "")
BACKEND_PLANT_SYNC_API_KEY = os.environ.get("BACKEND_PLANT_SYNC_API_KEY", "") BACKEND_PLANT_SYNC_API_KEY = os.environ.get("BACKEND_PLANT_SYNC_API_KEY", "")
BACKEND_PLANT_SYNC_TIMEOUT = int(os.environ.get("BACKEND_PLANT_SYNC_TIMEOUT", "20")) BACKEND_PLANT_SYNC_TIMEOUT = int(os.environ.get("BACKEND_PLANT_SYNC_TIMEOUT", "20"))
+6 -9
View File
@@ -11,6 +11,7 @@ from django.core.paginator import EmptyPage, Paginator
from farm_data.models import SensorData from farm_data.models import SensorData
from farm_data.services import get_canonical_farm_record, get_runtime_plant_for_farm from farm_data.services import get_canonical_farm_record, get_runtime_plant_for_farm
from location_data.satellite_snapshot import build_location_satellite_snapshot
from plant.gdd import calculate_daily_gdd, resolve_growth_profile from plant.gdd import calculate_daily_gdd, resolve_growth_profile
from weather.models import WeatherForecast from weather.models import WeatherForecast
@@ -188,14 +189,11 @@ def _build_weather_from_farm(sensor: SensorData) -> list[dict[str, Any]]:
def _build_soil_and_site_from_farm(sensor: SensorData) -> tuple[dict[str, Any], dict[str, Any]]: def _build_soil_and_site_from_farm(sensor: SensorData) -> tuple[dict[str, Any], dict[str, Any]]:
depths = list(sensor.center_location.depths.all()) satellite_metrics = build_location_satellite_snapshot(sensor.center_location).get("resolved_metrics") or {}
top_depth = depths[0] if depths else None ndwi = _safe_float(satellite_metrics.get("ndwi"), 0.28)
smfcf = _safe_float(getattr(top_depth, "wv0033", None), 0.34) smfcf = _safe_float(ndwi, 0.34)
smw = _safe_float(getattr(top_depth, "wv1500", None), 0.14) smw = max(round(smfcf * 0.45, 3), 0.12)
sm0 = _safe_float( sm0 = min(max(smfcf + 0.08, smw + 0.12), 0.6)
_pick_first_not_none(getattr(top_depth, "porosity", None), getattr(top_depth, "wv0000", None)),
min(max(smfcf + 0.08, smw + 0.12), 0.6),
)
soil_moisture = None soil_moisture = None
payload = sensor.sensor_payload or {} payload = sensor.sensor_payload or {}
if isinstance(payload, dict): if isinstance(payload, dict):
@@ -292,7 +290,6 @@ def build_growth_context(payload: dict[str, Any]) -> GrowthSimulationContext:
if payload.get("farm_uuid"): if payload.get("farm_uuid"):
sensor = ( sensor = (
SensorData.objects.select_related("center_location") SensorData.objects.select_related("center_location")
.prefetch_related("center_location__depths")
.filter(farm_uuid=payload["farm_uuid"]) .filter(farm_uuid=payload["farm_uuid"])
.first() .first()
) )
+9 -7
View File
@@ -6,6 +6,7 @@ from statistics import mean
from typing import Any from typing import Any
from django.apps import apps from django.apps import apps
from location_data.satellite_snapshot import build_location_satellite_snapshot
from crop_simulation.services import CropSimulationService from crop_simulation.services import CropSimulationService
@@ -141,14 +142,15 @@ def _build_weather_records(forecasts: list[Any], *, latitude: float, longitude:
def _build_soil_parameters(sensor: Any) -> tuple[dict[str, Any], dict[str, Any]]: def _build_soil_parameters(sensor: Any) -> tuple[dict[str, Any], dict[str, Any]]:
moisture_pct = _sensor_metric(sensor, "soil_moisture") moisture_pct = _sensor_metric(sensor, "soil_moisture")
depths = []
center_location = getattr(sensor, "center_location", None) center_location = getattr(sensor, "center_location", None)
if center_location is not None: satellite_metrics = (
depths = list(center_location.depths.all()) build_location_satellite_snapshot(center_location).get("resolved_metrics") or {}
if center_location is not None
top_depth = depths[0] if depths else None else {}
wv0033 = _safe_float(getattr(top_depth, "wv0033", None), 0.34) )
wv1500 = _safe_float(getattr(top_depth, "wv1500", None), 0.14) ndwi = _safe_float(satellite_metrics.get("ndwi"), 0.34)
wv0033 = ndwi if ndwi > 0 else 0.34
wv1500 = max(round(wv0033 * 0.45, 3), 0.14)
smfcf = _clamp(wv0033 if wv0033 > 0 else 0.34, 0.2, 0.55) smfcf = _clamp(wv0033 if wv0033 > 0 else 0.34, 0.2, 0.55)
smw = _clamp(wv1500 if wv1500 > 0 else 0.12, 0.05, smfcf - 0.02) smw = _clamp(wv1500 if wv1500 > 0 else 0.12, 0.05, smfcf - 0.02)
+12 -14
View File
@@ -7,6 +7,7 @@ from datetime import date, datetime, timedelta
from typing import Any from typing import Any
from django.db import transaction from django.db import transaction
from location_data.satellite_snapshot import build_location_satellite_snapshot
from .models import SimulationRun, SimulationScenario from .models import SimulationRun, SimulationScenario
@@ -475,15 +476,12 @@ def build_simulation_payload_from_farm(
longitude=float(farm.center_location.longitude), longitude=float(farm.center_location.longitude),
) )
depths = list(farm.center_location.depths.all()) satellite_metrics = build_location_satellite_snapshot(farm.center_location).get("resolved_metrics") or {}
top_depth = depths[0] if depths else None ndwi = _safe_float(satellite_metrics.get("ndwi"), 0.28)
smfcf = _clamp(_safe_float(getattr(top_depth, "wv0033", None), 0.34), 0.2, 0.55) smfcf = _clamp(ndwi if ndwi is not None else 0.34, 0.2, 0.55)
smw = _clamp(_safe_float(getattr(top_depth, "wv1500", None), 0.14), 0.05, max(smfcf - 0.02, 0.06)) smw = _clamp(smfcf * 0.45, 0.05, max(smfcf - 0.02, 0.06))
sm0 = _clamp( sm0 = _clamp(
_safe_float(
_pick_first_not_none(getattr(top_depth, "porosity", None), getattr(top_depth, "wv0000", None)),
min(max(smfcf + 0.08, smw + 0.12), 0.6), min(max(smfcf + 0.08, smw + 0.12), 0.6),
),
max(smfcf + 0.02, smw + 0.05), max(smfcf + 0.02, smw + 0.05),
0.8, 0.8,
) )
@@ -493,10 +491,10 @@ def build_simulation_payload_from_farm(
if soil_moisture is not None if soil_moisture is not None
else DEFAULT_WAV else DEFAULT_WAV
) )
nitrogen = _pick_first_not_none(_sensor_metric(farm, "nitrogen"), getattr(top_depth, "nitrogen", None)) nitrogen = _pick_first_not_none(_sensor_metric(farm, "nitrogen"), satellite_metrics.get("soil_vv_db"))
phosphorus = _sensor_metric(farm, "phosphorus") phosphorus = _sensor_metric(farm, "phosphorus")
potassium = _sensor_metric(farm, "potassium") potassium = _sensor_metric(farm, "potassium")
soil_ph = _pick_first_not_none(_sensor_metric(farm, "soil_ph"), getattr(top_depth, "phh2o", None)) soil_ph = _pick_first_not_none(_sensor_metric(farm, "soil_ph"), None)
ec = _sensor_metric(farm, "electrical_conductivity") ec = _sensor_metric(farm, "electrical_conductivity")
resolved_soil = { resolved_soil = {
@@ -513,11 +511,11 @@ def build_simulation_payload_from_farm(
"potassium": _safe_float(potassium, 0.0), "potassium": _safe_float(potassium, 0.0),
"soil_ph": _safe_float(soil_ph, 7.0), "soil_ph": _safe_float(soil_ph, 7.0),
"electrical_conductivity": _safe_float(ec, 0.0), "electrical_conductivity": _safe_float(ec, 0.0),
"clay": _safe_float(getattr(top_depth, "clay", None), 0.0), "clay": 0.0,
"sand": _safe_float(getattr(top_depth, "sand", None), 0.0), "sand": 0.0,
"silt": _safe_float(getattr(top_depth, "silt", None), 0.0), "silt": 0.0,
"cec": _safe_float(getattr(top_depth, "cec", None), 0.0), "cec": 0.0,
"soc": _safe_float(getattr(top_depth, "soc", None), 0.0), "soc": 0.0,
} }
if soil: if soil:
resolved_soil.update(soil) resolved_soil.update(soil)
+1 -1
View File
@@ -708,7 +708,7 @@ class YieldHarvestSummaryService:
) -> dict[str, Any]: ) -> dict[str, Any]:
farm = ( farm = (
SensorData.objects.select_related("center_location", "weather_forecast") SensorData.objects.select_related("center_location", "weather_forecast")
.prefetch_related("center_location__depths", "plant_assignments__plant") .prefetch_related("plant_assignments__plant")
.filter(farm_uuid=farm_uuid) .filter(farm_uuid=farm_uuid)
.first() .first()
) )
+371
View File
@@ -0,0 +1,371 @@
# ساختار فعلی `location_data`
این فایل وضعیت فعلی اپ `location_data` را توضیح می‌دهد؛ هم از نظر ساختار فایل‌ها و هم از نظر مدل‌ها، APIها و جریان داده.
## هدف فعلی اپ
اپ `location_data` فعلاً این مسئولیت‌ها را دارد:
- نگه‌داری موقعیت زمین با `lat` و `lon`
- نگه‌داری مرز مزرعه در `farm_boundary`
- نگه‌داری ساختار بلوک‌های زمین در `block_layout`
- نگه‌داری داده‌های خاک برای عمق‌های مختلف در `SoilDepthData`
- نگه‌داری مشاهدات NDVI در `NdviObservation`
- برگرداندن ساختار بلوک‌های زمین از API محلی بدون نیاز به API خارجی در فاز فعلی
نکته مهم:
- در فاز فعلی، endpoint اصلی `location_data` برای ساختار زمین، فقط داده را در دیتابیس می‌خواند/ذخیره می‌کند.
- فعلاً برای بلوک‌ها، زیر‌بلوک‌ها و داده‌های ماهواره‌ای هیچ درخواست خارجی زده نمی‌شود.
## ساختار فایل‌ها
```text
location_data/
├── admin.py
├── apps.py
├── models.py
├── serializers.py
├── views.py
├── urls.py
├── tasks.py
├── soil_adapters.py
├── remote_sensing.py
├── ndvi.py
├── test_soil_api.py
├── test_soil_adapters.py
├── test_ndvi_health_api.py
├── postman/
│ └── soil_data.json
├── management/
│ └── commands/
└── migrations/
├── 0001_initial.py
├── 0002_soildepthdata_refactor.py
├── 0002_soillocation_ideal_sensor_profile.py
├── 0003_rename_app_label.py
├── 0004_soillocation_farm_boundary.py
├── 0005_merge_20260327_0840.py
├── 0006_remove_soillocation_ideal_sensor_profile.py
├── 0007_ndviobservation.py
└── 0008_soillocation_block_layout.py
```
## مدل‌ها و جدول‌ها
### 1) `SoilLocation`
مدل اصلی اپ است و نماینده یک موقعیت یکتا برای زمین یا مرکز زمین محسوب می‌شود.
فیلدهای اصلی:
| فیلد | نوع | توضیح |
|---|---|---|
| `id` | `BigAutoField` | شناسه داخلی رکورد |
| `latitude` | `DecimalField(9,6)` | عرض جغرافیایی |
| `longitude` | `DecimalField(9,6)` | طول جغرافیایی |
| `task_id` | `CharField` | شناسه تسک Celery برای جریان قدیمی واکشی خاک |
| `farm_boundary` | `JSONField` | مرز مزرعه به شکل Polygon یا corners |
| `input_block_count` | `PositiveIntegerField` | تعداد بلوک اولیه‌ای که از ورودی کشاورز می‌آید |
| `block_layout` | `JSONField` | ساختار بلوک‌ها و زیر‌بلوک‌های زمین |
| `created_at` | `DateTimeField` | زمان ایجاد |
| `updated_at` | `DateTimeField` | زمان آخرین تغییر |
قیدها:
- روی ترکیب `latitude` و `longitude` یکتا است.
رفتار مهم:
- اگر `input_block_count` ارسال نشود، مقدار پیش‌فرض `1` است.
- اگر `block_layout` خالی باشد، به صورت خودکار با یک بلوک کامل ساخته می‌شود.
- متد `set_input_block_count()` ساختار اولیه بلوک‌ها را می‌سازد.
### 2) `SoilDepthData`
این مدل داده‌های خاک را برای هر عمق نگه می‌دارد و به `SoilLocation` وصل است.
عمق‌های فعلی:
- `0-5cm`
- `5-15cm`
- `15-30cm`
فیلدهای مهم:
| فیلد | نوع | توضیح |
|---|---|---|
| `soil_location` | `ForeignKey` | ارتباط با `SoilLocation` |
| `depth_label` | `CharField` | برچسب عمق |
| `bdod` تا `wv1500` | `FloatField` | پارامترهای مختلف خاک |
| `created_at` | `DateTimeField` | زمان ثبت رکورد |
قیدها:
- برای هر `soil_location` و هر `depth_label` فقط یک رکورد وجود دارد.
### 3) `NdviObservation`
این مدل برای ذخیره مشاهده‌های NDVI استفاده می‌شود.
فیلدهای مهم:
| فیلد | نوع | توضیح |
|---|---|---|
| `location` | `ForeignKey` | ارتباط با `SoilLocation` |
| `observation_date` | `DateField` | تاریخ مشاهده |
| `mean_ndvi` | `FloatField` | میانگین NDVI |
| `ndvi_map` | `JSONField` | داده مکانی NDVI |
| `vegetation_health_class` | `CharField` | کلاس سلامت پوشش گیاهی |
| `satellite_source` | `CharField` | منبع تصویر ماهواره‌ای |
| `cloud_cover` | `FloatField` | درصد ابر |
| `metadata` | `JSONField` | داده تکمیلی |
## ساختار `block_layout`
فیلد `block_layout` فعلاً ساختار پایه تقسیم زمین را نگه می‌دارد.
نمونه پیش‌فرض وقتی کل زمین یک بلوک باشد:
```json
{
"input_block_count": 1,
"default_full_farm": true,
"algorithm_status": "pending",
"blocks": [
{
"block_code": "block-1",
"order": 1,
"source": "default",
"needs_subdivision": null,
"sub_blocks": []
}
]
}
```
نمونه وقتی ورودی مثلاً `block_count = 3` باشد:
```json
{
"input_block_count": 3,
"default_full_farm": false,
"algorithm_status": "pending",
"blocks": [
{
"block_code": "block-1",
"order": 1,
"source": "input",
"needs_subdivision": null,
"sub_blocks": []
},
{
"block_code": "block-2",
"order": 2,
"source": "input",
"needs_subdivision": null,
"sub_blocks": []
},
{
"block_code": "block-3",
"order": 3,
"source": "input",
"needs_subdivision": null,
"sub_blocks": []
}
]
}
```
معنای فیلدها:
| فیلد | توضیح |
|---|---|
| `input_block_count` | تعداد بلوک اولیه |
| `default_full_farm` | آیا کل زمین هنوز یک بلوک کامل است یا نه |
| `algorithm_status` | وضعیت اجرای الگوریتم تقسیم‌بندی |
| `blocks` | لیست بلوک‌های فعلی |
| `block_code` | کد بلوک |
| `order` | ترتیب بلوک |
| `source` | منشأ بلوک: `default` یا `input` |
| `needs_subdivision` | آیا الگوریتم تشخیص داده که این بلوک باید خردتر شود یا نه |
| `sub_blocks` | لیست زیر‌بلوک‌ها |
## Serializerها
### `SoilDataRequestSerializer`
ورودی endpoint اصلی `location_data`:
| فیلد | اجباری | توضیح |
|---|---|---|
| `lat` | بله | عرض جغرافیایی |
| `lon` | بله | طول جغرافیایی |
| `block_count` | خیر | تعداد بلوک اولیه، پیش‌فرض `1` |
### `SoilLocationResponseSerializer`
خروجی اصلی برای یک location:
- `id`
- `lat`
- `lon`
- `input_block_count`
- `block_layout`
- `depths`
### `SoilDepthDataSerializer`
لیست پارامترهای خاک برای هر عمق را برمی‌گرداند.
### `NdviHealthRequestSerializer` و `NdviHealthResponseSerializer`
برای endpoint مربوط به NDVI استفاده می‌شوند.
## Viewها و APIها
### 1) `SoilDataView`
مسیر:
- `GET /api/soil-data/`
- `POST /api/soil-data/`
وظیفه فعلی:
- گرفتن `lat` و `lon`
- گرفتن `block_count` در صورت وجود
- ساخت یا پیدا کردن `SoilLocation`
- ذخیره `input_block_count`
- ساخت `block_layout`
- برگرداندن پاسخ با `source = local`
رفتار فعلی:
- اگر location وجود نداشته باشد، ساخته می‌شود.
- اگر `block_count` تغییر کند، ساختار `block_layout` دوباره ساخته می‌شود.
- فعلاً هیچ fetch خارجی برای اطلاعات خاک یا ماهواره‌ای انجام نمی‌شود.
### 2) `SoilDataTaskStatusView`
مسیر:
- `GET /api/soil-data/tasks/<task_id>/status/`
وضعیت فعلی:
- هنوز در کد وجود دارد.
- برای جریان قدیمی مبتنی بر Celery طراحی شده است.
- با تغییر اخیر، endpoint اصلی `location_data` دیگر به‌طور پیش‌فرض task جدیدی صف نمی‌کند.
### 3) `NdviHealthView`
مسیر:
- `POST /api/soil-data/ndvi-health/`
وظیفه:
- دریافت `farm_uuid`
- خواندن داده NDVI از سرویس داخلی NDVI
- برگرداندن اطلاعات سلامت پوشش گیاهی
## فایل `tasks.py`
این فایل هنوز منطق قدیمی واکشی داده خاک را نگه می‌دارد.
اجزای اصلی:
- `fetch_soil_data_for_coordinates()`
- `fetch_soil_data_task()`
نکته:
- این بخش هنوز برای سازگاری و جریان‌های قدیمی در پروژه باقی مانده است.
- ولی در فاز فعلی تقسیم بلوک‌ها، از این task برای endpoint اصلی `location_data` استفاده نمی‌شود.
## فایل `soil_adapters.py`
این فایل abstraction مربوط به تامین داده خاک را نگه می‌دارد.
کاربرد آن:
- mock provider
- live soil provider
- ساختار depth-based data fetch
در وضعیت فعلی:
- برای منطق بلوک‌بندی فعلی لازم نیست.
- اما برای جریان قدیمی یا مراحل بعدی می‌تواند دوباره استفاده شود.
## فایل `remote_sensing.py`
این فایل مربوط به منطق سنجش‌ازدور و داده‌های ماهواره‌ای است.
در وضعیت فعلی:
- برای block layout فعلاً استفاده فعال ندارد.
- بعداً می‌تواند برای تحلیل هر بلوک یا زیر‌بلوک استفاده شود.
## فایل `ndvi.py`
این فایل سرویس/منطق NDVI را نگه می‌دارد و برای endpoint NDVI استفاده می‌شود.
## migrationها
مهم‌ترین migrationهای فعلی:
| migration | توضیح |
|---|---|
| `0001_initial.py` | ساختار اولیه `SoilLocation` |
| `0002_soildepthdata_refactor.py` | جداسازی داده‌های عمقی در `SoilDepthData` |
| `0004_soillocation_farm_boundary.py` | اضافه شدن `farm_boundary` |
| `0007_ndviobservation.py` | اضافه شدن `NdviObservation` |
| `0008_soillocation_block_layout.py` | اضافه شدن `input_block_count` و `block_layout` |
## تست‌ها
فایل‌های تست اصلی:
- `location_data/test_soil_api.py`
- تست ساختار محلی بلوک‌ها
- تست پیش‌فرض یک بلوک
- تست تغییر `block_count`
- `location_data/test_soil_adapters.py`
- تست adapterهای خاک
- تست ذخیره depth data
- `location_data/test_ndvi_health_api.py`
- تست endpoint NDVI
## ارتباط با `farm_data`
`location_data` مستقیماً توسط `farm_data` استفاده می‌شود.
نمونه وابستگی‌ها:
- `farm_data` از `SoilLocation` به عنوان `center_location` استفاده می‌کند.
- `farm_boundary` از سمت `farm_data` می‌آید.
- `block_count` هم از ورودی `farm_data` قابل ثبت است.
- `farm_data` فعلاً فقط location و block layout را ذخیره می‌کند و برای این بخش sync خارجی انجام نمی‌دهد.
## جمع‌بندی ساختار فعلی
الان `location_data` دو لایه دارد:
1. لایه فعلی فعال برای بلوک‌بندی زمین
- محلی
- ساده
- بدون API خارجی
- با `input_block_count` و `block_layout`
2. لایه قدیمی/جانبی برای خاک و NDVI
- `SoilDepthData`
- `tasks.py`
- `soil_adapters.py`
- `NdviObservation`
- `remote_sensing.py`
یعنی از نظر معماری، اپ الان هم داده مکانی زمین را نگه می‌دارد و هم زیرساختی برای تحلیل خاک/NDVI دارد، ولی منطق جدید بلوک‌ها فعلاً مستقل و محلی پیاده شده است.
+685
View File
@@ -0,0 +1,685 @@
# مستند کامل عملکرد فعلی `location_data`
این فایل شرح می‌دهد که اپ `location_data` در وضعیت فعلی دقیقاً چه کاری انجام می‌دهد، چه مدل‌هایی دارد، جریان درخواست‌ها چگونه است، منطق تقسیم‌بندی بلوک‌ها چگونه اجرا می‌شود و چه بخش‌هایی فقط داده ذخیره‌شده را برمی‌گردانند.
---
## 1) هدف فعلی اپ `location_data`
اپ `location_data` در وضعیت فعلی چند مسئولیت اصلی دارد:
- نگه‌داری موقعیت جغرافیایی زمین با `lat` و `lon`
- نگه‌داری مرز زمین یا بلوک در `farm_boundary`
- نگه‌داری ساختار بلوک‌های اصلی زمین در `block_layout`
- نگه‌داری نتیجه خردسازی هوشمند هر بلوک در مدل `BlockSubdivision`
- تولید نقاط شبکه‌ای با chunkهای حدوداً ۱۰۰ متر مربعی (گام تقریبی ۱۰ متر)، یا هر اندازه‌ای که با `SUBDIVISION_CHUNK_SQM` تنظیم شود
- اجرای خوشه‌بندی `KMeans` روی نقاط شبکه‌ای
- پیدا کردن تعداد بهینه خوشه‌ها با روش `Elbow`
- ذخیره centroidهای نهایی هر بخش خردشده
- تولید و ذخیره تصویر نمودار `K-SSE` برای هر subdivision
- نگه‌داری داده‌های خاک در `SoilDepthData`
- نگه‌داری داده‌های NDVI در `NdviObservation`
نکته مهم:
- در فاز فعلی، `GET` هیچ پردازش جدیدی انجام نمی‌دهد.
- تمام پردازش subdivision فقط در زمان `POST` و فقط اگر subdivision آن بلوک قبلاً ساخته نشده باشد اجرا می‌شود.
---
## 2) تنظیمات محیطی
### `SUBDIVISION_CHUNK_SQM`
در `config/settings.py` یک متغیر جدید اضافه شده است:
- `SUBDIVISION_CHUNK_SQM`
- مقدار پیش‌فرض: `900` (مطابق `config/settings.py`؛ مثال‌های زیر با مقدار `100` نوشته شده‌اند)
- واحد: متر مربع
کاربرد:
- تعیین می‌کند شبکه اولیه برای subdivision با چه اندازه‌ای ساخته شود.
- اگر مقدار `100` باشد، هر chunk تقریباً یک سلول `10m x 10m` خواهد بود، چون:
```text
step = sqrt(100) = 10 meters
```
این مقدار از `.env` یا environment خوانده می‌شود:
```env
SUBDIVISION_CHUNK_SQM=100
```
---
## 3) مدل‌های اصلی اپ
## 3.1) `SoilLocation`
این مدل رکورد اصلی location را نگه می‌دارد.
### فیلدها
- `latitude`
- `longitude`
- `task_id`
- `farm_boundary`
- `input_block_count`
- `block_layout`
- `created_at`
- `updated_at`
### نقش
- هر location با ترکیب `latitude + longitude` یکتا است.
- اطلاعات کلی زمین یا مرکز زمین را نگه می‌دارد.
- اگر هنوز هیچ تقسیم‌بندی انجام نشده باشد، ساختار اولیه بلوک‌ها را در `block_layout` نگه می‌دارد.
### `block_layout`
این فیلد JSON ساختار بلوک‌ها را نگه می‌دارد. نمونه ساده:
```json
{
"input_block_count": 1,
"default_full_farm": true,
"algorithm_status": "completed",
"blocks": [
{
"block_code": "block-1",
"order": 1,
"source": "default",
"needs_subdivision": true,
"sub_blocks": [
{
"sub_block_code": "sub-block-1",
"centroid_lat": 35.689123,
"centroid_lon": 51.389456
}
],
"subdivision_summary": {
"chunk_size_sqm": 100,
"grid_point_count": 24,
"centroid_count": 3,
"optimal_k": 3
}
}
]
}
```
### رفتار مهم
- اگر `block_layout` خالی باشد، به صورت پیش‌فرض با یک بلوک کامل ساخته می‌شود.
- متد `set_input_block_count()` ساختار اولیه بلوک‌های اصلی را می‌سازد.
---
## 3.2) `BlockSubdivision`
این مدل نتیجه واقعی subdivision برای هر بلوک را ذخیره می‌کند.
### فیلدها
- `soil_location`: ارتباط با `SoilLocation`
- `block_code`: شناسه بلوکی که subdivision روی آن اجرا شده
- `source_boundary`: مرز همان بلوک
- `chunk_size_sqm`: اندازه هر chunk
- `grid_points`: نقاط اولیه شبکه
- `centroid_points`: centroidهای نهایی خوشه‌ها
- `grid_point_count`: تعداد نقاط اولیه
- `centroid_count`: تعداد centroidهای نهایی
- `elbow_plot`: تصویر نمودار elbow
- `status`: وضعیت رکورد
- `metadata`: داده تکمیلی مانند `optimal_k` و `inertia_curve`
- `created_at`
- `updated_at`
### نقش
این مدل منبع اصلی داده subdivision است.
یعنی:
- نقاط خام شبکه در این مدل ذخیره می‌شوند
- centroidهای نهایی هم در این مدل ذخیره می‌شوند
- نمودار elbow هم در همین مدل ذخیره می‌شود
### قید یکتا
برای هر location و هر `block_code` فقط یک subdivision وجود دارد:
```text
(soil_location, block_code) unique
```
بنابراین اگر برای یک بلوک قبلاً subdivision ساخته شده باشد، دوباره ایجاد نمی‌شود.
---
## 3.3) `SoilDepthData`
این مدل داده‌های خاک برای عمق‌های مختلف را نگه می‌دارد.
عمق‌های فعلی:
- `0-5cm`
- `5-15cm`
- `15-30cm`
این بخش در حال حاضر مستقل از subdivision است و هنوز برای هر sub-block جداگانه داده خاک تولید نمی‌کند.
---
## 3.4) `NdviObservation`
این مدل داده‌های NDVI و سلامت پوشش گیاهی را نگه می‌دارد.
این بخش هم فعلاً مستقل از منطق subdivision است.
---
## 4) فایل `block_subdivision.py`
فایل `location_data/block_subdivision.py` مرکز اصلی منطق هوشمند subdivision است.
### وظایف اصلی این فایل
- استخراج polygon از ورودی
- تبدیل مختصات جغرافیایی به صفحه محلی متری
- ساخت grid points با اندازه chunk مشخص
- اجرای `KMeans` برای `K=1..10`
- ذخیره `SSE` یا همان `Inertia`
- پیدا کردن elbow point
- ساخت centroidهای نهایی خوشه‌ها
- sync کردن نتیجه با `block_layout`
- تولید تصویر نمودار elbow
- ذخیره تصویر در مدل با `ContentFile`
---
## 5) روند هندسی subdivision
## 5.1) استخراج Polygon
ورودی boundary می‌تواند به چند شکل بیاید:
- GeoJSON Polygon
- `corners`
- آرایه مستقیم از نقاط
تابع `extract_polygon()` این ورودی را به لیستی از نقاط جغرافیایی تبدیل می‌کند.
نمونه ورودی معتبر:
```json
{
"type": "Polygon",
"coordinates": [
[
[51.3890, 35.6890],
[51.3902, 35.6890],
[51.3902, 35.6900],
[51.3890, 35.6900],
[51.3890, 35.6890]
]
]
}
```
---
## 5.2) تبدیل مختصات به فضای محلی متری
برای اینکه بتوانیم فاصله‌ها و گریدبندی را بر اساس متر حساب کنیم، polygon از مختصات جغرافیایی به مختصات محلی متری تبدیل می‌شود.
تابع مربوط:
- `project_polygon_to_local_meters()`
ویژگی این تبدیل:
- نقطه اول polygon به عنوان origin در نظر گرفته می‌شود
- با تقریب محلی، `lat/lon` به `x/y` در واحد متر تبدیل می‌شوند
این تبدیل برای subdivision کوچک و محلی مناسب است.
---
## 5.3) تولید grid points
تابع:
- `generate_grid_points()`
منطق:
1. ابتدا اندازه گام محاسبه می‌شود:
```text
step_m = sqrt(chunk_size_sqm)
```
2. روی bounding box polygon، نقاط مرکزی grid بررسی می‌شوند.
3. هر نقطه‌ای که داخل polygon باشد نگه داشته می‌شود.
خروجی:
- `grid_points`: مختصات جغرافیایی قابل ذخیره در JSON
- `grid_vectors`: مختصات محلی متری برای ورود به `KMeans`
نمونه هر grid point:
```json
{
"point_code": "pt-1",
"lat": 35.689123,
"lon": 51.389456
}
```
---
## 6) الگوریتم خوشه‌بندی هوشمند
## 6.1) اجرای `KMeans`
تابع:
- `cluster_grid_points()`
منطق:
- روی `grid_vectors` خوشه‌بندی انجام می‌شود
- برای `K=1` تا `K=10` اجرا می‌شود
- اگر تعداد نقاط کمتر از 10 باشد، `max_k = len(grid_vectors)` در نظر گرفته می‌شود
برای هر `K`:
- مدل `KMeans` ساخته می‌شود
- `fit()` اجرا می‌شود
- مقدار `model.inertia_` به عنوان `SSE` ذخیره می‌شود
خروجی میانی:
```json
[
{"k": 1, "sse": 1300.5},
{"k": 2, "sse": 640.2},
{"k": 3, "sse": 390.1}
]
```
---
## 6.2) پیدا کردن Elbow Point
تابع:
- `detect_elbow_point()`
منطق فعلی:
1. از روی SSEها، شیب افت بین نقاط متوالی محاسبه می‌شود.
2. سپس تغییرات شیب محاسبه می‌شود.
3. هر جایی که افت شیب ناگهان متوقف شود، همان نقطه elbow در نظر گرفته می‌شود.
یعنی در عمل:
- ابتدا `slopes` محاسبه می‌شود
- سپس اختلاف شیب‌ها بررسی می‌شود
- بیشترین تغییر شیب به عنوان elbow انتخاب می‌شود
خروجی:
- `optimal_k`
---
## 6.3) تولید centroidهای نهایی
بعد از پیدا شدن `optimal_k`:
- مدل `KMeans` همان `K` نهایی انتخاب می‌شود
- مختصات مراکز خوشه‌ها (`cluster_centers_`) گرفته می‌شود
- از فضای متری به `lat/lon` تبدیل می‌شود
- در `centroid_points` ذخیره می‌شود
نمونه centroid:
```json
{
"sub_block_code": "sub-block-1",
"centroid_lat": 35.689321,
"centroid_lon": 51.389789
}
```
این centroidها در عمل همان مراکز بخش‌های کوچکتر زمین هستند.
---
## 7) تولید و ذخیره نمودار Elbow
### تابع
- `render_elbow_plot()`
### منطق
پس از محاسبه `inertia_curve` و `optimal_k`:
1. نمودار `K` در برابر `SSE` رسم می‌شود
2. نقطه elbow با رنگ قرمز مشخص می‌شود
3. تصویر به صورت PNG در `BytesIO` ذخیره می‌شود
4. با `ContentFile` به `ImageField` مدل `BlockSubdivision` داده می‌شود
### نکته مهم حافظه
برای جلوگیری از memory leak:
- از backend غیرتعاملی `Agg` استفاده می‌شود
- بعد از ذخیره تصویر، `plt.close(fig)` اجرا می‌شود
- buffer هم بسته می‌شود
این برای پردازش‌های همزمان سرور ضروری است.
---
## 8) جریان کامل `POST /api/soil-data/`
این endpoint الان مهم‌ترین ورودی subdivision است.
### ورودی‌های قابل پشتیبانی
- `lat`
- `lon`
- `block_count`
- `block_code`
- `farm_boundary`
### سناریوی اجرا
#### مرحله 1: اعتبارسنجی ورودی
سریالایزر `SoilDataRequestSerializer` داده را validate می‌کند.
#### مرحله 2: پیدا کردن یا ساخت location
بر اساس `lat/lon`:
- اگر location وجود نداشته باشد ساخته می‌شود
- اگر وجود داشته باشد از همان رکورد استفاده می‌شود
#### مرحله 3: آپدیت ساختار اولیه بلوک‌ها
اگر `block_count` فرق کرده باشد:
- `block_layout` دوباره با `set_input_block_count()` ساخته می‌شود
#### مرحله 4: انتخاب boundary برای subdivision
اولویت:
1. `farm_boundary` ارسالی در request
2. اگر نبود، `location.farm_boundary` ذخیره‌شده
#### مرحله 5: اجرای subdivision فقط در صورت نیاز
تابع:
- `create_or_get_block_subdivision()`
اگر رکورد `(location, block_code)` از قبل وجود داشته باشد:
- هیچ پردازش جدیدی اجرا نمی‌شود
- همان رکورد قبلی برگردانده می‌شود
اگر وجود نداشته باشد:
- grid ساخته می‌شود
- KMeans اجرا می‌شود
- elbow پیدا می‌شود
- centroidها ساخته می‌شوند
- نمودار elbow ساخته می‌شود
- همه چیز در `BlockSubdivision` ذخیره می‌شود
- `block_layout` با `sub_blocks` sync می‌شود
#### مرحله 6: response
خروجی شامل این‌هاست:
- اطلاعات `SoilLocation`
- `farm_boundary`
- `block_layout`
- `block_subdivisions`
- `depths`
فیلد `source` در response:
- `created` اگر location یا subdivision جدید ساخته شده باشد
- `database` اگر قبلاً وجود داشته باشد
---
## 9) جریان کامل `GET /api/soil-data/`
این endpoint الان فقط برای read استفاده می‌شود.
### ورودی
- `lat`
- `lon`
- `block_code` اختیاری
### رفتار
- location را از دیتابیس پیدا می‌کند
- subdivisionهای ذخیره‌شده را می‌خواند
- هیچ الگوریتمی را اجرا نمی‌کند
- هیچ `KMeans` یا پردازش هندسی انجام نمی‌دهد
### پاسخ
داده ذخیره‌شده را با `source = database` برمی‌گرداند.
اگر location پیدا نشود:
- `404`
---
## 10) نقش `serializers.py`
### `SoilDataRequestSerializer`
ورودی endpoint اصلی را مدیریت می‌کند:
- `lat`
- `lon`
- `block_count`
- `block_code`
- `farm_boundary`
### `SoilLocationResponseSerializer`
خروجی location را برمی‌گرداند:
- `id`
- `lat`
- `lon`
- `input_block_count`
- `farm_boundary`
- `block_layout`
- `block_subdivisions`
- `depths`
### `BlockSubdivisionSerializer`
خروجی subdivision را برمی‌گرداند:
- `block_code`
- `chunk_size_sqm`
- `grid_points`
- `centroid_points`
- `grid_point_count`
- `centroid_count`
- `elbow_plot`
- `status`
- `metadata`
- `created_at`
- `updated_at`
---
## 11) نقش `block_layout` در کنار `BlockSubdivision`
در معماری فعلی دو سطح ذخیره‌سازی داریم:
### 11.1) `BlockSubdivision`
منبع اصلی و canonical برای subdivision
### 11.2) `block_layout`
خلاصه‌ای از نتیجه subdivision برای مصرف سریع‌تر در response و ساختار کلی location
یعنی:
- داده دقیق در `BlockSubdivision` است
- خلاصه آن در `block_layout.blocks[].sub_blocks` قرار می‌گیرد
---
## 12) وضعیت فعلی بخش‌های قدیمی‌تر اپ
## 12.1) `tasks.py`
این فایل هنوز وجود دارد و برای fetch داده خاک به صورت قدیمی استفاده می‌شود، اما در مسیر subdivision فعلی نقشی ندارد.
## 12.2) `soil_adapters.py`
این فایل adapterهای داده خاک را نگه می‌دارد و فعلاً برای subdivision استفاده نمی‌شود.
## 12.3) `remote_sensing.py`
منطق سنجش‌ازدور را نگه می‌دارد و هنوز مستقیماً به subdivision وصل نشده است.
## 12.4) `ndvi.py`
برای endpoint مربوط به NDVI استفاده می‌شود و فعلاً از centroidهای subdivision استفاده نمی‌کند.
---
## 13) وابستگی‌های جدید
برای عملکرد فعلی subdivision این dependencyها لازم هستند:
- `scikit-learn`
- `matplotlib`
- `Pillow`
- `numpy`
### دلیل هرکدام
- `scikit-learn`: اجرای `KMeans`
- `matplotlib`: رسم elbow plot
- `Pillow`: پشتیبانی از `ImageField`
- `numpy`: وابستگی پایه `scikit-learn`
---
## 14) migrationهای مهم مرتبط با ساختار فعلی
- `0008_soillocation_block_layout.py`
- اضافه شدن `input_block_count`
- اضافه شدن `block_layout`
- `0009_blocksubdivision.py`
- اضافه شدن مدل `BlockSubdivision`
- `0010_blocksubdivision_elbow_plot.py`
- اضافه شدن فیلد `elbow_plot`
---
## 15) محدودیت‌های فعلی
چند محدودیت مهم در پیاده‌سازی فعلی وجود دارد:
- subdivision فعلاً بر اساس هندسه و خوشه‌بندی نقاط انجام می‌شود، نه بر اساس داده واقعی خاک یا NDVI
- برای هر `block_code` فرض می‌شود یک مرز مستقل از بیرون داده می‌شود
- هنوز برای هر `sub_block` رکورد location مستقل ساخته نمی‌شود
- هنوز داده خاک، هوا و NDVI برای centroidهای جدید به صورت جداگانه fetch نمی‌شود
- elbow detection فعلی heuristic-based است و هنوز نسخه پیشرفته‌تر آماری ندارد
---
## 16) تست‌های مرتبط
### `location_data/test_block_subdivision.py`
این تست‌ها بررسی می‌کنند:
- elbow detection کار می‌کند
- payload subdivision ساخته می‌شود
- grid points و centroid points خروجی دارند
### `location_data/test_soil_api.py`
این تست‌ها بررسی می‌کنند:
- `POST` subdivision جدید می‌سازد
- `GET` فقط داده ذخیره‌شده را برمی‌گرداند
- الگوریتم در `GET` دوباره اجرا نمی‌شود
---
## 17) جمع‌بندی معماری فعلی
در وضعیت فعلی، `location_data` این معماری را دارد:
### لایه 1: Location پایه
- `SoilLocation`
- `farm_boundary`
- `block_layout`
### لایه 2: Subdivision هوشمند
- `BlockSubdivision`
- grid generation
- KMeans
- elbow detection
- centroid generation
- elbow plot generation
### لایه 3: داده‌های مکمل
- `SoilDepthData`
- `NdviObservation`
- بخش‌های legacy مثل `tasks.py`
در نتیجه، اپ الان می‌تواند:
- یک بلوک با مرز مشخص بگیرد
- آن را به نقاط شبکه‌ای خرد کند
- تعداد بهینه بخش‌ها را با KMeans + Elbow پیدا کند
- centroidهای نهایی را ذخیره کند
- نمودار elbow را ذخیره کند
- و در درخواست‌های بعدی فقط همان نتیجه ذخیره‌شده را بدون پردازش مجدد برگرداند
---
## 18) پیشنهاد برای مراحل بعدی
اگر در مرحله بعد بخواهی این ساختار را توسعه بدهی، منطقی‌ترین قدم‌ها این‌ها هستند:
1. ساخت endpoint مستقل برای subdivision هر block
2. اتصال هر centroid به fetch داده خاک و هوا
3. ساخت رکورد مستقل برای هر `sub_block`
4. استفاده از NDVI یا داده سنسور برای تعیین `K` یا وزن‌دهی خوشه‌ها
5. نمایش مستقیم `elbow_plot` با URL کامل media
+972
View File
@@ -0,0 +1,972 @@
# مستند کامل اپ `location_data`
این سند، وضعیت فعلی اپ `location_data` را به صورت کامل توضیح می‌دهد:
- مدل‌های داده
- منطق business
- جریان ساخت location و block
- subdivision و خوشه‌بندی
- تولید analysis grid
- سنجش‌ازدور با openEO
- تسک‌های Celery
- APIهای فعلی
- ساختار responseها
- محدودیت‌ها و فرضیات فعلی
این فایل بر اساس کد فعلی پروژه نوشته شده است و هدفش این است که یک مرجع فنی برای توسعه‌دهنده‌های بعدی باشد.
---
## 1) هدف اپ `location_data`
اپ `location_data` در وضعیت فعلی چند نقش اصلی دارد:
1. نگه‌داری موقعیت جغرافیایی زمین با `lat/lon`
2. نگه‌داری مرز زمین یا مرز blockها
3. ساخت ساختار blockهای مزرعه
4. اجرای subdivision برای blockها
5. تولید grid analysis با ابعاد 30x30 متر
6. نگه‌داری نتایج سنجش‌ازدور روی هر grid cell
7. نگه‌داری داده‌های خاک و NDVI سنتی
8. فراهم کردن API برای:
- location data
- subdivision
- remote sensing trigger/result
- NDVI health
به صورت خلاصه، `location_data` الان فقط یک جدول مختصات نیست؛ بلکه هاب مکانی پروژه است.
---
## 2) مفاهیم اصلی دامنه
### 2.1) SoilLocation
`SoilLocation` نماینده یک location اصلی برای یک مزرعه یا مرکز زمین است.
این مدل:
- مختصات `latitude` و `longitude` را نگه می‌دارد
- `farm_boundary` را ذخیره می‌کند
- تعداد blockهای اولیه را نگه می‌دارد
- `block_layout` را نگه می‌دارد
- مبنای ارتباط با:
- `SoilDepthData`
- `BlockSubdivision`
- `AnalysisGridCell`
- `RemoteSensingRun`
- `NdviObservation`
---
### 2.2) BlockSubdivision
`BlockSubdivision` نتیجه خردسازی یک block است.
این مدل نگه می‌دارد:
- block code
- مرز همان block
- chunk size برای subdivision
- grid points اولیه
- centroid points نهایی
- elbow plot
- metadata الگوریتم
این مدل برای مرحله‌ای است که یک block را به بخش‌های کوچک‌تر تقسیم می‌کنیم.
---
### 2.3) AnalysisGridCell
`AnalysisGridCell` سلول‌های 30x30 متری تحلیل سنجش‌ازدور را نگه می‌دارد.
هر cell:
- به یک `SoilLocation` وصل است
- در صورت نیاز به یک `BlockSubdivision` وصل است
- یک `cell_code` یکتا دارد
- geometry خودش را به صورت Polygon نگه می‌دارد
- centroid خودش را نگه می‌دارد
این مدل واحد اصلی تحلیل remote sensing است.
---
### 2.4) AnalysisGridObservation
`AnalysisGridObservation` داده زمانی هر سلول را نگه می‌دارد.
برای هر cell و بازه زمانی:
- `ndvi`
- `ndwi`
- `lst_c`
- `soil_vv`
- `soil_vv_db`
- `dem_m`
- `slope_deg`
ذخیره می‌شود.
این مدل cache دیتابیسی اصلی برای نتایج openEO است.
---
### 2.5) RemoteSensingRun
`RemoteSensingRun` وضعیت یک اجرای async سنجش‌ازدور را نگه می‌دارد.
این مدل:
- به `SoilLocation` وصل است
- optionally به `BlockSubdivision` وصل است
- `block_code` و بازه زمانی را نگه می‌دارد
- status execution را نگه می‌دارد
- metadata مربوط به task/backend/result summary را نگه می‌دارد
این مدل برای tracking jobها در Celery استفاده می‌شود.
---
### 2.6) SoilDepthData
این مدل داده‌های خاک را در عمق‌های مختلف نگه می‌دارد:
- `0-5cm`
- `5-15cm`
- `15-30cm`
---
### 2.7) NdviObservation
این مدل نگه‌دارنده NDVI سنتی است که جدا از workflow جدید openEO هم هنوز وجود دارد.
---
## 3) ساختار فایل‌های مهم اپ
```text
location_data/
├── admin.py
├── apps.py
├── block_subdivision.py
├── grid_analysis.py
├── models.py
├── ndvi.py
├── openeo_service.py
├── remote_sensing.py
├── serializers.py
├── soil_adapters.py
├── tasks.py
├── urls.py
├── views.py
├── migrations/
└── tests...
```
### نقش فایل‌ها
- `models.py`: مدل‌های اصلی
- `serializers.py`: serializerهای API
- `views.py`: endpointهای DRF
- `urls.py`: routeها
- `tasks.py`: تسک‌های Celery
- `block_subdivision.py`: subdivision و elbow/kmeans
- `grid_analysis.py`: ساخت analysis grid cells
- `openeo_service.py`: لایه سرویس openEO
- `remote_sensing.py`: منطق قدیمی‌تر سنجش‌ازدور/NDVI ساده
- `soil_adapters.py`: adapterهای داده خاک
---
## 4) تنظیمات مهم
### `SUBDIVISION_CHUNK_SQM`
در `config/settings.py`:
```python
SUBDIVISION_CHUNK_SQM = int(os.environ.get("SUBDIVISION_CHUNK_SQM", "900"))
```
مقدار پیش‌فرض فعلی:
- `900`
معنا:
- grid analysis با سلول‌های `30m x 30m`
چون:
```text
step_m = sqrt(900) = 30m
```
---
## 5) مدل‌های دیتابیس و منطق آن‌ها
## 5.1) SoilLocation
فیلدهای مهم:
- `latitude`
- `longitude`
- `task_id`
- `farm_boundary`
- `input_block_count`
- `block_layout`
- `created_at`
- `updated_at`
### قید مهم
- `latitude + longitude` یکتا هستند
### block_layout
`block_layout` JSON summary کلی blockها را نگه می‌دارد.
نمونه:
```json
{
"input_block_count": 1,
"default_full_farm": true,
"algorithm_status": "completed",
"blocks": [
{
"block_code": "block-1",
"order": 1,
"source": "default",
"needs_subdivision": true,
"sub_blocks": [
{
"sub_block_code": "sub-block-1",
"centroid_lat": 35.689123,
"centroid_lon": 51.389456
}
],
"subdivision_summary": {
"chunk_size_sqm": 900,
"grid_point_count": 12,
"centroid_count": 3,
"optimal_k": 3
},
"analysis_grid_summary": {
"chunk_size_sqm": 900,
"cell_count": 18
}
}
]
}
```
`block_layout` canonical source نیست؛ بیشتر یک summary سریع برای API است.
---
## 5.2) BlockSubdivision
فیلدهای مهم:
- `soil_location`
- `block_code`
- `source_boundary`
- `chunk_size_sqm`
- `grid_points`
- `centroid_points`
- `grid_point_count`
- `centroid_count`
- `elbow_plot`
- `status`
- `metadata`
### نقش
برای هر `block_code` در هر location، نتیجه subdivision در این مدل ذخیره می‌شود.
### metadata
شامل مواردی مثل:
- `estimated_area_sqm`
- `optimal_k`
- `inertia_curve`
- `analysis_grid`
---
## 5.3) RemoteSensingRun
فیلدهای مهم:
- `soil_location`
- `block_subdivision`
- `block_code`
- `provider`
- `chunk_size_sqm`
- `temporal_start`
- `temporal_end`
- `status`
- `metadata`
- `error_message`
- `started_at`
- `finished_at`
### statusها
- `pending`
- `running`
- `success`
- `failure`
### نقش
این جدول وضعیت اجرای async را نگه می‌دارد.
---
## 5.4) AnalysisGridCell
فیلدهای مهم:
- `soil_location`
- `block_subdivision`
- `block_code`
- `cell_code`
- `chunk_size_sqm`
- `geometry`
- `centroid_lat`
- `centroid_lon`
### نقش
واحد spatial اصلی برای تحلیل remote sensing است.
### idempotency
idempotency در سطح سرویس با این شرط enforce می‌شود:
- اگر برای یک `SoilLocation + block_code + chunk_size_sqm` cellها قبلاً ساخته شده باشند، دوباره ساخته نمی‌شوند.
### geometry
به صورت GeoJSON-like polygon ذخیره می‌شود.
---
## 5.5) AnalysisGridObservation
فیلدهای مهم:
- `cell`
- `run`
- `temporal_start`
- `temporal_end`
- `ndvi`
- `ndwi`
- `lst_c`
- `soil_vv`
- `soil_vv_db`
- `dem_m`
- `slope_deg`
- `metadata`
### uniqueness
برای جلوگیری از duplicate:
- روی `cell + temporal_start + temporal_end` constraint داریم.
این باعث می‌شود cache دیتابیسی پایدار باشد.
---
## 5.6) SoilDepthData
این مدل داده‌های خاک را در عمق‌های مختلف نگه می‌دارد.
هنوز به صورت مستقیم برای هر analysis grid cell جداگانه استفاده نشده است.
---
## 5.7) NdviObservation
این مدل legacy / parallel NDVI store است.
workflow جدید openEO جایگزین آن نشده است، بلکه در کنار آن وجود دارد.
---
## 6) منطق subdivision در `block_subdivision.py`
این فایل مسئول خردسازی blockها است.
### کارهایی که انجام می‌دهد
- استخراج polygon از boundary
- تبدیل مختصات جغرافیایی به مختصات محلی متری
- تولید grid points اولیه
- اجرای KMeans برای `K=1..10`
- محاسبه SSE/Inertia
- پیدا کردن elbow point
- انتخاب centroidها
- رسم elbow plot با matplotlib
- ذخیره plot در `ImageField`
- sync کردن نتیجه با `block_layout`
### input
ممکن است boundary به شکل‌های زیر برسد:
- GeoJSON Polygon
- corners
- list مستقیم از points
### خروجی
- centroidهای نهایی block
- metadata الگوریتم
- elbow plot
---
## 7) منطق ساخت analysis grid در `grid_analysis.py`
این فایل مسئول تولید سلول‌های 30x30 متری برای تحلیل remote sensing است.
### تابع اصلی
- `create_or_get_analysis_grid_cells(...)`
### ورودی‌ها
- `location`
- optional `boundary`
- optional `block_code`
- optional `block_subdivision`
- optional `chunk_size_sqm`
### رفتار
1. chunk size را تعیین می‌کند
2. boundary را resolve می‌کند
3. polygon را extract می‌کند
4. اگر قبلاً برای همان `location + block_code + chunk_size` cell ساخته شده باشد، خروجی existing برمی‌گرداند
5. اگر نه، grid cellها ساخته می‌شوند و `AnalysisGridCell` ذخیره می‌شود
### نحوه ساخت شبکه
- polygon به دستگاه محلی متری تبدیل می‌شود
- `step_m = sqrt(chunk_size_sqm)` محاسبه می‌شود
- یک grid مستطیلی روی bounding box ساخته می‌شود
- هر cell که با polygon intersect داشته باشد نگه داشته می‌شود
### cell_code
فرمت فعلی deterministic است:
```text
loc-{location_id}__block-{block_code}__chunk-{chunk_size_sqm}__rXXXXcYYYY
```
### metadata summary
پس از ساخت grid:
- روی `BlockSubdivision.metadata["analysis_grid"]`
- و روی `SoilLocation.block_layout`
summary ذخیره می‌شود.
---
## 8) منطق openEO در `openeo_service.py`
این فایل لایه service اصلی برای تحلیل openEO است.
### backend
```text
https://openeofed.dataspace.copernicus.eu
```
### هدف
گرفتن batch metricها برای مجموعه‌ای از `AnalysisGridCell`ها.
### جریان کلی
1. اتصال و auth به openEO
2. ساخت `FeatureCollection` از cellها
3. ساخت `spatial_extent`
4. اجرای یک job per metric روی همه cellها
5. parse کردن نتیجه aggregate_spatial
6. merge کردن metricها روی map keyed by `cell_code`
### metricهای فعلی
- `ndvi` از `SENTINEL2_L2A`
- `ndwi` از `SENTINEL2_L2A`
- `lst_c` از `SENTINEL3_SLSTR_L2_LST`
- `soil_vv` از `SENTINEL1_GRD`
- `soil_vv_db` در Python از `soil_vv`
- `dem_m` از `COPERNICUS_30`
- `slope_deg` از DEM اگر backend پشتیبانی کند
### cloud mask Sentinel-2
کلاس‌های معتبر SCL:
- `4`
- `5`
- `6`
نکته مهم:
- از `isin()` استفاده نمی‌شود
- فقط logical comparison استفاده می‌شود
### aggregate_spatial
فقط از:
```python
aggregate_spatial(geometries=feature_collection, reducer="mean")
```
استفاده می‌شود.
### slope support
اگر backend `slope()` را پشتیبانی نکند:
- `slope_deg = null`
- و metadata می‌گوید `slope_supported=False`
### normalized output
خروجی نهایی به این شکل است:
```python
{
"results": {
"cell-1": {
"ndvi": ...,
"ndwi": ...,
"lst_c": ...,
"soil_vv": ...,
"soil_vv_db": ...,
"dem_m": ...,
"slope_deg": ...,
}
},
"metadata": {
"backend": "openeo",
"collections_used": [...],
"slope_supported": True,
"job_refs": {},
"failed_metrics": []
}
}
```
---
## 9) Celery workflow در `tasks.py`
### تسک قدیمی
- `fetch_soil_data_task`
برای خاک legacy است.
### workflow جدید remote sensing
تابع/تسک‌های اصلی:
- `run_remote_sensing_analysis(...)`
- `run_remote_sensing_analysis_task.delay(...)`
### ورودی task
- `soil_location_id`
- optional `block_code`
- `temporal_start`
- `temporal_end`
- optional `force_refresh`
- optional `run_id`
### رفتار task
1. `SoilLocation` را پیدا می‌کند
2. `BlockSubdivision` را اگر لازم باشد resolve می‌کند
3. `RemoteSensingRun` را create/update می‌کند
4. `AnalysisGridCell`ها را ensure می‌کند
5. اگر observation برای همان range قبلاً باشد و `force_refresh=False`، دوباره process نمی‌کند
6. در غیر این صورت، `compute_remote_sensing_metrics()` را صدا می‌زند
7. `AnalysisGridObservation`ها را upsert می‌کند
8. status run را success/failure می‌کند
### idempotency
اگر observation قبلاً برای همان:
- cell
- temporal_start
- temporal_end
وجود داشته باشد، duplicate ساخته نمی‌شود.
### retry behavior
task روی خطاهای transient مثل:
- `OpenEOExecutionError`
- `OpenEOServiceError`
- request-level failures
retry می‌کند.
روی auth failure retry نمی‌کند.
---
## 10) APIهای فعلی `location_data`
## 10.1) `GET /api/soil-data/`
کاربرد:
- فقط اطلاعات ذخیره‌شده location را برمی‌گرداند
- subdivision را rerun نمی‌کند
ورودی:
- `lat`
- `lon`
- optional `block_code`
خروجی:
- location data
- block layout
- block subdivisions
- depths
---
## 10.2) `POST /api/soil-data/`
کاربرد:
- `SoilLocation` را create/get می‌کند
- در صورت نیاز `BlockSubdivision` می‌سازد
ورودی:
- `lat`
- `lon`
- `block_count`
- `block_code`
- `farm_boundary`
خروجی:
- location کامل
- `source` = `created` یا `database`
---
## 10.3) `GET /api/soil-data/tasks/<task_id>/status/`
کاربرد:
- status task قدیمی fetch خاک
---
## 10.4) `POST /api/soil-data/ndvi-health/`
کاربرد:
- NDVI health مستقل برای farm
---
## 10.5) `POST /api/soil-data/remote-sensing/`
کاربرد:
- remote sensing analysis را queue می‌کند
- heavy work را sync اجرا نمی‌کند
ورودی:
- `lat`
- `lon`
- optional `block_code`
- `start_date`
- `end_date`
- optional `force_refresh`
رفتار:
1. location را پیدا می‌کند
2. run می‌سازد
3. Celery task را enqueue می‌کند
4. `202 Accepted` برمی‌گرداند
خروجی شامل:
- `status=processing`
- `source=processing`
- `location`
- `block_code`
- `chunk_size_sqm`
- `temporal_extent`
- `summary` خالی
- `cells=[]`
- `run`
- `task_id`
---
## 10.6) `GET /api/soil-data/remote-sensing/`
کاربرد:
- فقط cache دیتابیسی remote sensing را می‌خواند
- هیچ openEO یا subdivision sync اجرا نمی‌کند
ورودی:
- `lat`
- `lon`
- optional `block_code`
- `start_date`
- `end_date`
خروجی حالت‌ها:
### حالت 1: result موجود است
- `status=success`
- `source=database`
- summary metrics
- cells list
- run info
### حالت 2: result هنوز نیست ولی job در حال اجراست
- `status=processing`
- `source=processing`
- summary خالی
- cells empty
- run info
### حالت 3: location نیست
- `404`
---
## 11) serializerهای مهم
### `SoilDataRequestSerializer`
برای endpoint اصلی location.
### `SoilLocationResponseSerializer`
برای بازگشت location + blocks + depths.
### `BlockSubdivisionSerializer`
برای بازگشت subdivision data.
### `RemoteSensingTriggerSerializer`
برای trigger API remote sensing.
### `RemoteSensingCellObservationSerializer`
برای بازگشت per-cell remote sensing metrics.
### `RemoteSensingSummarySerializer`
برای بازگشت summary statisticها.
### `RemoteSensingRunSerializer`
برای بازگشت status run.
### `RemoteSensingResponseSerializer`
برای payload کامل remote sensing GET.
---
## 12) منطق summary statistics در remote sensing GET
در response مربوط به `GET /remote-sensing/` این فیلدها برمی‌گردند:
- `cell_count`
- `ndvi_mean`
- `ndwi_mean`
- `lst_c_mean`
- `soil_vv_db_mean`
- `dem_m_mean`
- `slope_deg_mean`
این‌ها از روی observationهای موجود در DB محاسبه می‌شوند، نه از openEO live.
---
## 13) admin
در `admin.py` الان موارد زیر رجیستر شده‌اند:
- `SoilLocation`
- `SoilDepthData`
- `BlockSubdivision`
- `RemoteSensingRun`
- `AnalysisGridCell`
- `AnalysisGridObservation`
این باعث می‌شود debugging و inspection از طریق admin ممکن باشد.
---
## 14) تست‌های فعلی
### `test_soil_api.py`
- ساخت location
- ساخت subdivision
- رفتار GET/POST location
### `test_block_subdivision.py`
- elbow detection
- payload subdivision
### `test_grid_analysis.py`
- ساخت analysis grid 30x30
- idempotency grid cells
- استفاده از boundary location
### `test_openeo_service.py`
- parse نتیجه aggregate_spatial
- merge metricها
- conversion به dB
### `test_remote_sensing_api.py`
- queue شدن remote sensing task
- processing response
- cache read response
- not found behavior
### `test_ndvi_health_api.py`
- NDVI health API
---
## 15) وابستگی‌های مهم
در `requirements.txt` dependencyهای مهم این بخش‌ها شامل این‌ها هستند:
- `scikit-learn`
- `matplotlib`
- `Pillow`
- `numpy`
- `openeo`
### نقش آن‌ها
- `scikit-learn`: KMeans
- `matplotlib`: elbow plot
- `Pillow`: ImageField support
- `numpy`: وابستگی عددی
- `openeo`: ارتباط با backend سنجش‌ازدور
---
## 16) migrationهای مهم
- `0008_soillocation_block_layout.py`
- `0009_blocksubdivision.py`
- `0010_blocksubdivision_elbow_plot.py`
- `0011_remote_sensing_models.py`
این migrationها ساختار فعلی location/subdivision/remote sensing را ساخته‌اند.
---
## 17) محدودیت‌ها و فرضیات فعلی
### محدودیت‌های فعلی
1. `block_layout` canonical source نیست و summary است.
2. subdivision و analysis grid دو لایه جدا هستند.
3. slope ممکن است روی backend همیشه پشتیبانی نشود.
4. API GET remote-sensing فقط cache می‌خواند.
5. هنوز endpoint مجزای status run نداریم.
6. grid generation از projection محلی استفاده می‌کند، نه GIS stack سنگین.
7. openEO calls فعلاً برای batch metric processing طراحی شده‌اند، نه orchestration پیچیده job lifecycle.
### فرضیات
1. مزرعه‌ها آن‌قدر کوچک هستند که local projected approximation مناسب باشد.
2. `SUBDIVISION_CHUNK_SQM=900` برای workflow فعلی درست است.
3. `cell_code` deterministic بودن برای idempotency کافی است.
4. `AnalysisGridObservation` cache اصلی remote sensing است.
---
## 18) جریان کامل داده از ابتدا تا نتیجه remote sensing
### مرحله 1: ایجاد location
کاربر `POST /api/soil-data/` را صدا می‌زند.
نتیجه:
- `SoilLocation` ساخته می‌شود
- `farm_boundary` ذخیره می‌شود
- block layout ساخته می‌شود
- در صورت نیاز `BlockSubdivision` ساخته می‌شود
### مرحله 2: تولید analysis grid
وقتی task remote sensing اجرا می‌شود:
- اگر cellها قبلاً نباشند، `AnalysisGridCell`ها ساخته می‌شوند
### مرحله 3: اجرای openEO
Celery task:
- FeatureCollection از cellها می‌سازد
- metricها را batch اجرا می‌کند
- نتیجه را parse می‌کند
### مرحله 4: ذخیره observation
برای هر cell:
- یک `AnalysisGridObservation` برای بازه زمانی موردنظر ذخیره/آپدیت می‌شود
### مرحله 5: بازگشت نتیجه از API
کاربر `GET /api/soil-data/remote-sensing/` را صدا می‌زند.
سیستم:
- فقط DB را می‌خواند
- summary می‌سازد
- cells را برمی‌گرداند
---
## 19) پیشنهاد توسعه بعدی
برای ادامه توسعه، این‌ها منطقی‌ترین قدم‌ها هستند:
1. ساخت endpoint مستقل status برای `RemoteSensingRun`
2. اضافه کردن pagination برای cell responseها
3. اضافه کردن job reference واقعی openEO در metadata
4. پشتیبانی از چند resolution دیگر غیر از 30x30
5. ساخت serializer/model جدا برای summaryهای precomputed
6. اضافه کردن نمودارها یا aggregationهای block-level
7. اتصال remote sensing resultها به recommendation engine
---
## 20) جمع‌بندی نهایی
اپ `location_data` الان یک سیستم چندلایه است:
### لایه مکانی پایه
- `SoilLocation`
- `farm_boundary`
- `block_layout`
### لایه subdivision
- `BlockSubdivision`
- KMeans
- elbow plot
### لایه grid analysis
- `AnalysisGridCell`
### لایه observation
- `AnalysisGridObservation`
- `RemoteSensingRun`
### لایه سرویس
- `block_subdivision.py`
- `grid_analysis.py`
- `openeo_service.py`
- `tasks.py`
### لایه API
- `SoilDataView`
- `RemoteSensingAnalysisView`
- `NdviHealthView`
در نتیجه، `location_data` الان از یک app ساده location عبور کرده و به یک زیرسیستم کامل spatial + remote sensing تبدیل شده است.
+3 -3
View File
@@ -3,7 +3,7 @@ from datetime import date
def load_farm_context(sensor_id: str) -> dict | None: def load_farm_context(sensor_id: str) -> dict | None:
from irrigation.models import IrrigationMethod from irrigation.models import IrrigationMethod
from location_data.models import SoilDepthData from location_data.satellite_snapshot import build_location_block_satellite_snapshots
from farm_data.models import SensorData from farm_data.models import SensorData
from farm_data.services import get_farm_plant_snapshots from farm_data.services import get_farm_plant_snapshots
from weather.models import WeatherForecast from weather.models import WeatherForecast
@@ -16,7 +16,7 @@ def load_farm_context(sensor_id: str) -> dict | None:
return None return None
location = sensor.center_location location = sensor.center_location
depths = list(SoilDepthData.objects.filter(soil_location=location).order_by("depth_label")) satellite_snapshots = build_location_block_satellite_snapshots(location)
forecasts = list( forecasts = list(
WeatherForecast.objects.filter(location=location, forecast_date__gte=date.today()).order_by("forecast_date")[:7] WeatherForecast.objects.filter(location=location, forecast_date__gte=date.today()).order_by("forecast_date")[:7]
) )
@@ -26,7 +26,7 @@ def load_farm_context(sensor_id: str) -> dict | None:
return { return {
"sensor": sensor, "sensor": sensor,
"location": location, "location": location,
"depths": depths, "satellite_snapshots": satellite_snapshots,
"forecasts": forecasts, "forecasts": forecasts,
"history": [], "history": [],
"plants": plants, "plants": plants,
+5 -2
View File
@@ -1,6 +1,5 @@
from rest_framework import serializers from rest_framework import serializers
from location_data.serializers import SoilDepthDataSerializer
from irrigation.models import IrrigationMethod from irrigation.models import IrrigationMethod
from irrigation.serializers import IrrigationMethodSerializer from irrigation.serializers import IrrigationMethodSerializer
from weather.models import WeatherForecast from weather.models import WeatherForecast
@@ -19,6 +18,7 @@ class SensorDataUpdateSerializer(serializers.Serializer):
farm_uuid = serializers.UUIDField(required=True) farm_uuid = serializers.UUIDField(required=True)
farm_boundary = serializers.JSONField(required=True) farm_boundary = serializers.JSONField(required=True)
block_count = serializers.IntegerField(required=False, min_value=1, default=1)
sensor_key = serializers.CharField(required=False, default=DEFAULT_SENSOR_KEY) sensor_key = serializers.CharField(required=False, default=DEFAULT_SENSOR_KEY)
sensor_payload = serializers.JSONField(required=False) sensor_payload = serializers.JSONField(required=False)
plant_ids = serializers.ListField( plant_ids = serializers.ListField(
@@ -40,6 +40,7 @@ class SensorDataUpdateSerializer(serializers.Serializer):
known_fields = { known_fields = {
"farm_uuid", "farm_uuid",
"farm_boundary", "farm_boundary",
"block_count",
"sensor_key", "sensor_key",
"sensor_payload", "sensor_payload",
"plant_ids", "plant_ids",
@@ -150,6 +151,8 @@ class FarmCenterLocationSerializer(serializers.Serializer):
lat = serializers.DecimalField(max_digits=9, decimal_places=6) lat = serializers.DecimalField(max_digits=9, decimal_places=6)
lon = serializers.DecimalField(max_digits=9, decimal_places=6) lon = serializers.DecimalField(max_digits=9, decimal_places=6)
farm_boundary = serializers.JSONField() farm_boundary = serializers.JSONField()
input_block_count = serializers.IntegerField()
block_layout = serializers.JSONField()
class WeatherForecastDetailSerializer(serializers.ModelSerializer): class WeatherForecastDetailSerializer(serializers.ModelSerializer):
@@ -173,7 +176,7 @@ class WeatherForecastDetailSerializer(serializers.ModelSerializer):
class FarmSoilPayloadSerializer(serializers.Serializer): class FarmSoilPayloadSerializer(serializers.Serializer):
resolved_metrics = serializers.JSONField() resolved_metrics = serializers.JSONField()
metric_sources = serializers.JSONField() metric_sources = serializers.JSONField()
depths = SoilDepthDataSerializer(many=True) satellite_snapshots = serializers.JSONField()
class PlantCatalogSnapshotSerializer(serializers.ModelSerializer): class PlantCatalogSnapshotSerializer(serializers.ModelSerializer):
+55 -68
View File
@@ -12,11 +12,13 @@ from django.utils.dateparse import parse_datetime
import requests import requests
from location_data.models import SoilLocation from location_data.block_subdivision import create_or_get_block_subdivision
from location_data.serializers import SoilDepthDataSerializer from location_data.models import BlockSubdivision, SoilLocation
from location_data.tasks import fetch_soil_data_for_coordinates from location_data.satellite_snapshot import (
build_location_block_satellite_snapshots,
build_location_satellite_snapshot,
)
from irrigation.serializers import IrrigationMethodSerializer from irrigation.serializers import IrrigationMethodSerializer
from weather.services import update_weather_for_location
from weather.models import WeatherForecast from weather.models import WeatherForecast
from .models import ( from .models import (
@@ -29,7 +31,6 @@ from .models import (
from .serializers import PlantCatalogSnapshotSerializer, WeatherForecastDetailSerializer from .serializers import PlantCatalogSnapshotSerializer, WeatherForecastDetailSerializer
DEPTH_PRIORITY = ["0-5cm", "5-15cm", "15-30cm"]
DECIMAL_PRECISION = Decimal("0.000001") DECIMAL_PRECISION = Decimal("0.000001")
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@@ -231,7 +232,7 @@ def get_canonical_farm_record(farm_uuid: str) -> SensorData | None:
"weather_forecast", "weather_forecast",
"irrigation_method", "irrigation_method",
) )
.prefetch_related("plant_assignments__plant", "center_location__depths") .prefetch_related("plant_assignments__plant")
.filter(farm_uuid=farm_uuid) .filter(farm_uuid=farm_uuid)
.first() .first()
) )
@@ -461,14 +462,12 @@ def get_farm_details(farm_uuid: str):
center_location.weather_forecasts.order_by("-forecast_date", "-id").first() center_location.weather_forecasts.order_by("-forecast_date", "-id").first()
) )
depths = list(center_location.depths.all()) latest_satellite = build_location_satellite_snapshot(center_location)
depths.sort(key=lambda item: DEPTH_PRIORITY.index(item.depth_label) if item.depth_label in DEPTH_PRIORITY else 99) soil_metrics = dict(latest_satellite.get("resolved_metrics") or {})
soil_metrics = _surface_soil_metrics(depths)
sensor_metrics, sensor_metric_sources = _resolve_sensor_metrics(farm.sensor_payload) sensor_metrics, sensor_metric_sources = _resolve_sensor_metrics(farm.sensor_payload)
resolved_metrics = dict(soil_metrics) resolved_metrics = dict(soil_metrics)
metric_sources = {key: "soil" for key in soil_metrics} metric_sources = {key: "remote_sensing" for key in soil_metrics}
for key, value in sensor_metrics.items(): for key, value in sensor_metrics.items():
resolved_metrics[key] = value resolved_metrics[key] = value
metric_sources[key] = sensor_metric_sources[key] metric_sources[key] = sensor_metric_sources[key]
@@ -482,6 +481,8 @@ def get_farm_details(farm_uuid: str):
"lat": center_location.latitude, "lat": center_location.latitude,
"lon": center_location.longitude, "lon": center_location.longitude,
"farm_boundary": center_location.farm_boundary, "farm_boundary": center_location.farm_boundary,
"input_block_count": center_location.input_block_count,
"block_layout": center_location.block_layout,
}, },
"weather": WeatherForecastDetailSerializer(weather).data if weather else None, "weather": WeatherForecastDetailSerializer(weather).data if weather else None,
"sensor_payload": farm.sensor_payload or {}, "sensor_payload": farm.sensor_payload or {},
@@ -489,7 +490,7 @@ def get_farm_details(farm_uuid: str):
"soil": { "soil": {
"resolved_metrics": resolved_metrics, "resolved_metrics": resolved_metrics,
"metric_sources": metric_sources, "metric_sources": metric_sources,
"depths": SoilDepthDataSerializer(depths, many=True).data, "satellite_snapshots": build_location_block_satellite_snapshots(center_location),
}, },
"plant_ids": [plant.backend_plant_id for plant in plant_snapshots], "plant_ids": [plant.backend_plant_id for plant in plant_snapshots],
"plants": PlantCatalogSnapshotSerializer(plant_snapshots, many=True).data, "plants": PlantCatalogSnapshotSerializer(plant_snapshots, many=True).data,
@@ -516,7 +517,10 @@ def get_farm_details(farm_uuid: str):
} }
def resolve_center_location_from_boundary(farm_boundary: dict | list) -> SoilLocation: def resolve_center_location_from_boundary(
farm_boundary: dict | list,
block_count: int = 1,
) -> SoilLocation:
""" """
مرز مزرعه را میگیرد، مرکز را محاسبه میکند و رکورد SoilLocation را مرز مزرعه را میگیرد، مرکز را محاسبه میکند و رکورد SoilLocation را
ایجاد/بهروزرسانی میکند. ایجاد/بهروزرسانی میکند.
@@ -530,13 +534,35 @@ def resolve_center_location_from_boundary(farm_boundary: dict | list) -> SoilLoc
raise ValueError("farm_boundary باید حداقل 3 گوشه معتبر داشته باشد.") raise ValueError("farm_boundary باید حداقل 3 گوشه معتبر داشته باشد.")
center_lat, center_lon = _compute_polygon_centroid(normalized_points) center_lat, center_lon = _compute_polygon_centroid(normalized_points)
serialized_boundary = _serialize_boundary(farm_boundary)
normalized_block_count = max(int(block_count or 1), 1)
with transaction.atomic(): with transaction.atomic():
location, _ = SoilLocation.objects.update_or_create( location, created = SoilLocation.objects.get_or_create(
latitude=center_lat, latitude=center_lat,
longitude=center_lon, longitude=center_lon,
defaults={"farm_boundary": _serialize_boundary(farm_boundary)}, defaults={
"farm_boundary": serialized_boundary,
"input_block_count": normalized_block_count,
},
) )
if created:
location.set_input_block_count(normalized_block_count)
location.farm_boundary = serialized_boundary
location.save(update_fields=["farm_boundary", "input_block_count", "block_layout", "updated_at"])
if normalized_block_count == 1:
_create_initial_block_subdivision(location, serialized_boundary)
else:
changed_fields = []
if location.farm_boundary != serialized_boundary:
location.farm_boundary = serialized_boundary
changed_fields.append("farm_boundary")
if location.input_block_count != normalized_block_count:
location.set_input_block_count(normalized_block_count)
changed_fields.extend(["input_block_count", "block_layout"])
if changed_fields:
changed_fields.append("updated_at")
location.save(update_fields=changed_fields)
return location return location
@@ -550,36 +576,25 @@ def resolve_weather_for_location(location: SoilLocation) -> WeatherForecast | No
def ensure_location_and_weather_data(location: SoilLocation) -> tuple[SoilLocation, WeatherForecast | None]: def ensure_location_and_weather_data(location: SoilLocation) -> tuple[SoilLocation, WeatherForecast | None]:
""" """
اگر داده خاک یا آبوهوا برای location موجود نباشد، از سرویس مربوطه در فاز فعلی برای location_data و بلوکها هیچ ریکوئست خارجی زده نمیشود
واکشی و در دیتابیس ذخیره میشود. و فقط دادههای محلی موجود برگردانده میشوند.
""" """
if not location.is_complete:
try:
soil_result = fetch_soil_data_for_coordinates(
latitude=float(location.latitude),
longitude=float(location.longitude),
)
except Exception as exc:
raise ExternalDataSyncError(f"خطا در واکشی داده خاک: {exc}") from exc
if soil_result.get("status") != "completed":
raise ExternalDataSyncError(
soil_result.get("error") or "واکشی داده خاک کامل نشد."
)
location.refresh_from_db()
weather_forecast = resolve_weather_for_location(location) weather_forecast = resolve_weather_for_location(location)
if weather_forecast is None:
weather_result = update_weather_for_location(location)
if weather_result.get("status") not in {"success", "no_data"}:
raise ExternalDataSyncError(
weather_result.get("error") or "واکشی داده آب‌وهوا کامل نشد."
)
weather_forecast = resolve_weather_for_location(location)
return location, weather_forecast return location, weather_forecast
def _create_initial_block_subdivision(
location: SoilLocation,
block_boundary: dict | list,
) -> BlockSubdivision:
subdivision, _created = create_or_get_block_subdivision(
location=location,
block_code="block-1",
boundary=block_boundary,
)
return subdivision
def _resolve_sensor_metrics(sensor_payload: dict | None) -> tuple[dict, dict]: def _resolve_sensor_metrics(sensor_payload: dict | None) -> tuple[dict, dict]:
if not isinstance(sensor_payload, dict): if not isinstance(sensor_payload, dict):
return {}, {} return {}, {}
@@ -659,34 +674,6 @@ def _normalize_numeric_result(value: float, source_values: list[object]) -> int
return float(Decimal(str(value)).quantize(Decimal("0.0001"), rounding=ROUND_HALF_UP)) return float(Decimal(str(value)).quantize(Decimal("0.0001"), rounding=ROUND_HALF_UP))
def _surface_soil_metrics(depths) -> dict:
if not depths:
return {}
primary_depth = depths[0]
fields = [
"bdod",
"cec",
"cfvo",
"clay",
"nitrogen",
"ocd",
"ocs",
"phh2o",
"sand",
"silt",
"soc",
"wv0010",
"wv0033",
"wv1500",
]
return {
field: getattr(primary_depth, field)
for field in fields
if getattr(primary_depth, field) is not None
}
def _extract_boundary_points(boundary: dict | list) -> list: def _extract_boundary_points(boundary: dict | list) -> list:
if isinstance(boundary, dict): if isinstance(boundary, dict):
if boundary.get("type") == "Polygon": if boundary.get("type") == "Polygon":
+59 -91
View File
@@ -5,7 +5,7 @@ import uuid
from django.test import TestCase from django.test import TestCase
from rest_framework.test import APIClient from rest_framework.test import APIClient
from location_data.models import SoilDepthData, SoilLocation from location_data.models import BlockSubdivision, SoilLocation
from farm_data.models import PlantCatalogSnapshot, SensorData, SensorParameter from farm_data.models import PlantCatalogSnapshot, SensorData, SensorParameter
from farm_data.services import ( from farm_data.services import (
assign_farm_plants_from_backend_ids, assign_farm_plants_from_backend_ids,
@@ -42,19 +42,6 @@ class FarmDetailApiTests(TestCase):
longitude="51.400000", longitude="51.400000",
farm_boundary={"type": "Polygon", "coordinates": []}, farm_boundary={"type": "Polygon", "coordinates": []},
) )
SoilDepthData.objects.create(
soil_location=self.location,
depth_label="0-5cm",
clay=22.0,
nitrogen=10.0,
sand=40.0,
)
SoilDepthData.objects.create(
soil_location=self.location,
depth_label="5-15cm",
clay=18.0,
nitrogen=8.0,
)
self.weather = WeatherForecast.objects.create( self.weather = WeatherForecast.objects.create(
location=self.location, location=self.location,
forecast_date=date(2026, 4, 10), forecast_date=date(2026, 4, 10),
@@ -123,9 +110,7 @@ class FarmDetailApiTests(TestCase):
self.assertEqual(resolved_metrics["nitrogen"], 99.0) self.assertEqual(resolved_metrics["nitrogen"], 99.0)
self.assertEqual(metric_sources["nitrogen"]["type"], "sensor") self.assertEqual(metric_sources["nitrogen"]["type"], "sensor")
self.assertEqual(metric_sources["nitrogen"]["strategy"], "single_value") self.assertEqual(metric_sources["nitrogen"]["strategy"], "single_value")
self.assertEqual(resolved_metrics["clay"], 22.0) self.assertEqual(payload["soil"]["satellite_snapshots"], [])
self.assertEqual(metric_sources["clay"], "soil")
self.assertEqual(len(payload["soil"]["depths"]), 2)
self.assertCountEqual(payload["plant_ids"], [self.plant1.backend_plant_id, self.plant2.backend_plant_id]) self.assertCountEqual(payload["plant_ids"], [self.plant1.backend_plant_id, self.plant2.backend_plant_id])
self.assertEqual(len(payload["plants"]), 2) self.assertEqual(len(payload["plants"]), 2)
returned_plants = {item["id"]: item for item in payload["plants"]} returned_plants = {item["id"]: item for item in payload["plants"]}
@@ -204,21 +189,6 @@ class FarmDataUpsertApiTests(TestCase):
latitude="35.710000", latitude="35.710000",
longitude="51.410000", longitude="51.410000",
) )
SoilDepthData.objects.create(
soil_location=self.location,
depth_label="0-5cm",
clay=20.0,
)
SoilDepthData.objects.create(
soil_location=self.location,
depth_label="5-15cm",
clay=18.0,
)
SoilDepthData.objects.create(
soil_location=self.location,
depth_label="15-30cm",
clay=16.0,
)
self.boundary = square_boundary_for_center(35.71, 51.41) self.boundary = square_boundary_for_center(35.71, 51.41)
self.weather = WeatherForecast.objects.create( self.weather = WeatherForecast.objects.create(
location=self.location, location=self.location,
@@ -312,16 +282,7 @@ class FarmDataUpsertApiTests(TestCase):
self.assertEqual(response.status_code, 400) self.assertEqual(response.status_code, 400)
self.assertIn("farm_uuid", response.json()["data"]) self.assertIn("farm_uuid", response.json()["data"])
@patch("farm_data.services.update_weather_for_location", return_value={"status": "no_data"}) def test_post_creates_center_location_from_boundary_when_missing(self):
@patch(
"farm_data.services.fetch_soil_data_for_coordinates",
return_value={"status": "completed", "depths": []},
)
def test_post_creates_center_location_from_boundary_when_missing(
self,
_mock_fetch_soil_data_for_coordinates,
_mock_update_weather_for_location,
):
farm_uuid = uuid.uuid4() farm_uuid = uuid.uuid4()
response = self.client.post( response = self.client.post(
@@ -347,6 +308,60 @@ class FarmDataUpsertApiTests(TestCase):
self.assertEqual(str(farm.center_location.latitude), "50.010000") self.assertEqual(str(farm.center_location.latitude), "50.010000")
self.assertEqual(str(farm.center_location.longitude), "50.010000") self.assertEqual(str(farm.center_location.longitude), "50.010000")
self.assertIsNone(farm.weather_forecast_id) self.assertIsNone(farm.weather_forecast_id)
self.assertEqual(farm.center_location.input_block_count, 1)
self.assertEqual(len(farm.center_location.block_layout["blocks"]), 1)
subdivision = BlockSubdivision.objects.get(soil_location=farm.center_location, block_code="block-1")
self.assertGreater(subdivision.grid_point_count, 0)
self.assertEqual(subdivision.grid_point_count, subdivision.centroid_count)
def test_post_persists_requested_block_count_on_center_location(self):
farm_uuid = uuid.uuid4()
response = self.client.post(
"/api/farm-data/",
data={
"farm_uuid": str(farm_uuid),
"farm_boundary": self.boundary,
"block_count": 3,
"sensor_payload": {"sensor-7-1": {"soil_moisture": 40.0}},
},
format="json",
)
self.assertEqual(response.status_code, 201)
farm = SensorData.objects.get(farm_uuid=farm_uuid)
self.assertEqual(farm.center_location.input_block_count, 3)
self.assertEqual(len(farm.center_location.block_layout["blocks"]), 3)
self.assertFalse(
BlockSubdivision.objects.filter(soil_location=farm.center_location).exists()
)
def test_resolve_center_location_runs_subdivision_only_on_creation(self):
boundary = square_boundary_for_center(35.75, 51.45)
first_location = resolve_center_location_from_boundary(boundary, block_count=1)
first_subdivision = BlockSubdivision.objects.get(
soil_location=first_location,
block_code="block-1",
)
second_location = resolve_center_location_from_boundary(boundary, block_count=1)
self.assertEqual(first_location.id, second_location.id)
self.assertEqual(
BlockSubdivision.objects.filter(
soil_location=second_location,
block_code="block-1",
).count(),
1,
)
self.assertEqual(
BlockSubdivision.objects.get(
soil_location=second_location,
block_code="block-1",
).id,
first_subdivision.id,
)
def test_resolve_center_location_uses_geometric_centroid_for_concave_polygon(self): def test_resolve_center_location_uses_geometric_centroid_for_concave_polygon(self):
location = resolve_center_location_from_boundary( location = resolve_center_location_from_boundary(
@@ -368,53 +383,10 @@ class FarmDataUpsertApiTests(TestCase):
self.assertEqual(str(location.latitude), "2.078947") self.assertEqual(str(location.latitude), "2.078947")
self.assertEqual(str(location.longitude), "2.078947") self.assertEqual(str(location.longitude), "2.078947")
@patch("farm_data.services.update_weather_for_location") def test_post_keeps_missing_location_without_external_sync(self):
@patch("farm_data.services.fetch_soil_data_for_coordinates")
def test_post_fetches_missing_location_and_weather_data(
self,
mock_fetch_soil_data_for_coordinates,
mock_update_weather_for_location,
):
missing_boundary = square_boundary_for_center(36.0, 52.0) missing_boundary = square_boundary_for_center(36.0, 52.0)
farm_uuid = uuid.uuid4() farm_uuid = uuid.uuid4()
def soil_side_effect(latitude, longitude, task_id="", progress_callback=None):
location = SoilLocation.objects.get(
latitude="36.000000",
longitude="52.000000",
)
SoilDepthData.objects.update_or_create(
soil_location=location,
depth_label="0-5cm",
defaults={"clay": 20.0},
)
SoilDepthData.objects.update_or_create(
soil_location=location,
depth_label="5-15cm",
defaults={"clay": 18.0},
)
SoilDepthData.objects.update_or_create(
soil_location=location,
depth_label="15-30cm",
defaults={"clay": 16.0},
)
return {"status": "completed", "location_id": location.id, "depths": ["0-5cm", "5-15cm", "15-30cm"]}
def weather_side_effect(location):
WeatherForecast.objects.update_or_create(
location=location,
forecast_date=date(2026, 4, 12),
defaults={
"temperature_min": 10.0,
"temperature_max": 20.0,
"temperature_mean": 15.0,
},
)
return {"status": "success", "location_id": location.id, "days_updated": 1}
mock_fetch_soil_data_for_coordinates.side_effect = soil_side_effect
mock_update_weather_for_location.side_effect = weather_side_effect
response = self.client.post( response = self.client.post(
"/api/farm-data/", "/api/farm-data/",
data={ data={
@@ -426,9 +398,5 @@ class FarmDataUpsertApiTests(TestCase):
) )
self.assertEqual(response.status_code, 201) self.assertEqual(response.status_code, 201)
mock_fetch_soil_data_for_coordinates.assert_called_once()
mock_update_weather_for_location.assert_called_once()
farm = SensorData.objects.get(farm_uuid=farm_uuid) farm = SensorData.objects.get(farm_uuid=farm_uuid)
self.assertEqual(farm.center_location.depths.count(), 3) self.assertIsNone(farm.weather_forecast_id)
self.assertIsNotNone(farm.weather_forecast_id)
+8 -1
View File
@@ -83,6 +83,7 @@ class FarmDataUpsertView(APIView):
"`farm_uuid` باید از API ارسال شود و هرگز خودکار ساخته نمی‌شود. " "`farm_uuid` باید از API ارسال شود و هرگز خودکار ساخته نمی‌شود. "
"مرز مزرعه را می‌گیرد، مرکز زمین را خودش محاسبه و در location_data ذخیره می‌کند. " "مرز مزرعه را می‌گیرد، مرکز زمین را خودش محاسبه و در location_data ذخیره می‌کند. "
"رکورد آب‌وهوا هم از همان مرکز زمین به‌صورت خودکار پیدا می‌شود. " "رکورد آب‌وهوا هم از همان مرکز زمین به‌صورت خودکار پیدا می‌شود. "
"در این مرحله برای location_data هیچ ریکوئست خارجی برای بلوک‌ها زده نمی‌شود. "
'خوانش‌ها داخل `sensor_payload` مثل `{"sensor-7-1": {...}}` نگه‌داری می‌شوند.' 'خوانش‌ها داخل `sensor_payload` مثل `{"sensor-7-1": {...}}` نگه‌داری می‌شوند.'
), ),
request=SensorDataUpdateSerializer, request=SensorDataUpdateSerializer,
@@ -121,6 +122,7 @@ class FarmDataUpsertView(APIView):
] ]
], ],
}, },
"block_count": 3,
"sensor_payload": { "sensor_payload": {
"sensor-7-1": { "sensor-7-1": {
"soil_moisture": 45.2, "soil_moisture": 45.2,
@@ -147,6 +149,7 @@ class FarmDataUpsertView(APIView):
{"lat": 35.7200, "lon": 51.3900}, {"lat": 35.7200, "lon": 51.3900},
] ]
}, },
"block_count": 2,
"sensor_payload": { "sensor_payload": {
"sensor-7-1": { "sensor-7-1": {
"soil_moisture": 45.2, "soil_moisture": 45.2,
@@ -172,11 +175,15 @@ class FarmDataUpsertView(APIView):
farm_uuid = serializer.validated_data["farm_uuid"] farm_uuid = serializer.validated_data["farm_uuid"]
farm_boundary = serializer.validated_data["farm_boundary"] farm_boundary = serializer.validated_data["farm_boundary"]
block_count = serializer.validated_data.get("block_count", 1)
plant_ids = serializer.validated_data.get("plant_ids") plant_ids = serializer.validated_data.get("plant_ids")
irrigation_method_id = serializer.validated_data.get("irrigation_method_id") irrigation_method_id = serializer.validated_data.get("irrigation_method_id")
sensor_payload = serializer.validated_data.get("sensor_payload", {}) sensor_payload = serializer.validated_data.get("sensor_payload", {})
try: try:
center_location = resolve_center_location_from_boundary(farm_boundary) center_location = resolve_center_location_from_boundary(
farm_boundary,
block_count=block_count,
)
except ValueError as exc: except ValueError as exc:
return Response( return Response(
{"code": 400, "msg": "داده نامعتبر.", "data": {"farm_boundary": [str(exc)]}}, {"code": 400, "msg": "داده نامعتبر.", "data": {"farm_boundary": [str(exc)]}},
+1 -21
View File
@@ -7,7 +7,7 @@ import uuid
from django.test import TransactionTestCase from django.test import TransactionTestCase
from rest_framework.test import APIClient from rest_framework.test import APIClient
from location_data.models import NdviObservation, SoilDepthData, SoilLocation from location_data.models import NdviObservation, SoilLocation
from weather.models import WeatherForecast from weather.models import WeatherForecast
@@ -55,32 +55,12 @@ class IntegrationAPITestCase(TransactionTestCase):
lat: float, lat: float,
lon: float, lon: float,
boundary: dict[str, Any] | None = None, boundary: dict[str, Any] | None = None,
clay_values: tuple[float, float, float] = (22.0, 18.0, 15.0),
nitrogen_values: tuple[float, float, float] = (14.0, 11.0, 8.0),
) -> SoilLocation: ) -> SoilLocation:
location = SoilLocation.objects.create( location = SoilLocation.objects.create(
latitude=f"{lat:.6f}", latitude=f"{lat:.6f}",
longitude=f"{lon:.6f}", longitude=f"{lon:.6f}",
farm_boundary=boundary or square_boundary(lat, lon), farm_boundary=boundary or square_boundary(lat, lon),
) )
depth_labels = (
SoilDepthData.DEPTH_0_5,
SoilDepthData.DEPTH_5_15,
SoilDepthData.DEPTH_15_30,
)
for index, depth_label in enumerate(depth_labels):
SoilDepthData.objects.create(
soil_location=location,
depth_label=depth_label,
clay=clay_values[index],
nitrogen=nitrogen_values[index],
sand=40.0 - (index * 2),
silt=25.0 + index,
phh2o=6.6 + (index * 0.1),
wv0010=0.41 - (index * 0.02),
wv0033=0.28 - (index * 0.01),
wv1500=0.12 - (index * 0.01),
)
return location return location
def seed_weather_forecasts( def seed_weather_forecasts(
@@ -88,36 +88,7 @@ class ReportingAndAiJourneyTests(IntegrationAPITestCase):
) )
self.assertEqual(soil_response.status_code, 200) self.assertEqual(soil_response.status_code, 200)
self.assertEqual(soil_response.json()["data"]["source"], "database") self.assertEqual(soil_response.json()["data"]["source"], "database")
self.assertEqual(len(soil_response.json()["data"]["depths"]), 3) self.assertIn("satellite_snapshots", soil_response.json()["data"])
queued_location = {}
def soil_delay_stub(lat: float, lon: float):
location = self.create_complete_location(lat=lat, lon=lon)
queued_location["id"] = location.id
return SimpleNamespace(id="soil-task-1")
with patch("location_data.views.fetch_soil_data_task.delay", side_effect=soil_delay_stub):
queued_response = self.client.post(
"/api/soil-data/",
data={"lat": "36.100000", "lon": "52.200000"},
format="json",
)
self.assertEqual(queued_response.status_code, 202)
with patch(
"celery.result.AsyncResult",
return_value=FakeAsyncResult(
state="SUCCESS",
result={"status": "completed", "location_id": queued_location["id"]},
),
):
soil_status_response = self.client.get("/api/soil-data/tasks/soil-task-1/status/")
self.assertEqual(soil_status_response.status_code, 200)
self.assertEqual(
soil_status_response.json()["data"]["result"]["id"],
queued_location["id"],
)
weather_response = self.client.post( weather_response = self.client.post(
"/api/weather/farm-card/", "/api/weather/farm-card/",
+9
View File
@@ -0,0 +1,9 @@
# `location_data` خیلی خلاصه
- ورودی این اپ، مختصات گوشه‌های کل زمین و boundary هر بلوکِ تعریف‌شده توسط کشاورز است.
- هر بلوک جداگانه به grid های `30×30` متر تبدیل می‌شود و در `AnalysisGridCell` ذخیره می‌شود.
- برای همه grid های همان بلوک، داده ماهواره‌ای یک بازه زمانی از `openEO` گرفته می‌شود و میانگین همان بازه به عنوان وضعیت نهایی هر grid در `AnalysisGridObservation` ذخیره می‌شود.
- feature های اصلی فعلی: `ndvi`, `ndwi`, `lst_c`, `soil_vv`, `soil_vv_db`, `dem_m`, `slope_deg`.
- بعد برای هر بلوک، روی feature های grid ها `KMeans` اجرا می‌شود؛ برای هر `K` مقدار `SSE / Inertia` ذخیره می‌شود و نمودار `K-SSE` هم ساخته می‌شود.
- نقطه elbow همان تعداد مناسب زیر‌بلوک‌ها است و نتیجه در `RemoteSensingSubdivisionResult` و خود `BlockSubdivision` ذخیره می‌شود.
- جریان قدیمی depth-based soil data و `soil_adapters.py` دیگر در workflow این اپ جایی ندارد.
+122 -10
View File
@@ -1,11 +1,29 @@
from django.contrib import admin from django.contrib import admin
from .models import SoilDepthData, SoilLocation from .models import (
AnalysisGridCell,
AnalysisGridObservation,
BlockSubdivision,
RemoteSensingClusterAssignment,
RemoteSensingRun,
RemoteSensingSubdivisionResult,
SoilLocation,
)
class SoilDepthDataInline(admin.TabularInline): class BlockSubdivisionInline(admin.TabularInline):
model = SoilDepthData model = BlockSubdivision
extra = 0 extra = 0
readonly_fields = ("depth_label", "bdod", "cec", "cfvo", "clay", "nitrogen", "ocd", "ocs", "phh2o", "sand", "silt", "soc", "wv0010", "wv0033", "wv1500") readonly_fields = (
"block_code",
"chunk_size_sqm",
"grid_point_count",
"centroid_count",
"status",
"created_at",
"updated_at",
)
fields = readonly_fields
show_change_link = True
@admin.register(SoilLocation) @admin.register(SoilLocation)
@@ -14,11 +32,105 @@ class SoilLocationAdmin(admin.ModelAdmin):
list_filter = ("created_at",) list_filter = ("created_at",)
search_fields = ("latitude", "longitude") search_fields = ("latitude", "longitude")
readonly_fields = ("created_at", "updated_at") readonly_fields = ("created_at", "updated_at")
inlines = [SoilDepthDataInline] inlines = [BlockSubdivisionInline]
@admin.register(SoilDepthData) @admin.register(BlockSubdivision)
class SoilDepthDataAdmin(admin.ModelAdmin): class BlockSubdivisionAdmin(admin.ModelAdmin):
list_display = ("id", "soil_location", "depth_label", "bdod", "cec", "phh2o", "clay", "sand", "silt") list_display = (
list_filter = ("depth_label",) "id",
search_fields = ("soil_location__latitude", "soil_location__longitude") "soil_location",
"block_code",
"chunk_size_sqm",
"grid_point_count",
"centroid_count",
"status",
"updated_at",
)
list_filter = ("status", "chunk_size_sqm", "created_at")
search_fields = ("block_code", "soil_location__latitude", "soil_location__longitude")
readonly_fields = ("created_at", "updated_at")
@admin.register(RemoteSensingRun)
class RemoteSensingRunAdmin(admin.ModelAdmin):
list_display = (
"id",
"soil_location",
"block_code",
"provider",
"chunk_size_sqm",
"status",
"temporal_start",
"temporal_end",
"created_at",
)
list_filter = ("provider", "status", "chunk_size_sqm", "created_at")
search_fields = ("block_code", "soil_location__latitude", "soil_location__longitude")
readonly_fields = ("created_at", "updated_at")
@admin.register(AnalysisGridCell)
class AnalysisGridCellAdmin(admin.ModelAdmin):
list_display = (
"id",
"cell_code",
"soil_location",
"block_code",
"chunk_size_sqm",
"centroid_lat",
"centroid_lon",
"created_at",
)
list_filter = ("chunk_size_sqm", "created_at")
search_fields = ("cell_code", "block_code", "soil_location__latitude", "soil_location__longitude")
readonly_fields = ("created_at", "updated_at")
@admin.register(AnalysisGridObservation)
class AnalysisGridObservationAdmin(admin.ModelAdmin):
list_display = (
"id",
"cell",
"temporal_start",
"temporal_end",
"ndvi",
"ndwi",
"lst_c",
"created_at",
)
list_filter = ("temporal_start", "temporal_end", "created_at")
search_fields = ("cell__cell_code", "cell__block_code")
readonly_fields = ("created_at", "updated_at")
@admin.register(RemoteSensingSubdivisionResult)
class RemoteSensingSubdivisionResultAdmin(admin.ModelAdmin):
list_display = (
"id",
"soil_location",
"block_code",
"cluster_count",
"chunk_size_sqm",
"temporal_start",
"temporal_end",
"created_at",
)
list_filter = ("chunk_size_sqm", "cluster_count", "created_at")
search_fields = ("block_code", "soil_location__latitude", "soil_location__longitude")
readonly_fields = ("created_at", "updated_at")
@admin.register(RemoteSensingClusterAssignment)
class RemoteSensingClusterAssignmentAdmin(admin.ModelAdmin):
list_display = (
"id",
"result",
"cell",
"cluster_label",
"created_at",
)
list_filter = ("cluster_label", "created_at")
search_fields = ("cell__cell_code", "result__block_code")
readonly_fields = ("created_at", "updated_at")
+1 -22
View File
@@ -1,13 +1,12 @@
from functools import cached_property from functools import cached_property
from django.apps import AppConfig from django.apps import AppConfig
from django.conf import settings
class SoilDataConfig(AppConfig): class SoilDataConfig(AppConfig):
default_auto_field = "django.db.models.BigAutoField" default_auto_field = "django.db.models.BigAutoField"
name = "location_data" name = "location_data"
verbose_name = "Soil Data (SoilGrids)" verbose_name = "Location Data (Remote Sensing)"
@cached_property @cached_property
def ndvi_health_service(self): def ndvi_health_service(self):
@@ -15,25 +14,5 @@ class SoilDataConfig(AppConfig):
return NdviHealthService() return NdviHealthService()
@cached_property
def soil_data_adapter(self):
from .soil_adapters import MockSoilDataAdapter, SoilGridsAdapter
provider = getattr(settings, "SOIL_DATA_PROVIDER", "soilgrids")
if provider == "soilgrids":
return SoilGridsAdapter(
timeout=getattr(settings, "SOILGRIDS_TIMEOUT_SECONDS", 60)
)
if provider == "mock":
if not (getattr(settings, "DEBUG", False) or getattr(settings, "DEVELOP", False)):
raise RuntimeError("Mock soil provider is disabled outside dev/test environments.")
return MockSoilDataAdapter(
delay_seconds=getattr(settings, "SOIL_MOCK_DELAY_SECONDS", 0.8)
)
raise ValueError(f"Unsupported soil data provider: {provider}")
def get_ndvi_health_service(self): def get_ndvi_health_service(self):
return self.ndvi_health_service return self.ndvi_health_service
def get_soil_data_adapter(self):
return self.soil_data_adapter
+401
View File
@@ -0,0 +1,401 @@
from __future__ import annotations
from dataclasses import dataclass
from decimal import Decimal, ROUND_HALF_UP
from io import BytesIO
import math
from django.conf import settings
from django.core.files.base import ContentFile
EARTH_RADIUS_M = 6371008.8
COORD_PRECISION = Decimal("0.000001")
MAX_K = 10
RANDOM_STATE = 42
@dataclass(frozen=True)
class GeoPoint:
    """Immutable geographic coordinate pair in decimal degrees.

    Used as a polygon vertex; frozen so vertices are hashable and safe to
    compare (e.g. detecting a closing vertex equal to the first).
    """

    lat: float  # latitude in decimal degrees
    lon: float  # longitude in decimal degrees
def create_or_get_block_subdivision(
    location,
    block_code: str,
    boundary: dict | list,
    *,
    chunk_size_sqm: int | None = None,
):
    """Return the block's subdivision, computing and persisting it once.

    If a ``BlockSubdivision`` row already exists for this (location,
    block_code) pair it is returned untouched. Otherwise the grid + KMeans
    pipeline runs, the result (including the elbow-plot PNG) is stored,
    and the location's ``block_layout`` JSON is synchronised.

    Returns:
        tuple: ``(subdivision, created)`` — ``created`` is True only when
        a new row was computed in this call.
    """
    # Deferred import — presumably to avoid a circular import with models;
    # TODO confirm.
    from .models import BlockSubdivision
    # NOTE(review): filter-then-create is not atomic; two concurrent calls
    # could both miss here and insert duplicates. Confirm a unique
    # constraint on (soil_location, block_code) exists.
    existing = BlockSubdivision.objects.filter(
        soil_location=location,
        block_code=block_code,
    ).first()
    if existing is not None:
        return existing, False
    payload = build_block_subdivision_payload(
        boundary=boundary,
        block_code=block_code,
        chunk_size_sqm=chunk_size_sqm,
    )
    subdivision = BlockSubdivision.objects.create(
        soil_location=location,
        block_code=block_code,
        source_boundary=payload["source_boundary"],
        chunk_size_sqm=payload["chunk_size_sqm"],
        grid_points=payload["grid_points"],
        centroid_points=payload["centroid_points"],
        grid_point_count=payload["grid_point_count"],
        centroid_count=payload["centroid_count"],
        status="created",
        metadata=payload["metadata"],
    )
    plot_content = render_elbow_plot(
        inertia_curve=payload["metadata"].get("inertia_curve", []),
        optimal_k=payload["metadata"].get("optimal_k", 0),
        block_code=block_code,
    )
    if plot_content is not None:
        # save=False defers the DB write to the single save() call below.
        subdivision.elbow_plot.save(
            f"{location.pk}_{block_code}_elbow.png",
            plot_content,
            save=False,
        )
    subdivision.save(update_fields=["elbow_plot", "updated_at"])
    # Mirror the computed sub-blocks into the location's block_layout JSON.
    sync_block_layout_with_subdivision(location, subdivision)
    return subdivision, True
def build_block_subdivision_payload(
    boundary: dict | list,
    block_code: str = "block-1",
    chunk_size_sqm: int | None = None,
) -> dict:
    """Build the full subdivision payload for a single block boundary.

    The boundary is covered with a grid of chunk-sized points, the grid is
    clustered with KMeans, and the elbow point of the SSE curve selects the
    number of sub-blocks. Returns a plain dict ready for persistence.

    Raises:
        ValueError: when the chunk size is non-positive or the boundary
            yields fewer than three valid vertices.
    """
    effective_chunk = int(
        chunk_size_sqm
        or getattr(settings, "SUBDIVISION_CHUNK_SQM", 900)
        or 900
    )
    if effective_chunk <= 0:
        raise ValueError("chunk_size_sqm باید بزرگ‌تر از صفر باشد.")
    ring = extract_polygon(boundary)
    if len(ring) < 3:
        raise ValueError("مرز بلوک باید حداقل سه نقطه معتبر داشته باشد.")
    local_ring = project_polygon_to_local_meters(ring)
    block_area_sqm = abs(polygon_area(local_ring))
    points, vectors = generate_grid_points(
        polygon=ring,
        projected_polygon=local_ring,
        chunk_size_sqm=effective_chunk,
    )
    clustering = cluster_grid_points(vectors, ring)
    centroids = clustering["centroid_points"]
    source = boundary if isinstance(boundary, dict) else {"points": boundary}
    return {
        "block_code": block_code,
        "source_boundary": source,
        "chunk_size_sqm": effective_chunk,
        "grid_points": points,
        "centroid_points": centroids,
        "grid_point_count": len(points),
        "centroid_count": len(centroids),
        "metadata": {
            "estimated_area_sqm": round(block_area_sqm, 2),
            "optimal_k": clustering["optimal_k"],
            "inertia_curve": clustering["inertia_curve"],
        },
    }
def cluster_grid_points(grid_vectors: list[tuple[float, float]], polygon: list[GeoPoint]) -> dict:
    """Cluster projected grid points with KMeans and pick K via the elbow.

    Args:
        grid_vectors: (x, y) positions in local metres for every grid point.
        polygon: original lat/lon ring; its first vertex anchors the inverse
            projection of cluster centres back to degrees.

    Returns:
        dict with ``optimal_k``, the full ``inertia_curve`` (k → SSE), and
        ``centroid_points`` (sub-block codes plus quantized lat/lon).

    Raises:
        ImportError: when scikit-learn is not installed (only needed for
            two or more grid points).
    """
    # Degenerate cases avoid the sklearn dependency entirely.
    if not grid_vectors:
        return {
            "optimal_k": 0,
            "inertia_curve": [],
            "centroid_points": [],
        }
    if len(grid_vectors) == 1:
        lat, lon = unproject_point(grid_vectors[0][0], grid_vectors[0][1], polygon)
        return {
            "optimal_k": 1,
            "inertia_curve": [{"k": 1, "sse": 0.0}],
            "centroid_points": [
                {
                    "sub_block_code": "sub-block-1",
                    "centroid_lat": quantize_coordinate(lat),
                    "centroid_lon": quantize_coordinate(lon),
                }
            ],
        }
    try:
        from sklearn.cluster import KMeans
    except ImportError as exc:  # pragma: no cover - runtime dependency guard
        raise ImportError("scikit-learn برای اجرای subdivision لازم است.") from exc
    # Fit one model per candidate K (capped at MAX_K and the point count)
    # and cache each, so the chosen model needs no refit afterwards.
    max_k = min(MAX_K, len(grid_vectors))
    inertia_curve = []
    trained_models = {}
    for k in range(1, max_k + 1):
        model = KMeans(
            n_clusters=k,
            n_init=10,
            random_state=RANDOM_STATE,  # fixed seed keeps results deterministic
        )
        model.fit(grid_vectors)
        trained_models[k] = model
        inertia_curve.append({"k": k, "sse": round(float(model.inertia_), 6)})
    optimal_k = detect_elbow_point(inertia_curve)
    final_model = trained_models[optimal_k]
    # Map each cluster centre from local metres back to lat/lon degrees.
    centroid_points = []
    for index, center in enumerate(final_model.cluster_centers_, start=1):
        lat, lon = unproject_point(center[0], center[1], polygon)
        centroid_points.append(
            {
                "sub_block_code": f"sub-block-{index}",
                "centroid_lat": quantize_coordinate(lat),
                "centroid_lon": quantize_coordinate(lon),
            }
        )
    return {
        "optimal_k": optimal_k,
        "inertia_curve": inertia_curve,
        "centroid_points": centroid_points,
    }
def detect_elbow_point(inertia_curve: list[dict]) -> int:
    """Pick the elbow K from an SSE curve via the largest second difference.

    Returns 0 for an empty curve; with a single sample its K is returned,
    and with exactly two samples the last K is returned.
    """
    count = len(inertia_curve)
    if count == 0:
        return 0
    if count == 1:
        return inertia_curve[0]["k"]
    if count == 2:
        return inertia_curve[1]["k"]
    ks = [entry["k"] for entry in inertia_curve]
    # First differences: how much SSE drops between consecutive K values.
    drops = [
        inertia_curve[i]["sse"] - inertia_curve[i + 1]["sse"]
        for i in range(count - 1)
    ]
    best_k = ks[0]
    sharpest = float("-inf")
    # The elbow is where the drop flattens most abruptly (max curvature).
    for i, (left_drop, right_drop) in enumerate(zip(drops, drops[1:])):
        curvature = left_drop - right_drop
        if curvature > sharpest:
            sharpest = curvature
            best_k = ks[i + 1]
    return best_k
def render_elbow_plot(
    inertia_curve: list[dict],
    optimal_k: int,
    block_code: str,
) -> ContentFile | None:
    """Render the K-vs-SSE elbow curve to a PNG wrapped in a ContentFile.

    Returns ``None`` when the curve is empty (nothing to plot). The chosen
    elbow K is highlighted with a red marker when it appears on the curve.

    Raises:
        ImportError: when matplotlib is not installed.
    """
    if not inertia_curve:
        return None
    try:
        import matplotlib

        matplotlib.use("Agg")  # headless backend: no display server needed
        import matplotlib.pyplot as plt
    except ImportError as exc:  # pragma: no cover - runtime dependency guard
        raise ImportError("matplotlib برای ذخیره نمودار elbow لازم است.") from exc
    ks = [item["k"] for item in inertia_curve]
    sses = [item["sse"] for item in inertia_curve]
    buffer = BytesIO()
    fig, ax = plt.subplots(figsize=(8, 5))
    try:
        ax.plot(ks, sses, marker="o", linewidth=2, color="#2f6fed")
        if optimal_k in ks:
            elbow_index = ks.index(optimal_k)
            ax.scatter(
                [ks[elbow_index]],
                [sses[elbow_index]],
                color="#d62828",
                s=90,
                zorder=3,
                label=f"Elbow K={optimal_k}",
            )
            ax.legend()
        ax.set_title(f"Elbow Plot - {block_code}")
        ax.set_xlabel("K")
        ax.set_ylabel("SSE / Inertia")
        ax.grid(True, linestyle="--", linewidth=0.5, alpha=0.6)
        fig.tight_layout()
        fig.savefig(buffer, format="png", dpi=150)
        buffer.seek(0)
        # getvalue() copies the bytes, so closing the buffer afterwards is safe.
        return ContentFile(buffer.getvalue())
    finally:
        buffer.close()
        plt.close(fig)  # release the figure to avoid matplotlib memory growth
def sync_block_layout_with_subdivision(location, subdivision) -> None:
    """Mirror a computed subdivision into the location's block_layout JSON.

    The block entry matching ``subdivision.block_code`` is updated in
    place, or a new entry is appended when none exists yet. The location
    row is saved afterwards with only the changed fields.
    """
    layout = location.block_layout or {}
    blocks = list(layout.get("blocks") or [])
    entry = next(
        (block for block in blocks if block.get("block_code") == subdivision.block_code),
        None,
    )
    if entry is None:
        entry = {
            "block_code": subdivision.block_code,
            "order": len(blocks) + 1,
            "source": "input",
            "needs_subdivision": None,
            "sub_blocks": [],
        }
        blocks.append(entry)
    extra = subdivision.metadata or {}
    entry["needs_subdivision"] = subdivision.centroid_count > 1
    entry["sub_blocks"] = list(subdivision.centroid_points or [])
    entry["subdivision_summary"] = {
        "chunk_size_sqm": subdivision.chunk_size_sqm,
        "grid_point_count": subdivision.grid_point_count,
        "centroid_count": subdivision.centroid_count,
        "optimal_k": extra.get("optimal_k", subdivision.centroid_count),
    }
    layout["blocks"] = blocks
    layout["algorithm_status"] = "completed"
    location.block_layout = layout
    location.save(update_fields=["block_layout", "updated_at"])
def generate_grid_points(
    polygon: list[GeoPoint],
    projected_polygon: list[tuple[float, float]],
    chunk_size_sqm: int,
) -> tuple[list[dict], list[tuple[float, float]]]:
    """Lay a square grid of chunk centres over the projected polygon.

    Returns both the geographic grid points (dicts with codes and quantized
    lat/lon) and the raw projected (x, y) vectors used later for clustering.
    Only centres falling inside the polygon are kept.
    """
    step = math.sqrt(chunk_size_sqm)
    min_x, max_x, min_y, max_y = bounds(projected_polygon)

    def _axis(start: float, stop: float):
        # Walk from the first cell centre in fixed increments, mirroring the
        # original accumulation order so float results stay identical.
        value = start + (step / 2.0)
        while value <= stop:
            yield value
            value += step

    points: list[dict] = []
    vectors: list[tuple[float, float]] = []
    counter = 0
    for y in _axis(min_y, max_y):
        for x in _axis(min_x, max_x):
            if not point_in_polygon((x, y), projected_polygon):
                continue
            lat, lon = unproject_point(x, y, polygon)
            counter += 1
            vectors.append((x, y))
            points.append(
                {
                    "point_code": f"pt-{counter}",
                    "lat": quantize_coordinate(lat),
                    "lon": quantize_coordinate(lon),
                }
            )
    return points, vectors
def extract_polygon(boundary: dict | list) -> list[GeoPoint]:
    """Normalise a boundary payload into an open ring of GeoPoint vertices.

    Accepts a GeoJSON-style Polygon dict, a ``{"corners": [...]}`` dict, or
    a bare list of points. Point entries may be dicts (``lat``/``lon`` or
    ``latitude``/``longitude`` keys) or ``[lon, lat]`` sequences; invalid
    entries are skipped. A closing vertex equal to the first is dropped.
    """
    if isinstance(boundary, dict):
        if boundary.get("type") == "Polygon":
            rings = boundary.get("coordinates") or []
            raw_points = rings[0] if rings and isinstance(rings[0], list) else []
        else:
            raw_points = boundary.get("corners") or []
    elif isinstance(boundary, list):
        raw_points = boundary
    else:
        raw_points = []
    vertices: list[GeoPoint] = []
    for raw in raw_points:
        lat = lon = None
        if isinstance(raw, dict):
            lat = raw.get("lat", raw.get("latitude"))
            lon = raw.get("lon", raw.get("longitude"))
        elif isinstance(raw, (list, tuple)) and len(raw) >= 2:
            # GeoJSON ordering: [longitude, latitude].
            lon, lat = raw[0], raw[1]
        if lat is None or lon is None:
            continue
        vertices.append(GeoPoint(lat=float(lat), lon=float(lon)))
    if len(vertices) > 1 and vertices[0] == vertices[-1]:
        vertices.pop()
    return vertices
def project_polygon_to_local_meters(polygon: list[GeoPoint]) -> list[tuple[float, float]]:
    """Project lat/lon vertices onto a local tangent plane in metres.

    Uses an equirectangular approximation anchored at the first vertex;
    accurate enough at field scale. Raises IndexError for an empty polygon.
    """
    anchor = polygon[0]
    anchor_lat = math.radians(anchor.lat)
    anchor_lon = math.radians(anchor.lon)
    lat_scale = math.cos(anchor_lat)
    local: list[tuple[float, float]] = []
    for vertex in polygon:
        d_lat = math.radians(vertex.lat) - anchor_lat
        d_lon = math.radians(vertex.lon) - anchor_lon
        local.append((d_lon * lat_scale * EARTH_RADIUS_M, d_lat * EARTH_RADIUS_M))
    return local
def unproject_point(x: float, y: float, polygon: list[GeoPoint]) -> tuple[float, float]:
    """Invert the local-metre projection back to (lat, lon) degrees.

    Must use the same anchor (first vertex of ``polygon``) as
    ``project_polygon_to_local_meters`` to round-trip correctly.
    """
    anchor = polygon[0]
    anchor_lat = math.radians(anchor.lat)
    anchor_lon = math.radians(anchor.lon)
    lat_scale = math.cos(anchor_lat)
    latitude = math.degrees((y / EARTH_RADIUS_M) + anchor_lat)
    longitude = math.degrees((x / (EARTH_RADIUS_M * lat_scale)) + anchor_lon)
    return latitude, longitude
def polygon_area(points: list[tuple[float, float]]) -> float:
    """Signed shoelace area of a ring of (x, y) points (CCW positive)."""
    ring = points + [points[0]]
    total = 0.0
    for (x1, y1), (x2, y2) in zip(ring, ring[1:]):
        total += (x1 * y2) - (x2 * y1)
    return total / 2.0
def bounds(points: list[tuple[float, float]]) -> tuple[float, float, float, float]:
    """Axis-aligned bounding box of the points: (min_x, max_x, min_y, max_y)."""
    xs, ys = zip(*points)
    return min(xs), max(xs), min(ys), max(ys)
def point_in_polygon(point: tuple[float, float], polygon: list[tuple[float, float]]) -> bool:
    """Ray-casting even/odd test for a point against a projected polygon.

    Points exactly on an edge may land on either side — acceptable for
    grid-centre filtering.
    """
    px, py = point
    inside = False
    prev = len(polygon) - 1
    for curr in range(len(polygon)):
        cx, cy = polygon[curr]
        qx, qy = polygon[prev]
        if (cy > py) != (qy > py):
            # Guard against a zero denominator, although a horizontal edge
            # can never satisfy the straddle test above.
            crossing_x = ((qx - cx) * (py - cy) / ((qy - cy) or 1e-12)) + cx
            if px < crossing_x:
                inside = not inside
        prev = curr
    return inside
def quantize_coordinate(value: float) -> float:
    """Round a coordinate to COORD_PRECISION decimals with half-up rounding."""
    rounded = Decimal(str(value)).quantize(COORD_PRECISION, rounding=ROUND_HALF_UP)
    return float(rounded)
+489
View File
@@ -0,0 +1,489 @@
from __future__ import annotations
from dataclasses import dataclass
from typing import Any
from django.db import transaction
from .block_subdivision import detect_elbow_point, render_elbow_plot
from .models import (
AnalysisGridObservation,
BlockSubdivision,
RemoteSensingClusterAssignment,
RemoteSensingRun,
RemoteSensingSubdivisionResult,
SoilLocation,
)
DEFAULT_CLUSTER_FEATURES = [
"ndvi",
"ndwi",
"lst_c",
"soil_vv_db",
"dem_m",
"slope_deg",
]
SUPPORTED_CLUSTER_FEATURES = tuple(DEFAULT_CLUSTER_FEATURES)
DEFAULT_RANDOM_STATE = 42
DEFAULT_MAX_K = 10
class DataDrivenSubdivisionError(Exception):
    """Raised when a remote-sensing-driven subdivision cannot be computed."""
@dataclass
class ClusteringDataset:
    """Pre-processed feature matrix plus bookkeeping for KMeans clustering.

    Bundles the observations that survived filtering, the raw and
    processed feature matrices, and per-feature statistics so the
    preprocessing can be reported and reproduced.
    """

    observations: list[AnalysisGridObservation]  # rows actually used for clustering
    selected_features: list[str]  # feature column order shared by every matrix below
    raw_feature_rows: list[list[float | None]]  # pre-imputation values; may contain None
    raw_feature_maps: list[dict[str, float | None]]  # same rows keyed by feature name
    skipped_cell_codes: list[str]  # cells excluded from clustering
    used_cell_codes: list[str]  # cells that contributed a row
    imputed_matrix: list[list[float]]  # rows after imputation (strategy reported as "median")
    scaled_matrix: list[list[float]]  # rows after scaling; the actual KMeans input
    imputer_statistics: dict[str, float | None]  # per-feature imputation fill values
    scaler_means: dict[str, float]  # per-feature means — presumably for standard scaling; confirm in builder
    scaler_scales: dict[str, float]  # per-feature scale divisors paired with the means above
    missing_value_counts: dict[str, int]  # count of missing values per feature
    skipped_reasons: dict[str, list[str]]  # cell_code -> reasons the cell was skipped
def create_remote_sensing_subdivision_result(
    *,
    location: SoilLocation,
    run: RemoteSensingRun,
    observations: list[AnalysisGridObservation],
    block_subdivision: BlockSubdivision | None = None,
    block_code: str = "",
    selected_features: list[str] | None = None,
    explicit_k: int | None = None,
    max_k: int = DEFAULT_MAX_K,
    random_state: int = DEFAULT_RANDOM_STATE,
) -> RemoteSensingSubdivisionResult:
    """
    Build a data-driven subdivision result from stored remote sensing observations.
    KMeans is applied on actual per-cell feature vectors, not geometric points.

    Pipeline: assemble the imputed/scaled feature matrix, choose k (explicit or
    via elbow), run KMeans, then persist the result plus per-cell cluster
    assignments and sync the linked block subdivision / location layout.

    Raises:
        DataDrivenSubdivisionError: when no usable observation remains, the
            feature selection is invalid, or clustering dependencies are missing.
    """
    dataset = build_clustering_dataset(
        observations=observations,
        selected_features=selected_features,
    )
    if not dataset.observations:
        raise DataDrivenSubdivisionError("هیچ observation قابل استفاده‌ای برای خوشه‌بندی باقی نماند.")
    # Choose k: an explicit request wins; otherwise use the elbow of the SSE curve.
    optimal_k, inertia_curve = choose_cluster_count(
        scaled_matrix=dataset.scaled_matrix,
        explicit_k=explicit_k,
        max_k=max_k,
        random_state=random_state,
    )
    cluster_selection_strategy = "explicit_k" if explicit_k is not None else "elbow"
    labels = run_kmeans_labels(
        scaled_matrix=dataset.scaled_matrix,
        cluster_count=optimal_k,
        random_state=random_state,
    )
    cluster_summaries = build_cluster_summaries(
        observations=dataset.observations,
        labels=labels,
    )
    with transaction.atomic():
        # One result per run: re-running replaces the previous payload.
        result, _created = RemoteSensingSubdivisionResult.objects.update_or_create(
            run=run,
            defaults={
                "soil_location": location,
                "block_subdivision": block_subdivision,
                "block_code": block_code,
                "chunk_size_sqm": run.chunk_size_sqm,
                "temporal_start": run.temporal_start,
                "temporal_end": run.temporal_end,
                "cluster_count": optimal_k,
                "selected_features": dataset.selected_features,
                "skipped_cell_codes": dataset.skipped_cell_codes,
                "metadata": {
                    # cell_count is the raw input size, including skipped cells.
                    "cell_count": len(observations),
                    "used_cell_count": len(dataset.observations),
                    "skipped_cell_count": len(dataset.skipped_cell_codes),
                    "used_cell_codes": dataset.used_cell_codes,
                    "skipped_reasons": dataset.skipped_reasons,
                    "selected_features": dataset.selected_features,
                    "imputer_strategy": "median",
                    "imputer_statistics": dataset.imputer_statistics,
                    "missing_value_counts": dataset.missing_value_counts,
                    "scaler_means": dataset.scaler_means,
                    "scaler_scales": dataset.scaler_scales,
                    "kmeans_params": {
                        "random_state": random_state,
                        "explicit_k": explicit_k,
                        "selected_k": optimal_k,
                        "max_k": max_k,
                        "n_init": 10,
                        "selection_strategy": cluster_selection_strategy,
                    },
                    "inertia_curve": inertia_curve,
                    "cluster_summaries": cluster_summaries,
                },
            },
        )
        # Rebuild assignments from scratch so reruns stay idempotent.
        result.assignments.all().delete()
        assignment_rows = []
        for index, observation in enumerate(dataset.observations):
            assignment_rows.append(
                RemoteSensingClusterAssignment(
                    result=result,
                    cell=observation.cell,
                    cluster_label=int(labels[index]),
                    raw_feature_values=dataset.raw_feature_maps[index],
                    scaled_feature_values={
                        feature_name: round(dataset.scaled_matrix[index][feature_index], 6)
                        for feature_index, feature_name in enumerate(dataset.selected_features)
                    },
                )
            )
        RemoteSensingClusterAssignment.objects.bulk_create(assignment_rows)
        if block_subdivision is not None:
            sync_block_subdivision_with_result(
                block_subdivision=block_subdivision,
                result=result,
                observations=observations,
                cluster_summaries=cluster_summaries,
            )
        sync_location_block_layout_with_result(
            location=location,
            result=result,
            cluster_summaries=cluster_summaries,
        )
    return result
def build_clustering_dataset(
    *,
    observations: list[AnalysisGridObservation],
    selected_features: list[str] | None = None,
) -> ClusteringDataset:
    """
    Extract, filter, impute and scale the feature matrix for clustering.

    Observations with every selected feature missing are skipped (and
    recorded); remaining gaps are filled with the per-feature median before
    standard scaling.

    Raises:
        DataDrivenSubdivisionError: for unsupported feature names or when
            numpy/scikit-learn are unavailable.
    """
    selected_features = list(selected_features or DEFAULT_CLUSTER_FEATURES)
    invalid_features = [
        feature_name
        for feature_name in selected_features
        if feature_name not in SUPPORTED_CLUSTER_FEATURES
    ]
    if invalid_features:
        raise DataDrivenSubdivisionError(
            "ویژگی‌های نامعتبر برای خوشه‌بندی: "
            + ", ".join(sorted(invalid_features))
        )
    raw_rows: list[list[float | None]] = []
    raw_maps: list[dict[str, float | None]] = []
    usable_observations: list[AnalysisGridObservation] = []
    skipped_cell_codes: list[str] = []
    used_cell_codes: list[str] = []
    missing_value_counts = {feature_name: 0 for feature_name in selected_features}
    skipped_reasons = {"all_features_missing": []}
    for observation in observations:
        # Features are read by attribute name off the observation row.
        feature_map = {
            feature_name: _coerce_float(getattr(observation, feature_name, None))
            for feature_name in selected_features
        }
        for feature_name, value in feature_map.items():
            if value is None:
                missing_value_counts[feature_name] += 1
        # A row with no usable feature at all cannot be imputed meaningfully.
        if all(value is None for value in feature_map.values()):
            skipped_cell_codes.append(observation.cell.cell_code)
            skipped_reasons["all_features_missing"].append(observation.cell.cell_code)
            continue
        usable_observations.append(observation)
        used_cell_codes.append(observation.cell.cell_code)
        raw_maps.append(feature_map)
        raw_rows.append([feature_map[feature_name] for feature_name in selected_features])
    if not usable_observations:
        # Empty dataset with neutral imputer/scaler stats; caller decides how to react.
        return ClusteringDataset(
            observations=[],
            selected_features=selected_features,
            raw_feature_rows=[],
            raw_feature_maps=[],
            skipped_cell_codes=skipped_cell_codes,
            used_cell_codes=[],
            imputed_matrix=[],
            scaled_matrix=[],
            imputer_statistics={feature_name: None for feature_name in selected_features},
            scaler_means={feature_name: 0.0 for feature_name in selected_features},
            scaler_scales={feature_name: 1.0 for feature_name in selected_features},
            missing_value_counts=missing_value_counts,
            skipped_reasons=skipped_reasons,
        )
    try:
        import numpy as np
        from sklearn.impute import SimpleImputer
        from sklearn.preprocessing import StandardScaler
    except ImportError as exc:  # pragma: no cover - runtime dependency guard
        raise DataDrivenSubdivisionError(
            "scikit-learn و numpy برای خوشه‌بندی داده‌محور لازم هستند."
        ) from exc
    raw_matrix = np.array(raw_rows, dtype=float)
    imputer = SimpleImputer(strategy="median")
    imputed_matrix = imputer.fit_transform(raw_matrix)
    scaler = StandardScaler()
    scaled_matrix = scaler.fit_transform(imputed_matrix)
    return ClusteringDataset(
        observations=usable_observations,
        selected_features=selected_features,
        raw_feature_rows=raw_rows,
        raw_feature_maps=raw_maps,
        skipped_cell_codes=skipped_cell_codes,
        used_cell_codes=used_cell_codes,
        imputed_matrix=imputed_matrix.tolist(),
        scaled_matrix=scaled_matrix.tolist(),
        imputer_statistics={
            feature_name: _coerce_float(imputer.statistics_[index])
            for index, feature_name in enumerate(selected_features)
        },
        scaler_means={
            feature_name: float(scaler.mean_[index])
            for index, feature_name in enumerate(selected_features)
        },
        scaler_scales={
            # Zero scale (constant feature) is replaced by 1.0 to keep it usable.
            feature_name: float(scaler.scale_[index] or 1.0)
            for index, feature_name in enumerate(selected_features)
        },
        missing_value_counts=missing_value_counts,
        skipped_reasons=skipped_reasons,
    )
def choose_cluster_count(
    *,
    scaled_matrix: list[list[float]],
    explicit_k: int | None,
    max_k: int,
    random_state: int,
) -> tuple[int, list[dict[str, float]]]:
    """
    Pick the cluster count and return it with the SSE (inertia) curve.

    An explicit k is honored (capped at the sample count, empty curve);
    otherwise KMeans is fitted for k = 1..min(max_k, samples) and the elbow
    of the inertia curve decides.

    Raises:
        DataDrivenSubdivisionError: for an empty matrix, non-positive k, or
            missing scikit-learn.
    """
    sample_count = len(scaled_matrix)
    if sample_count == 0:
        raise DataDrivenSubdivisionError("هیچ نمونه‌ای برای خوشه‌بندی وجود ندارد.")
    # One sample trivially forms one cluster with zero inertia.
    if sample_count == 1:
        return 1, [{"k": 1, "sse": 0.0}]
    if explicit_k is not None:
        if explicit_k <= 0:
            raise DataDrivenSubdivisionError("cluster_count باید بزرگ‌تر از صفر باشد.")
        return min(explicit_k, sample_count), []
    try:
        from sklearn.cluster import KMeans
    except ImportError as exc:  # pragma: no cover
        raise DataDrivenSubdivisionError("scikit-learn برای انتخاب تعداد خوشه لازم است.") from exc
    inertia_curve = []
    for candidate_k in range(1, min(max_k, sample_count) + 1):
        fitted = KMeans(n_clusters=candidate_k, n_init=10, random_state=random_state).fit(scaled_matrix)
        inertia_curve.append({"k": candidate_k, "sse": round(float(fitted.inertia_), 6)})
    return detect_elbow_point(inertia_curve), inertia_curve
def run_kmeans_labels(
    *,
    scaled_matrix: list[list[float]],
    cluster_count: int,
    random_state: int,
) -> list[int]:
    """
    Fit KMeans on the scaled matrix and return one integer label per row.

    Raises:
        DataDrivenSubdivisionError: for a non-positive cluster_count or
            missing scikit-learn.
    """
    if cluster_count <= 0:
        raise DataDrivenSubdivisionError("cluster_count باید بزرگ‌تر از صفر باشد.")
    # A single row trivially belongs to cluster 0; skip the sklearn round-trip.
    if len(scaled_matrix) == 1:
        return [0]
    try:
        from sklearn.cluster import KMeans
    except ImportError as exc:  # pragma: no cover
        raise DataDrivenSubdivisionError("scikit-learn برای اجرای KMeans لازم است.") from exc
    estimator = KMeans(n_clusters=cluster_count, n_init=10, random_state=random_state)
    predicted = estimator.fit_predict(scaled_matrix)
    return [int(raw_label) for raw_label in predicted]
def build_cluster_summaries(
    *,
    observations: list[AnalysisGridObservation],
    labels: list[int],
) -> list[dict[str, Any]]:
    """
    Summarize each cluster: member cell codes, member count and mean centroid.

    Returns one dict per cluster label, sorted by label; centroids are the
    arithmetic mean of member cell centroids rounded to 6 decimals.
    """
    grouped: dict[int, list] = {}
    for observation, raw_label in zip(observations, labels):
        grouped.setdefault(int(raw_label), []).append(observation)
    summaries = []
    for label in sorted(grouped):
        members = grouped[label]
        divisor = len(members) or 1  # defensive; groups are never empty
        lat_total = sum(float(member.cell.centroid_lat) for member in members)
        lon_total = sum(float(member.cell.centroid_lon) for member in members)
        summaries.append(
            {
                "cluster_label": label,
                "cell_count": len(members),
                "centroid_lat": round(lat_total / divisor, 6),
                "centroid_lon": round(lon_total / divisor, 6),
                "cell_codes": [member.cell.cell_code for member in members],
            }
        )
    return summaries
def sync_location_block_layout_with_result(
    *,
    location: SoilLocation,
    result: RemoteSensingSubdivisionResult,
    cluster_summaries: list[dict[str, Any]],
) -> None:
    """
    Mirror a subdivision result into the location's JSON block layout.

    Finds (or appends) the block matching ``result.block_code``, replaces its
    sub-blocks with one entry per cluster, records a subdivision summary, and
    marks the layout's algorithm status as completed. Persists the location.
    """
    layout = dict(location.block_layout or {})
    blocks = list(layout.get("blocks") or [])
    target_block = None
    for block in blocks:
        if block.get("block_code") == result.block_code:
            target_block = block
            break
    if target_block is None:
        # Block not present yet: register a new remote-sensing-sourced block.
        target_block = {
            "block_code": result.block_code,
            "order": len(blocks) + 1,
            "source": "remote_sensing",
            "needs_subdivision": None,
            "sub_blocks": [],
        }
        blocks.append(target_block)
    # More than one cluster means the block genuinely needs subdividing.
    target_block["needs_subdivision"] = result.cluster_count > 1
    target_block["sub_blocks"] = [
        {
            "sub_block_code": f"cluster-{cluster['cluster_label']}",
            "cluster_label": cluster["cluster_label"],
            "centroid_lat": cluster["centroid_lat"],
            "centroid_lon": cluster["centroid_lon"],
            "cell_count": cluster["cell_count"],
        }
        for cluster in cluster_summaries
    ]
    target_block["subdivision_summary"] = {
        "type": "data_driven_remote_sensing",
        "cluster_count": result.cluster_count,
        "selected_features": result.selected_features,
        "used_cell_count": result.metadata.get("used_cell_count", 0),
        "skipped_cell_count": result.metadata.get("skipped_cell_count", 0),
        "run_id": result.run_id,
    }
    layout["blocks"] = blocks
    layout["algorithm_status"] = "completed"
    location.block_layout = layout
    location.save(update_fields=["block_layout", "updated_at"])
def sync_block_subdivision_with_result(
    *,
    block_subdivision: BlockSubdivision,
    result: RemoteSensingSubdivisionResult,
    observations: list[AnalysisGridObservation],
    cluster_summaries: list[dict[str, Any]],
) -> None:
    """
    Mirror a remote-sensing subdivision result onto its BlockSubdivision row.

    Updates the grid/centroid point payloads, counters, status and metadata,
    attaches a freshly rendered elbow plot when one is produced, and persists
    everything with a single save().
    """
    merged_metadata = dict(block_subdivision.metadata or {})
    merged_metadata["data_driven_subdivision"] = {
        "run_id": result.run_id,
        "result_id": result.id,
        "cluster_count": result.cluster_count,
        "selected_features": result.selected_features,
        "used_cell_count": result.metadata.get("used_cell_count", 0),
        "skipped_cell_count": result.metadata.get("skipped_cell_count", 0),
        "temporal_extent": {
            "start_date": result.temporal_start.isoformat() if result.temporal_start else None,
            "end_date": result.temporal_end.isoformat() if result.temporal_end else None,
        },
        "inertia_curve": result.metadata.get("inertia_curve", []),
    }
    grid_point_payload = []
    for observation in observations:
        cell = observation.cell
        grid_point_payload.append(
            {
                "cell_code": cell.cell_code,
                "centroid_lat": round(float(cell.centroid_lat), 6),
                "centroid_lon": round(float(cell.centroid_lon), 6),
            }
        )
    block_subdivision.grid_points = grid_point_payload
    block_subdivision.centroid_points = [
        {
            "sub_block_code": f"cluster-{cluster['cluster_label']}",
            "cluster_label": cluster["cluster_label"],
            "centroid_lat": cluster["centroid_lat"],
            "centroid_lon": cluster["centroid_lon"],
            "cell_count": cluster["cell_count"],
            "cell_codes": cluster["cell_codes"],
        }
        for cluster in cluster_summaries
    ]
    block_subdivision.grid_point_count = len(observations)
    block_subdivision.centroid_count = len(cluster_summaries)
    block_subdivision.status = "subdivided"
    block_subdivision.metadata = merged_metadata
    fields_to_persist = [
        "grid_points",
        "centroid_points",
        "grid_point_count",
        "centroid_count",
        "status",
        "metadata",
    ]
    plot_content = render_elbow_plot(
        inertia_curve=result.metadata.get("inertia_curve", []),
        optimal_k=result.cluster_count,
        block_code=result.block_code or block_subdivision.block_code,
    )
    if plot_content is not None:
        # save=False defers the DB write to the single save() below.
        block_subdivision.elbow_plot.save(
            f"remote-sensing-{result.soil_location_id}-{result.block_code or block_subdivision.block_code}-elbow.png",
            plot_content,
            save=False,
        )
        fields_to_persist.append("elbow_plot")
    fields_to_persist.append("updated_at")
    block_subdivision.save(update_fields=fields_to_persist)
def _coerce_float(value: Any) -> float | None:
if value is None:
return None
try:
return float(value)
except (TypeError, ValueError):
return None
+327
View File
@@ -0,0 +1,327 @@
from __future__ import annotations
from decimal import Decimal
import math
from django.conf import settings
from django.db import transaction
from .block_subdivision import (
GeoPoint,
bounds,
extract_polygon,
point_in_polygon,
project_polygon_to_local_meters,
quantize_coordinate,
unproject_point,
)
from .models import AnalysisGridCell, BlockSubdivision, SoilLocation
def create_or_get_analysis_grid_cells(
    location: SoilLocation,
    *,
    boundary: dict | list | None = None,
    block_code: str | None = None,
    block_subdivision: BlockSubdivision | None = None,
    chunk_size_sqm: int | None = None,
) -> dict:
    """
    Build the 30x30 m analysis grid (or any configured chunk size) for a
    farm/block and persist AnalysisGridCell records idempotently.

    If cells already exist for the (location, block, chunk size) combination,
    no new rows are created and the existing count is reported. Returns a
    summary dict with created/existing/total counts and the resolved inputs.

    Raises:
        ValueError: for a non-positive chunk size, an unresolvable boundary,
            or a boundary with fewer than three points.
    """
    # Fall back to the project-wide default chunk size when none is given.
    normalized_chunk_size = int(
        chunk_size_sqm or getattr(settings, "SUBDIVISION_CHUNK_SQM", 900) or 900
    )
    if normalized_chunk_size <= 0:
        raise ValueError("chunk_size_sqm باید بزرگ‌تر از صفر باشد.")
    resolved_block_code = str(block_code or getattr(block_subdivision, "block_code", "") or "").strip()
    resolved_boundary = _resolve_boundary(
        location=location,
        boundary=boundary,
        block_subdivision=block_subdivision,
    )
    polygon = extract_polygon(resolved_boundary)
    if len(polygon) < 3:
        raise ValueError("برای ساخت analysis grid باید حداقل سه نقطه معتبر در boundary وجود داشته باشد.")
    # Idempotency guard: reuse any previously generated grid for this combo.
    existing_qs = AnalysisGridCell.objects.filter(
        soil_location=location,
        block_code=resolved_block_code,
        chunk_size_sqm=normalized_chunk_size,
    ).order_by("cell_code")
    existing_count = existing_qs.count()
    if existing_count:
        return {
            "created_count": 0,
            "existing_count": existing_count,
            "total_count": existing_count,
            "chunk_size_sqm": normalized_chunk_size,
            "block_code": resolved_block_code,
            "created": False,
        }
    cell_payloads = build_analysis_grid_payload(
        polygon=polygon,
        location=location,
        block_code=resolved_block_code,
        chunk_size_sqm=normalized_chunk_size,
    )
    created_cells = []
    with transaction.atomic():
        for payload in cell_payloads:
            created_cells.append(
                AnalysisGridCell.objects.create(
                    soil_location=location,
                    block_subdivision=block_subdivision,
                    block_code=resolved_block_code,
                    cell_code=payload["cell_code"],
                    chunk_size_sqm=normalized_chunk_size,
                    geometry=payload["geometry"],
                    centroid_lat=Decimal(str(payload["centroid_lat"])),
                    centroid_lon=Decimal(str(payload["centroid_lon"])),
                )
            )
        # Record the grid size on the subdivision / location layout.
        _update_grid_summary_metadata(
            location=location,
            block_code=resolved_block_code,
            chunk_size_sqm=normalized_chunk_size,
            total_count=len(created_cells),
            block_subdivision=block_subdivision,
        )
    return {
        "created_count": len(created_cells),
        "existing_count": 0,
        "total_count": len(created_cells),
        "chunk_size_sqm": normalized_chunk_size,
        "block_code": resolved_block_code,
        "created": True,
    }
def build_analysis_grid_payload(
    *,
    polygon: list[GeoPoint],
    location: SoilLocation,
    block_code: str,
    chunk_size_sqm: int,
) -> list[dict]:
    """
    Sweep a square grid over the boundary's local-meter bounding box and
    serialize every cell that intersects the polygon.

    Cell edge length is sqrt(chunk_size_sqm) meters; rows/columns are indexed
    from the bounding box's minimum corner.
    """
    local_polygon = project_polygon_to_local_meters(polygon)
    cell_edge_m = math.sqrt(chunk_size_sqm)
    min_x, max_x, min_y, max_y = bounds(local_polygon)
    cells: list[dict] = []
    row = 0
    cursor_y = min_y
    while cursor_y < max_y:
        col = 0
        cursor_x = min_x
        while cursor_x < max_x:
            corners = [
                (cursor_x, cursor_y),
                (cursor_x + cell_edge_m, cursor_y),
                (cursor_x + cell_edge_m, cursor_y + cell_edge_m),
                (cursor_x, cursor_y + cell_edge_m),
            ]
            # Only keep cells that actually touch the boundary polygon.
            if _cell_intersects_polygon(corners, local_polygon):
                cells.append(
                    _build_cell_payload(
                        location=location,
                        block_code=block_code,
                        chunk_size_sqm=chunk_size_sqm,
                        polygon=polygon,
                        cell_polygon=corners,
                        row_index=row,
                        col_index=col,
                    )
                )
            cursor_x += cell_edge_m
            col += 1
        cursor_y += cell_edge_m
        row += 1
    return cells
def _build_cell_payload(
    *,
    location: SoilLocation,
    block_code: str,
    chunk_size_sqm: int,
    polygon: list[GeoPoint],
    cell_polygon: list[tuple[float, float]],
    row_index: int,
    col_index: int,
) -> dict:
    """Serialize one grid cell: its code, GeoJSON polygon and geographic centroid."""
    # GeoJSON requires a closed ring (first vertex repeated at the end).
    ring = [*cell_polygon, cell_polygon[0]]
    coordinates = []
    for local_x, local_y in ring:
        lat, lon = unproject_point(local_x, local_y, polygon)
        # GeoJSON coordinate order is [lon, lat].
        coordinates.append([quantize_coordinate(lon), quantize_coordinate(lat)])
    vertex_count = len(cell_polygon)
    center_x = sum(vertex[0] for vertex in cell_polygon) / vertex_count
    center_y = sum(vertex[1] for vertex in cell_polygon) / vertex_count
    center_lat, center_lon = unproject_point(center_x, center_y, polygon)
    return {
        "cell_code": build_analysis_cell_code(
            location_id=location.id,
            block_code=block_code,
            chunk_size_sqm=chunk_size_sqm,
            row_index=row_index,
            col_index=col_index,
        ),
        "geometry": {
            "type": "Polygon",
            "coordinates": [coordinates],
        },
        "centroid_lat": quantize_coordinate(center_lat),
        "centroid_lon": quantize_coordinate(center_lon),
    }
def build_analysis_cell_code(
    *,
    location_id: int | None,
    block_code: str,
    chunk_size_sqm: int,
    row_index: int,
    col_index: int,
) -> str:
    """
    Build the unique cell identifier: loc-<id>__block-<code>__chunk-<sqm>__rRRRRcCCCC.

    A missing location id becomes "new"; an empty block code becomes "farm".
    Row/column indices are zero-padded to 4 digits.
    """
    location_segment = "new" if location_id is None else location_id
    block_segment = block_code or "farm"
    segments = [
        f"loc-{location_segment}",
        f"block-{block_segment}",
        f"chunk-{chunk_size_sqm}",
        f"r{row_index:04d}c{col_index:04d}",
    ]
    return "__".join(segments)
def _resolve_boundary(
*,
location: SoilLocation,
boundary: dict | list | None,
block_subdivision: BlockSubdivision | None,
) -> dict | list:
if boundary:
return boundary
if block_subdivision is not None and block_subdivision.source_boundary:
return block_subdivision.source_boundary
if location.farm_boundary:
return location.farm_boundary
raise ValueError("هیچ boundary معتبری برای ساخت analysis grid پیدا نشد.")
def _cell_intersects_polygon(
    cell_polygon: list[tuple[float, float]],
    polygon: list[tuple[float, float]],
) -> bool:
    """
    True when the square cell and the boundary polygon overlap.

    Three progressively broader checks: any cell corner inside the polygon,
    any polygon vertex inside the cell's bounding box, or any pair of edges
    crossing.
    """
    for corner in cell_polygon:
        if point_in_polygon(corner, polygon):
            return True
    if any(_point_in_rect(vertex, cell_polygon) for vertex in polygon):
        return True
    return any(
        _segments_intersect(cell_start, cell_end, poly_start, poly_end)
        for cell_start, cell_end in _polygon_edges(cell_polygon)
        for poly_start, poly_end in _polygon_edges(polygon)
    )
def _point_in_rect(point: tuple[float, float], rect: list[tuple[float, float]]) -> bool:
xs = [vertex[0] for vertex in rect]
ys = [vertex[1] for vertex in rect]
return min(xs) <= point[0] <= max(xs) and min(ys) <= point[1] <= max(ys)
def _polygon_edges(points: list[tuple[float, float]]) -> list[tuple[tuple[float, float], tuple[float, float]]]:
closed = points + [points[0]]
return [
(closed[index], closed[index + 1])
for index in range(len(points))
]
def _segments_intersect(
    p1: tuple[float, float],
    p2: tuple[float, float],
    q1: tuple[float, float],
    q2: tuple[float, float],
) -> bool:
    """
    True when segments p1-p2 and q1-q2 intersect.

    Standard orientation test: a proper crossing when the endpoint
    orientations disagree on both segments, plus collinear-overlap checks.
    """
    d1 = _orientation(p1, p2, q1)
    d2 = _orientation(p1, p2, q2)
    d3 = _orientation(q1, q2, p1)
    d4 = _orientation(q1, q2, p2)
    if d1 != d2 and d3 != d4:
        return True
    # Collinear cases: an endpoint of one segment lying on the other.
    collinear_checks = (
        (d1, p1, q1, p2),
        (d2, p1, q2, p2),
        (d3, q1, p1, q2),
        (d4, q1, p2, q2),
    )
    return any(
        orientation_code == 0 and _on_segment(start, middle, end)
        for orientation_code, start, middle, end in collinear_checks
    )
def _orientation(a: tuple[float, float], b: tuple[float, float], c: tuple[float, float]) -> int:
value = ((b[1] - a[1]) * (c[0] - b[0])) - ((b[0] - a[0]) * (c[1] - b[1]))
if abs(value) < 1e-9:
return 0
return 1 if value > 0 else 2
def _on_segment(a: tuple[float, float], b: tuple[float, float], c: tuple[float, float]) -> bool:
return (
min(a[0], c[0]) <= b[0] <= max(a[0], c[0])
and min(a[1], c[1]) <= b[1] <= max(a[1], c[1])
)
def _update_grid_summary_metadata(
    *,
    location: SoilLocation,
    block_code: str,
    chunk_size_sqm: int,
    total_count: int,
    block_subdivision: BlockSubdivision | None,
) -> None:
    """
    Record the generated grid's size on related records.

    Writes an ``analysis_grid`` entry into the linked block subdivision's
    metadata (when present) and an ``analysis_grid_summary`` into the
    location's block layout — on the matching block, or at layout level for a
    whole-farm grid (empty ``block_code``). Persists both records.
    """
    if block_subdivision is not None:
        metadata = dict(block_subdivision.metadata or {})
        metadata["analysis_grid"] = {
            "chunk_size_sqm": chunk_size_sqm,
            "cell_count": total_count,
        }
        block_subdivision.metadata = metadata
        block_subdivision.save(update_fields=["metadata", "updated_at"])
    layout = dict(location.block_layout or {})
    blocks = list(layout.get("blocks") or [])
    for block in blocks:
        if block.get("block_code") == block_code:
            block["analysis_grid_summary"] = {
                "chunk_size_sqm": chunk_size_sqm,
                "cell_count": total_count,
            }
            break
    else:
        # for/else: no matching block found. A blank block_code means the grid
        # covers the whole farm, so store the summary at the layout level.
        if not block_code:
            layout["analysis_grid_summary"] = {
                "chunk_size_sqm": chunk_size_sqm,
                "cell_count": total_count,
            }
    if blocks:
        layout["blocks"] = blocks
    location.block_layout = layout
    location.save(update_fields=["block_layout", "updated_at"])
@@ -1,107 +0,0 @@
"""
Management command to seed a fixed demo farm center location and soil depths.
Run: python manage.py seed_location_data
"""
from django.core.management.base import BaseCommand
from location_data.models import SoilDepthData, SoilLocation
# Fixed demo farm center used by the seed command.
DEMO_LATITUDE = "50.000000"
DEMO_LONGITUDE = "50.000000"
# Simple square boundary around the demo center (GeoJSON-style polygon).
DEMO_BOUNDARY = {
    "type": "Polygon",
    "coordinates": [
        [
            [49.995, 49.995],
            [50.005, 49.995],
            [50.005, 50.005],
            [49.995, 50.005],
            [49.995, 49.995],
        ]
    ],
}
# Representative soil property values for three standard depth bands.
DEMO_SOIL_DEPTHS = {
    SoilDepthData.DEPTH_0_5: {
        "bdod": 1.22,
        "cec": 18.4,
        "cfvo": 3.0,
        "clay": 24.0,
        "nitrogen": 0.21,
        "ocd": 26.0,
        "ocs": 4.1,
        "phh2o": 6.7,
        "sand": 38.0,
        "silt": 38.0,
        "soc": 1.8,
        "wv0010": 0.32,
        "wv0033": 0.24,
        "wv1500": 0.12,
    },
    SoilDepthData.DEPTH_5_15: {
        "bdod": 1.28,
        "cec": 17.2,
        "cfvo": 4.0,
        "clay": 26.0,
        "nitrogen": 0.18,
        "ocd": 23.0,
        "ocs": 3.6,
        "phh2o": 6.8,
        "sand": 36.0,
        "silt": 38.0,
        "soc": 1.5,
        "wv0010": 0.29,
        "wv0033": 0.22,
        "wv1500": 0.11,
    },
    SoilDepthData.DEPTH_15_30: {
        "bdod": 1.34,
        "cec": 15.9,
        "cfvo": 5.0,
        "clay": 28.0,
        "nitrogen": 0.14,
        "ocd": 19.0,
        "ocs": 2.9,
        "phh2o": 6.9,
        "sand": 34.0,
        "silt": 38.0,
        "soc": 1.2,
        "wv0010": 0.26,
        "wv0033": 0.19,
        "wv1500": 0.09,
    },
}
class Command(BaseCommand):
    """Seed a fixed demo SoilLocation and its three soil depth rows (idempotent)."""

    help = "Seed a fixed center location at 50.00, 50.00 plus three soil depth rows."

    def handle(self, *args, **options):
        """Create or update the demo location and its SoilDepthData rows, logging each."""
        # update_or_create keys on the coordinates, so re-running refreshes
        # the same location instead of duplicating it.
        location, created = SoilLocation.objects.update_or_create(
            latitude=DEMO_LATITUDE,
            longitude=DEMO_LONGITUDE,
            defaults={
                "task_id": "",
                "farm_boundary": DEMO_BOUNDARY,
            },
        )
        status_text = "Created" if created else "Updated"
        self.stdout.write(
            self.style.SUCCESS(
                f"{status_text} SoilLocation id={location.id} at ({location.latitude}, {location.longitude})"
            )
        )
        for depth_label, values in DEMO_SOIL_DEPTHS.items():
            _, depth_created = SoilDepthData.objects.update_or_create(
                soil_location=location,
                depth_label=depth_label,
                defaults=values,
            )
            depth_status = "Created" if depth_created else "Updated"
            self.stdout.write(
                self.style.SUCCESS(f"  {depth_status} SoilDepthData {depth_label}")
            )
        self.stdout.write(self.style.SUCCESS("\nDone seeding location_data demo records."))
@@ -0,0 +1,45 @@
from django.db import migrations, models
def build_default_layout():
    """Initial block layout default: the whole farm as a single pending block."""
    default_block = {
        "block_code": "block-1",
        "order": 1,
        "source": "default",
        "needs_subdivision": None,
        "sub_blocks": [],
    }
    return {
        "input_block_count": 1,
        "default_full_farm": True,
        "algorithm_status": "pending",
        "blocks": [default_block],
    }
class Migration(migrations.Migration):
    """Add per-location block layout: a JSON layout field plus an initial block count."""

    dependencies = [
        ("location_data", "0007_ndviobservation"),
    ]

    operations = [
        # JSON layout; callable default gives each row its own fresh dict.
        migrations.AddField(
            model_name="soillocation",
            name="block_layout",
            field=models.JSONField(
                blank=True,
                default=build_default_layout,
                help_text="ساختار بلوک‌های زمین. به‌صورت پیش‌فرض کل زمین یک بلوک است و بعداً الگوریتم می‌تواند برای هر بلوک زیر‌بلوک تعریف کند.",
            ),
        ),
        # Number of farmer-declared initial blocks.
        migrations.AddField(
            model_name="soillocation",
            name="input_block_count",
            field=models.PositiveIntegerField(
                default=1,
                help_text="تعداد بلوک‌های اولیه‌ای که کشاورز برای زمین ثبت می‌کند.",
            ),
        ),
    ]
@@ -0,0 +1,38 @@
from django.db import migrations, models
class Migration(migrations.Migration):
    """Create BlockSubdivision, unique per (soil_location, block_code)."""

    dependencies = [
        ("location_data", "0008_soillocation_block_layout"),
    ]

    operations = [
        migrations.CreateModel(
            name="BlockSubdivision",
            fields=[
                ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
                ("block_code", models.CharField(help_text="شناسه بلوکی که این خردسازی برای آن انجام شده است.", max_length=64)),
                ("source_boundary", models.JSONField(blank=True, default=dict, help_text="مرز همان بلوکی که به سرویس subdivision داده شده است.")),
                ("chunk_size_sqm", models.PositiveIntegerField(default=100, help_text="اندازه هر chunk به متر مربع.")),
                ("grid_points", models.JSONField(blank=True, default=list, help_text="نقاط اولیه شبکه داخل مرز بلوک.")),
                ("centroid_points", models.JSONField(blank=True, default=list, help_text="مراکز نهایی بخش‌های خردشده.")),
                ("grid_point_count", models.PositiveIntegerField(default=0)),
                ("centroid_count", models.PositiveIntegerField(default=0)),
                ("status", models.CharField(default="created", help_text="وضعیت تولید subdivision برای این بلوک.", max_length=32)),
                ("metadata", models.JSONField(blank=True, default=dict)),
                ("created_at", models.DateTimeField(auto_now_add=True)),
                ("updated_at", models.DateTimeField(auto_now=True)),
                ("soil_location", models.ForeignKey(on_delete=models.deletion.CASCADE, related_name="block_subdivisions", to="location_data.soillocation")),
            ],
            options={
                "ordering": ["soil_location", "block_code", "-updated_at"],
                "verbose_name": "خردسازی بلوک",
                "verbose_name_plural": "خردسازی بلوک‌ها",
            },
        ),
        # Exactly one subdivision record per block of a location.
        migrations.AddConstraint(
            model_name="blocksubdivision",
            constraint=models.UniqueConstraint(fields=("soil_location", "block_code"), name="location_block_subdivision_unique_location_block_code"),
        ),
    ]
@@ -0,0 +1,21 @@
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add the optional elbow-plot image to BlockSubdivision."""

    dependencies = [
        ("location_data", "0009_blocksubdivision"),
    ]

    operations = [
        migrations.AddField(
            model_name="blocksubdivision",
            name="elbow_plot",
            field=models.ImageField(
                blank=True,
                help_text="تصویر نمودار elbow برای انتخاب تعداد بهینه خوشه‌ها.",
                null=True,
                upload_to="location_data/elbow_plots/",
            ),
        ),
    ]
@@ -0,0 +1,110 @@
from django.db import migrations, models
class Migration(migrations.Migration):
    """Create the remote-sensing models: AnalysisGridCell, RemoteSensingRun,
    AnalysisGridObservation, plus their indexes and uniqueness constraint."""

    dependencies = [
        ("location_data", "0010_blocksubdivision_elbow_plot"),
    ]

    operations = [
        # One row per analysis grid cell of a location/block.
        migrations.CreateModel(
            name="AnalysisGridCell",
            fields=[
                ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
                ("block_code", models.CharField(blank=True, db_index=True, default="", help_text="شناسه بلوکی که این سلول به آن تعلق دارد.", max_length=64)),
                ("cell_code", models.CharField(help_text="شناسه یکتای سلول تحلیل.", max_length=128, unique=True)),
                ("chunk_size_sqm", models.PositiveIntegerField(db_index=True, default=900, help_text="اندازه سلول تحلیل به متر مربع.")),
                ("geometry", models.JSONField(blank=True, default=dict, help_text="هندسه سلول به صورت GeoJSON polygon یا ساختار مشابه.")),
                ("centroid_lat", models.DecimalField(db_index=True, decimal_places=6, help_text="عرض جغرافیایی مرکز سلول.", max_digits=9)),
                ("centroid_lon", models.DecimalField(db_index=True, decimal_places=6, help_text="طول جغرافیایی مرکز سلول.", max_digits=9)),
                ("created_at", models.DateTimeField(auto_now_add=True, db_index=True)),
                ("updated_at", models.DateTimeField(auto_now=True)),
                ("block_subdivision", models.ForeignKey(blank=True, null=True, on_delete=models.deletion.SET_NULL, related_name="analysis_grid_cells", to="location_data.blocksubdivision")),
                ("soil_location", models.ForeignKey(on_delete=models.deletion.CASCADE, related_name="analysis_grid_cells", to="location_data.soillocation")),
            ],
            options={
                "verbose_name": "analysis grid cell",
                "verbose_name_plural": "analysis grid cells",
                "ordering": ["soil_location", "block_code", "cell_code"],
            },
        ),
        # One row per remote-sensing fetch/processing run.
        migrations.CreateModel(
            name="RemoteSensingRun",
            fields=[
                ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
                ("block_code", models.CharField(blank=True, db_index=True, default="", help_text="شناسه بلوکی که این run برای آن اجرا شده است.", max_length=64)),
                ("provider", models.CharField(default="openeo", help_text="ارائه‌دهنده داده سنجش‌ازدور.", max_length=64)),
                ("chunk_size_sqm", models.PositiveIntegerField(default=900, help_text="اندازه هر سلول تحلیل به متر مربع.")),
                ("temporal_start", models.DateField(blank=True, null=True)),
                ("temporal_end", models.DateField(blank=True, null=True)),
                ("status", models.CharField(choices=[("pending", "Pending"), ("running", "Running"), ("success", "Success"), ("failure", "Failure")], db_index=True, default="pending", max_length=16)),
                ("metadata", models.JSONField(blank=True, default=dict)),
                ("error_message", models.TextField(blank=True, default="")),
                ("started_at", models.DateTimeField(blank=True, null=True)),
                ("finished_at", models.DateTimeField(blank=True, null=True)),
                ("created_at", models.DateTimeField(auto_now_add=True, db_index=True)),
                ("updated_at", models.DateTimeField(auto_now=True)),
                ("block_subdivision", models.ForeignKey(blank=True, null=True, on_delete=models.deletion.SET_NULL, related_name="remote_sensing_runs", to="location_data.blocksubdivision")),
                ("soil_location", models.ForeignKey(on_delete=models.deletion.CASCADE, related_name="remote_sensing_runs", to="location_data.soillocation")),
            ],
            options={
                "verbose_name": "remote sensing run",
                "verbose_name_plural": "remote sensing runs",
                "ordering": ["-created_at", "-id"],
            },
        ),
        # Per-cell feature values for a given temporal window.
        migrations.CreateModel(
            name="AnalysisGridObservation",
            fields=[
                ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
                ("temporal_start", models.DateField(db_index=True)),
                ("temporal_end", models.DateField(db_index=True)),
                ("ndvi", models.FloatField(blank=True, null=True)),
                ("ndwi", models.FloatField(blank=True, null=True)),
                ("lst_c", models.FloatField(blank=True, null=True)),
                ("soil_vv", models.FloatField(blank=True, null=True)),
                ("soil_vv_db", models.FloatField(blank=True, null=True)),
                ("dem_m", models.FloatField(blank=True, null=True)),
                ("slope_deg", models.FloatField(blank=True, null=True)),
                ("metadata", models.JSONField(blank=True, default=dict)),
                ("created_at", models.DateTimeField(auto_now_add=True, db_index=True)),
                ("updated_at", models.DateTimeField(auto_now=True)),
                ("cell", models.ForeignKey(on_delete=models.deletion.CASCADE, related_name="observations", to="location_data.analysisgridcell")),
                ("run", models.ForeignKey(blank=True, null=True, on_delete=models.deletion.SET_NULL, related_name="observations", to="location_data.remotesensingrun")),
            ],
            options={
                "verbose_name": "analysis grid observation",
                "verbose_name_plural": "analysis grid observations",
                "ordering": ["-temporal_start", "-temporal_end", "-id"],
            },
        ),
        migrations.AddIndex(
            model_name="analysisgridcell",
            index=models.Index(fields=["soil_location", "block_code"], name="grid_cell_loc_block_idx"),
        ),
        migrations.AddIndex(
            model_name="analysisgridcell",
            index=models.Index(fields=["soil_location", "chunk_size_sqm"], name="grid_cell_loc_chunk_idx"),
        ),
        migrations.AddIndex(
            model_name="remotesensingrun",
            index=models.Index(fields=["soil_location", "status", "created_at"], name="rs_run_loc_status_created_idx"),
        ),
        migrations.AddIndex(
            model_name="remotesensingrun",
            index=models.Index(fields=["block_code", "created_at"], name="rs_run_block_created_idx"),
        ),
        # At most one observation per cell and temporal window.
        migrations.AddConstraint(
            model_name="analysisgridobservation",
            constraint=models.UniqueConstraint(fields=("cell", "temporal_start", "temporal_end"), name="grid_obs_unique_cell_temporal_range"),
        ),
        migrations.AddIndex(
            model_name="analysisgridobservation",
            index=models.Index(fields=["cell", "temporal_start", "temporal_end"], name="grid_obs_cell_temporal_idx"),
        ),
        migrations.AddIndex(
            model_name="analysisgridobservation",
            index=models.Index(fields=["temporal_start", "temporal_end"], name="grid_obs_temporal_idx"),
        ),
    ]
@@ -0,0 +1,65 @@
from django.db import migrations, models


class Migration(migrations.Migration):
    """Create the clustering-output models for remote sensing runs.

    ``RemoteSensingSubdivisionResult`` stores one clustering result per run
    (OneToOne), and ``RemoteSensingClusterAssignment`` stores one row per
    (result, cell) pair with the cluster label and feature vectors.
    """

    dependencies = [
        ("location_data", "0011_remote_sensing_models"),
    ]

    operations = [
        migrations.CreateModel(
            name="RemoteSensingSubdivisionResult",
            fields=[
                ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
                ("block_code", models.CharField(blank=True, db_index=True, default="", max_length=64)),
                ("chunk_size_sqm", models.PositiveIntegerField(default=900)),
                ("temporal_start", models.DateField(db_index=True)),
                ("temporal_end", models.DateField(db_index=True)),
                ("cluster_count", models.PositiveIntegerField(default=0)),
                ("selected_features", models.JSONField(blank=True, default=list)),
                ("skipped_cell_codes", models.JSONField(blank=True, default=list)),
                ("metadata", models.JSONField(blank=True, default=dict)),
                ("created_at", models.DateTimeField(auto_now_add=True, db_index=True)),
                ("updated_at", models.DateTimeField(auto_now=True)),
                # SET_NULL: results survive deletion of the source subdivision.
                ("block_subdivision", models.ForeignKey(blank=True, null=True, on_delete=models.deletion.SET_NULL, related_name="remote_sensing_subdivision_results", to="location_data.blocksubdivision")),
                # OneToOne: at most one subdivision result per run.
                ("run", models.OneToOneField(on_delete=models.deletion.CASCADE, related_name="subdivision_result", to="location_data.remotesensingrun")),
                ("soil_location", models.ForeignKey(on_delete=models.deletion.CASCADE, related_name="remote_sensing_subdivision_results", to="location_data.soillocation")),
            ],
            options={
                "verbose_name": "remote sensing subdivision result",
                "verbose_name_plural": "remote sensing subdivision results",
                "ordering": ["-created_at", "-id"],
            },
        ),
        migrations.CreateModel(
            name="RemoteSensingClusterAssignment",
            fields=[
                ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
                ("cluster_label", models.PositiveIntegerField(db_index=True)),
                ("raw_feature_values", models.JSONField(blank=True, default=dict)),
                ("scaled_feature_values", models.JSONField(blank=True, default=dict)),
                ("created_at", models.DateTimeField(auto_now_add=True, db_index=True)),
                ("updated_at", models.DateTimeField(auto_now=True)),
                ("cell", models.ForeignKey(on_delete=models.deletion.CASCADE, related_name="cluster_assignments", to="location_data.analysisgridcell")),
                ("result", models.ForeignKey(on_delete=models.deletion.CASCADE, related_name="assignments", to="location_data.remotesensingsubdivisionresult")),
            ],
            options={
                "verbose_name": "remote sensing cluster assignment",
                "verbose_name_plural": "remote sensing cluster assignments",
                "ordering": ["cluster_label", "cell__cell_code"],
            },
        ),
        migrations.AddIndex(
            model_name="remotesensingsubdivisionresult",
            index=models.Index(fields=["soil_location", "block_code", "temporal_start", "temporal_end"], name="rs_subdiv_result_lookup_idx"),
        ),
        # A cell may appear at most once per result.
        migrations.AddConstraint(
            model_name="remotesensingclusterassignment",
            constraint=models.UniqueConstraint(fields=("result", "cell"), name="rs_cluster_assign_unique_result_cell"),
        ),
        migrations.AddIndex(
            model_name="remotesensingclusterassignment",
            index=models.Index(fields=["result", "cluster_label"], name="rs_cluster_assign_result_label_idx"),
        ),
    ]
@@ -0,0 +1,14 @@
from django.db import migrations


class Migration(migrations.Migration):
    """Remove the legacy ``SoilDepthData`` model.

    Destructive: dropping the model deletes its table and all stored rows.
    """

    dependencies = [
        ("location_data", "0012_remote_sensing_subdivision_models"),
    ]

    operations = [
        migrations.DeleteModel(
            name="SoilDepthData",
        ),
    ]
@@ -0,0 +1,15 @@
from django.db import migrations, models


class Migration(migrations.Migration):
    """Update the ``help_text`` of ``BlockSubdivision.chunk_size_sqm``.

    No database schema change: only field metadata is altered.
    """

    dependencies = [
        ("location_data", "0013_remove_soildepthdata"),
    ]

    operations = [
        migrations.AlterField(
            model_name="blocksubdivision",
            name="chunk_size_sqm",
            field=models.PositiveIntegerField(default=900, help_text="اندازه هر chunk به متر مربع."),
        ),
    ]
+414 -41
View File
@@ -1,10 +1,47 @@
from django.db import models from django.db import models
def build_block_layout(block_count: int = 1, blocks: list[dict] | None = None) -> dict:
normalized_blocks = []
if blocks:
for index, block in enumerate(blocks):
normalized_blocks.append(
{
"block_code": str(block.get("block_code") or f"block-{index + 1}").strip(),
"order": int(block.get("order") or index + 1),
"source": "input",
"boundary": block.get("boundary") or {},
"needs_subdivision": None,
"sub_blocks": [],
}
)
else:
normalized_count = max(int(block_count or 1), 1)
for index in range(normalized_count):
normalized_blocks.append(
{
"block_code": f"block-{index + 1}",
"order": index + 1,
"source": "input" if normalized_count > 1 else "default",
"boundary": {},
"needs_subdivision": None,
"sub_blocks": [],
}
)
normalized_count = len(normalized_blocks) if normalized_blocks else max(int(block_count or 1), 1)
return {
"input_block_count": normalized_count,
"default_full_farm": normalized_count == 1,
"algorithm_status": "pending",
"blocks": normalized_blocks,
}
class SoilLocation(models.Model): class SoilLocation(models.Model):
""" """
مرکز زمین برای دادههای خاک و مزرعه. مرکز زمین و مرز مزرعه/بلوکهای تعریفشده توسط کشاورز.
هر مختصات سه سطر در SoilDepthData دارد (۰۵، ۵۱۵، ۱۵۳۰ سانتیمتر).
""" """
latitude = models.DecimalField( latitude = models.DecimalField(
@@ -33,6 +70,18 @@ class SoilLocation(models.Model):
'می‌تواند GeoJSON polygon یا bbox مثل {"type": "Polygon", "coordinates": [...]} باشد.' 'می‌تواند GeoJSON polygon یا bbox مثل {"type": "Polygon", "coordinates": [...]} باشد.'
), ),
) )
input_block_count = models.PositiveIntegerField(
default=1,
help_text="تعداد بلوک‌های اولیه‌ای که کشاورز برای زمین ثبت می‌کند.",
)
block_layout = models.JSONField(
default=build_block_layout,
blank=True,
help_text=(
"ساختار بلوک‌های زمین. به‌صورت پیش‌فرض کل زمین یک بلوک است و "
"بعداً الگوریتم می‌تواند برای هر بلوک زیر‌بلوک تعریف کند."
),
)
created_at = models.DateTimeField(auto_now_add=True) created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True) updated_at = models.DateTimeField(auto_now=True)
@@ -60,63 +109,387 @@ class SoilLocation(models.Model):
@property @property
def is_complete(self): def is_complete(self):
"""آیا هر سه عمق ذخیره شده‌اند؟""" """آیا حداقل یک run کامل remote sensing برای این location وجود دارد؟"""
return self.depths.count() == 3 return self.remote_sensing_runs.filter(status="success").exists()
def set_input_block_count(self, block_count: int = 1, blocks: list[dict] | None = None):
    """Store the farmer-provided block count and regenerate block_layout."""
    if blocks:
        count = len(blocks)
    else:
        count = max(int(block_count or 1), 1)
    self.input_block_count = count
    self.block_layout = build_block_layout(count, blocks=blocks)
def save(self, *args, **kwargs):
    """Backfill block-count/layout defaults before persisting the row."""
    if not self.input_block_count:
        # Legacy rows (or programmatic zero) collapse to a single block.
        self.input_block_count = 1
    self.block_layout = self.block_layout or build_block_layout(self.input_block_count)
    super().save(*args, **kwargs)
class SoilDepthData(models.Model): class BlockSubdivision(models.Model):
""" """
دادههای خاک برای یک عمق مشخص، مرتبط با یک SoilLocation. نتیجه خردسازی یک بلوک برای یک SoilLocation.
مقادیر خام از API SoilGrids (قبل از اعمال d_factor). grid_points نقاط اولیه شبکه هستند و centroid_points مراکز نهایی بخشها.
""" """
DEPTH_0_5 = "0-5cm" soil_location = models.ForeignKey(
DEPTH_5_15 = "5-15cm" SoilLocation,
DEPTH_15_30 = "15-30cm" on_delete=models.CASCADE,
DEPTH_CHOICES = [ related_name="block_subdivisions",
(DEPTH_0_5, "۰–۵ سانتی‌متر"), )
(DEPTH_5_15, "۵–۱۵ سانتی‌متر"), block_code = models.CharField(
(DEPTH_15_30, "۱۵–۳۰ سانتی‌متر"), max_length=64,
help_text="شناسه بلوکی که این خردسازی برای آن انجام شده است.",
)
source_boundary = models.JSONField(
default=dict,
blank=True,
help_text="مرز همان بلوکی که به سرویس subdivision داده شده است.",
)
chunk_size_sqm = models.PositiveIntegerField(
default=900,
help_text="اندازه هر chunk به متر مربع.",
)
grid_points = models.JSONField(
default=list,
blank=True,
help_text="نقاط اولیه شبکه داخل مرز بلوک.",
)
centroid_points = models.JSONField(
default=list,
blank=True,
help_text="مراکز نهایی بخش‌های خردشده.",
)
grid_point_count = models.PositiveIntegerField(default=0)
centroid_count = models.PositiveIntegerField(default=0)
elbow_plot = models.ImageField(
upload_to="location_data/elbow_plots/",
null=True,
blank=True,
help_text="تصویر نمودار elbow برای انتخاب تعداد بهینه خوشه‌ها.",
)
status = models.CharField(
max_length=32,
default="created",
help_text="وضعیت تولید subdivision برای این بلوک.",
)
metadata = models.JSONField(default=dict, blank=True)
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
class Meta:
constraints = [
models.UniqueConstraint(
fields=["soil_location", "block_code"],
name="location_block_subdivision_unique_location_block_code",
)
]
ordering = ["soil_location", "block_code", "-updated_at"]
verbose_name = "خردسازی بلوک"
verbose_name_plural = "خردسازی بلوک‌ها"
def __str__(self):
return f"BlockSubdivision({self.soil_location_id}, {self.block_code})"
class RemoteSensingRun(models.Model):
STATUS_PENDING = "pending"
STATUS_RUNNING = "running"
STATUS_SUCCESS = "success"
STATUS_FAILURE = "failure"
STATUS_CHOICES = [
(STATUS_PENDING, "Pending"),
(STATUS_RUNNING, "Running"),
(STATUS_SUCCESS, "Success"),
(STATUS_FAILURE, "Failure"),
] ]
soil_location = models.ForeignKey( soil_location = models.ForeignKey(
SoilLocation, SoilLocation,
on_delete=models.CASCADE, on_delete=models.CASCADE,
related_name="depths", related_name="remote_sensing_runs",
) )
depth_label = models.CharField( block_subdivision = models.ForeignKey(
max_length=10, BlockSubdivision,
choices=DEPTH_CHOICES, on_delete=models.SET_NULL,
null=True,
blank=True,
related_name="remote_sensing_runs",
)
block_code = models.CharField(
max_length=64,
blank=True,
default="",
db_index=True,
help_text="شناسه بلوکی که این run برای آن اجرا شده است.",
)
provider = models.CharField(
max_length=64,
default="openeo",
help_text="ارائه‌دهنده داده سنجش‌ازدور.",
)
chunk_size_sqm = models.PositiveIntegerField(
default=900,
help_text="اندازه هر سلول تحلیل به متر مربع.",
)
temporal_start = models.DateField(null=True, blank=True)
temporal_end = models.DateField(null=True, blank=True)
status = models.CharField(
max_length=16,
choices=STATUS_CHOICES,
default=STATUS_PENDING,
db_index=True, db_index=True,
) )
# خواص خاک — مقادیر mean از API (raw) metadata = models.JSONField(default=dict, blank=True)
bdod = models.FloatField(null=True, blank=True) error_message = models.TextField(blank=True, default="")
cec = models.FloatField(null=True, blank=True) started_at = models.DateTimeField(null=True, blank=True)
cfvo = models.FloatField(null=True, blank=True) finished_at = models.DateTimeField(null=True, blank=True)
clay = models.FloatField(null=True, blank=True) created_at = models.DateTimeField(auto_now_add=True, db_index=True)
nitrogen = models.FloatField(null=True, blank=True) updated_at = models.DateTimeField(auto_now=True)
ocd = models.FloatField(null=True, blank=True)
ocs = models.FloatField(null=True, blank=True)
phh2o = models.FloatField(null=True, blank=True)
sand = models.FloatField(null=True, blank=True)
silt = models.FloatField(null=True, blank=True)
soc = models.FloatField(null=True, blank=True)
wv0010 = models.FloatField(null=True, blank=True)
wv0033 = models.FloatField(null=True, blank=True)
wv1500 = models.FloatField(null=True, blank=True)
created_at = models.DateTimeField(auto_now_add=True)
class Meta: class Meta:
constraints = [ ordering = ["-created_at", "-id"]
models.UniqueConstraint( indexes = [
fields=["soil_location", "depth_label"], models.Index(
name="soil_depth_unique_location_depth", fields=["soil_location", "status", "created_at"],
) name="rs_run_loc_status_created_idx",
),
models.Index(
fields=["block_code", "created_at"],
name="rs_run_block_created_idx",
),
] ]
ordering = ["soil_location", "depth_label"] verbose_name = "remote sensing run"
verbose_name_plural = "remote sensing runs"
def __str__(self): def __str__(self):
return f"SoilDepthData({self.soil_location_id}, {self.depth_label})" block_text = self.block_code or "farm"
return f"RemoteSensingRun({self.soil_location_id}, {block_text}, {self.status})"
@property
def normalized_status(self) -> str:
    """
    Return the client-facing lifecycle status while keeping legacy DB values stable.

    DB keeps "success"/"failure"; clients see "completed"/"failed"; other
    statuses ("pending", "running") pass through unchanged.
    """
    aliases = {
        self.STATUS_SUCCESS: "completed",
        self.STATUS_FAILURE: "failed",
    }
    return aliases.get(self.status, self.status)
class AnalysisGridCell(models.Model):
    """One fixed-size cell of a farm's analysis grid.

    Cells carry a globally unique ``cell_code``, a GeoJSON-like ``geometry``
    and centroid coordinates; remote-sensing observations and cluster
    assignments reference cells via foreign keys.
    """

    soil_location = models.ForeignKey(
        SoilLocation,
        on_delete=models.CASCADE,
        related_name="analysis_grid_cells",
    )
    # SET_NULL: a cell outlives deletion of the subdivision that produced it.
    block_subdivision = models.ForeignKey(
        BlockSubdivision,
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        related_name="analysis_grid_cells",
    )
    block_code = models.CharField(
        max_length=64,
        blank=True,
        default="",
        db_index=True,
        help_text="شناسه بلوکی که این سلول به آن تعلق دارد.",
    )
    # Globally unique identifier; also used as the GeoJSON feature id in
    # openEO aggregate_spatial requests (see openeo_service).
    cell_code = models.CharField(
        max_length=128,
        unique=True,
        help_text="شناسه یکتای سلول تحلیل.",
    )
    chunk_size_sqm = models.PositiveIntegerField(
        default=900,
        db_index=True,
        help_text="اندازه سلول تحلیل به متر مربع.",
    )
    geometry = models.JSONField(
        default=dict,
        blank=True,
        help_text="هندسه سلول به صورت GeoJSON polygon یا ساختار مشابه.",
    )
    centroid_lat = models.DecimalField(
        max_digits=9,
        decimal_places=6,
        db_index=True,
        help_text="عرض جغرافیایی مرکز سلول.",
    )
    centroid_lon = models.DecimalField(
        max_digits=9,
        decimal_places=6,
        db_index=True,
        help_text="طول جغرافیایی مرکز سلول.",
    )
    created_at = models.DateTimeField(auto_now_add=True, db_index=True)
    updated_at = models.DateTimeField(auto_now=True)

    class Meta:
        ordering = ["soil_location", "block_code", "cell_code"]
        indexes = [
            models.Index(
                fields=["soil_location", "block_code"],
                name="grid_cell_loc_block_idx",
            ),
            models.Index(
                fields=["soil_location", "chunk_size_sqm"],
                name="grid_cell_loc_chunk_idx",
            ),
        ]
        verbose_name = "analysis grid cell"
        verbose_name_plural = "analysis grid cells"

    def __str__(self):
        return f"AnalysisGridCell({self.cell_code})"
class AnalysisGridObservation(models.Model):
    """Remote-sensing metric values for one cell over one temporal window.

    One row per (cell, temporal_start, temporal_end); all metric columns are
    nullable because individual metrics can fail or be unsupported.
    """

    cell = models.ForeignKey(
        AnalysisGridCell,
        on_delete=models.CASCADE,
        related_name="observations",
    )
    # SET_NULL: observations are kept even if the producing run is deleted.
    run = models.ForeignKey(
        RemoteSensingRun,
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        related_name="observations",
    )
    temporal_start = models.DateField(db_index=True)
    temporal_end = models.DateField(db_index=True)
    # Per-cell aggregated metrics (see openeo_service.METRIC_NAMES).
    ndvi = models.FloatField(null=True, blank=True)
    ndwi = models.FloatField(null=True, blank=True)
    lst_c = models.FloatField(null=True, blank=True)
    soil_vv = models.FloatField(null=True, blank=True)
    # dB counterpart derived from soil_vv (10*log10).
    soil_vv_db = models.FloatField(null=True, blank=True)
    dem_m = models.FloatField(null=True, blank=True)
    slope_deg = models.FloatField(null=True, blank=True)
    metadata = models.JSONField(default=dict, blank=True)
    created_at = models.DateTimeField(auto_now_add=True, db_index=True)
    updated_at = models.DateTimeField(auto_now=True)

    class Meta:
        ordering = ["-temporal_start", "-temporal_end", "-id"]
        constraints = [
            # Enforces one observation per cell per temporal window.
            models.UniqueConstraint(
                fields=["cell", "temporal_start", "temporal_end"],
                name="grid_obs_unique_cell_temporal_range",
            )
        ]
        indexes = [
            models.Index(
                fields=["cell", "temporal_start", "temporal_end"],
                name="grid_obs_cell_temporal_idx",
            ),
            models.Index(
                fields=["temporal_start", "temporal_end"],
                name="grid_obs_temporal_idx",
            ),
        ]
        verbose_name = "analysis grid observation"
        verbose_name_plural = "analysis grid observations"

    def __str__(self):
        return (
            f"AnalysisGridObservation({self.cell_id}, "
            f"{self.temporal_start}, {self.temporal_end})"
        )
class RemoteSensingSubdivisionResult(models.Model):
    """Clustering output of one remote-sensing run (at most one per run).

    Stores the chosen cluster count, the feature names used for clustering,
    and the cells that were skipped; per-cell labels live in
    ``RemoteSensingClusterAssignment``.
    """

    soil_location = models.ForeignKey(
        SoilLocation,
        on_delete=models.CASCADE,
        related_name="remote_sensing_subdivision_results",
    )
    # OneToOne: reachable from the run as `run.subdivision_result`.
    run = models.OneToOneField(
        RemoteSensingRun,
        on_delete=models.CASCADE,
        related_name="subdivision_result",
    )
    # SET_NULL: the result survives deletion of the source subdivision.
    block_subdivision = models.ForeignKey(
        BlockSubdivision,
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        related_name="remote_sensing_subdivision_results",
    )
    # Empty string means the result covers the whole farm.
    block_code = models.CharField(
        max_length=64,
        blank=True,
        default="",
        db_index=True,
    )
    chunk_size_sqm = models.PositiveIntegerField(default=900)
    temporal_start = models.DateField(db_index=True)
    temporal_end = models.DateField(db_index=True)
    cluster_count = models.PositiveIntegerField(default=0)
    # Feature names fed into the clustering algorithm.
    selected_features = models.JSONField(default=list, blank=True)
    # cell_codes excluded from clustering (e.g. incomplete data).
    skipped_cell_codes = models.JSONField(default=list, blank=True)
    metadata = models.JSONField(default=dict, blank=True)
    created_at = models.DateTimeField(auto_now_add=True, db_index=True)
    updated_at = models.DateTimeField(auto_now=True)

    class Meta:
        ordering = ["-created_at", "-id"]
        indexes = [
            models.Index(
                fields=["soil_location", "block_code", "temporal_start", "temporal_end"],
                name="rs_subdiv_result_lookup_idx",
            )
        ]
        verbose_name = "remote sensing subdivision result"
        verbose_name_plural = "remote sensing subdivision results"

    def __str__(self):
        return (
            f"RemoteSensingSubdivisionResult({self.soil_location_id}, "
            f"{self.block_code or 'farm'}, clusters={self.cluster_count})"
        )
class RemoteSensingClusterAssignment(models.Model):
    """Cluster label for one grid cell within one subdivision result.

    Keeps both the raw and scaled feature vectors that were used to assign
    the label, for auditability. Unique per (result, cell).
    """

    result = models.ForeignKey(
        RemoteSensingSubdivisionResult,
        on_delete=models.CASCADE,
        related_name="assignments",
    )
    cell = models.ForeignKey(
        AnalysisGridCell,
        on_delete=models.CASCADE,
        related_name="cluster_assignments",
    )
    cluster_label = models.PositiveIntegerField(db_index=True)
    # Feature values before scaling/normalization.
    raw_feature_values = models.JSONField(default=dict, blank=True)
    # Feature values as seen by the clustering algorithm.
    scaled_feature_values = models.JSONField(default=dict, blank=True)
    created_at = models.DateTimeField(auto_now_add=True, db_index=True)
    updated_at = models.DateTimeField(auto_now=True)

    class Meta:
        ordering = ["cluster_label", "cell__cell_code"]
        constraints = [
            models.UniqueConstraint(
                fields=["result", "cell"],
                name="rs_cluster_assign_unique_result_cell",
            )
        ]
        indexes = [
            models.Index(
                fields=["result", "cluster_label"],
                name="rs_cluster_assign_result_label_idx",
            )
        ]
        verbose_name = "remote sensing cluster assignment"
        verbose_name_plural = "remote sensing cluster assignments"

    def __str__(self):
        return f"RemoteSensingClusterAssignment({self.result_id}, {self.cell_id}, {self.cluster_label})"
class NdviObservation(models.Model): class NdviObservation(models.Model):
+476
View File
@@ -0,0 +1,476 @@
from __future__ import annotations
import math
import os
from dataclasses import dataclass
from datetime import date
from decimal import Decimal
from typing import Any
from .models import AnalysisGridCell
# Default federated openEO endpoint (Copernicus Data Space Ecosystem).
DEFAULT_OPENEO_BACKEND_URL = "https://openeofed.dataspace.copernicus.eu"
# Provider label recorded in result metadata.
DEFAULT_OPENEO_PROVIDER = "openeo"
# openEO collection identifiers used by the metric runners below.
SENTINEL2_COLLECTION = "SENTINEL2_L2A"
SENTINEL3_LST_COLLECTION = "SENTINEL3_SLSTR_L2_LST"
SENTINEL1_COLLECTION = "SENTINEL1_GRD"
COPERNICUS_DEM_COLLECTION = "COPERNICUS_30"
# Sentinel-2 SCL classes kept as valid pixels for NDVI/NDWI masking
# (presumably 4=vegetation, 5=not vegetated, 6=water — confirm against the
# S2 scene-classification table).
VALID_SCL_CLASSES = (4, 5, 6)
# Canonical per-cell metric keys; soil_vv_db is derived locally from soil_vv.
METRIC_NAMES = (
    "ndvi",
    "ndwi",
    "lst_c",
    "soil_vv",
    "soil_vv_db",
    "dem_m",
    "slope_deg",
)
# Root of this module's exception hierarchy; callers may catch this alone.
class OpenEOServiceError(Exception):
    """Base exception for openEO service failures."""
# Raised by connect_openeo for missing credentials or failed OIDC flows.
class OpenEOAuthenticationError(OpenEOServiceError):
    """Raised when authentication with the openEO backend fails."""
# Raised by compute_remote_sensing_metrics / response parsing on fatal errors.
class OpenEOExecutionError(OpenEOServiceError):
    """Raised when a metric process graph can not be executed successfully."""
@dataclass(frozen=True)
class OpenEOConnectionSettings:
    """Immutable connection/auth configuration for the openEO backend."""

    backend_url: str = DEFAULT_OPENEO_BACKEND_URL
    auth_method: str = "client_credentials"
    client_id: str = ""
    client_secret: str = ""
    provider_id: str = ""
    username: str = ""
    password: str = ""
    allow_interactive_oidc: bool = False

    @classmethod
    def from_env(cls) -> "OpenEOConnectionSettings":
        """Build settings from OPENEO_* environment variables."""
        env = os.environ
        interactive_flag = env.get("OPENEO_ALLOW_INTERACTIVE_OIDC", "0").strip().lower()
        return cls(
            backend_url=env.get("OPENEO_BACKEND_URL", DEFAULT_OPENEO_BACKEND_URL).strip(),
            auth_method=env.get("OPENEO_AUTH_METHOD", "client_credentials").strip().lower(),
            client_id=env.get("OPENEO_AUTH_CLIENT_ID", "").strip(),
            client_secret=env.get("OPENEO_AUTH_CLIENT_SECRET", "").strip(),
            provider_id=env.get("OPENEO_AUTH_PROVIDER_ID", "").strip(),
            username=env.get("OPENEO_USERNAME", "").strip(),
            password=env.get("OPENEO_PASSWORD", "").strip(),
            allow_interactive_oidc=interactive_flag in {"1", "true", "yes", "on"},
        )
def connect_openeo(settings: OpenEOConnectionSettings | None = None):
    """
    Build an authenticated openEO connection using environment-driven configuration.
    Preferred authentication mode in production is OIDC client credentials.

    Raises OpenEOServiceError when the `openeo` client is missing and
    OpenEOAuthenticationError for misconfiguration or auth failures.
    """
    settings = settings or OpenEOConnectionSettings.from_env()
    try:
        import openeo
    except ImportError as exc:  # pragma: no cover - runtime dependency guard
        raise OpenEOServiceError("The `openeo` Python client is required for remote sensing jobs.") from exc
    connection = openeo.connect(settings.backend_url)
    try:
        # provider_id is optional and shared by every auth flow.
        shared_kwargs: dict = {}
        if settings.provider_id:
            shared_kwargs["provider_id"] = settings.provider_id
        method = settings.auth_method
        if method == "client_credentials":
            if not (settings.client_id and settings.client_secret):
                raise OpenEOAuthenticationError(
                    "OPENEO_AUTH_CLIENT_ID and OPENEO_AUTH_CLIENT_SECRET must be configured."
                )
            return connection.authenticate_oidc_client_credentials(
                client_id=settings.client_id,
                client_secret=settings.client_secret,
                **shared_kwargs,
            )
        if method == "password":
            if not (settings.username and settings.password):
                raise OpenEOAuthenticationError(
                    "OPENEO_USERNAME and OPENEO_PASSWORD must be configured for password auth."
                )
            return connection.authenticate_oidc_resource_owner_password_credentials(
                username=settings.username,
                password=settings.password,
                **shared_kwargs,
            )
        if method == "oidc":
            # Interactive device/browser flow is opt-in only; it would hang
            # a headless worker.
            if not settings.allow_interactive_oidc:
                raise OpenEOAuthenticationError(
                    "Interactive OIDC auth is disabled. Use client credentials in Celery workers."
                )
            return connection.authenticate_oidc(**shared_kwargs)
        raise OpenEOAuthenticationError(f"Unsupported OPENEO_AUTH_METHOD: {settings.auth_method}")
    except Exception as exc:
        # Re-raise our own errors untouched; wrap everything else.
        if isinstance(exc, OpenEOServiceError):
            raise
        raise OpenEOAuthenticationError(f"Failed to authenticate with openEO backend: {exc}") from exc
def build_feature_collection(cells: list[AnalysisGridCell]) -> dict[str, Any]:
    """Wrap grid cells in a GeoJSON FeatureCollection for aggregate_spatial.

    Each feature's ``id`` is the cell's unique ``cell_code`` so aggregate
    results can be mapped back to cells.
    """
    return {
        "type": "FeatureCollection",
        "features": [
            {
                "type": "Feature",
                "id": cell.cell_code,
                "properties": {
                    "cell_code": cell.cell_code,
                    "block_code": cell.block_code,
                    "soil_location_id": cell.soil_location_id,
                },
                "geometry": cell.geometry,
            }
            for cell in cells
        ],
    }
def build_spatial_extent(cells: list[AnalysisGridCell]) -> dict[str, float]:
    """Compute the bounding box of all cells' outer polygon rings.

    Args:
        cells: Grid cells whose ``geometry`` holds a GeoJSON-like polygon
            (only the first/outer ring is inspected).

    Returns:
        dict with ``west``/``south``/``east``/``north`` floats.

    Raises:
        ValueError: if ``cells`` is empty, or no cell carries any usable
            coordinates (previously this crashed with ``float(None)``).
    """
    if not cells:
        raise ValueError("At least one analysis grid cell is required.")
    west = east = south = north = None
    for cell in cells:
        # Outer ring of the polygon; tolerate missing/empty geometry.
        ring = ((cell.geometry or {}).get("coordinates") or [[]])[0]
        for lon, lat in ring:
            west = lon if west is None else min(west, lon)
            east = lon if east is None else max(east, lon)
            south = lat if south is None else min(south, lat)
            north = lat if north is None else max(north, lat)
    if west is None or east is None or south is None or north is None:
        # Every geometry was empty — fail loudly instead of TypeError.
        raise ValueError("Analysis grid cells contain no usable coordinates.")
    return {
        "west": float(west),
        "south": float(south),
        "east": float(east),
        "north": float(north),
    }
def build_empty_metric_payload() -> dict[str, Any]:
    """Return a fresh payload with every metric key initialised to None."""
    return dict.fromkeys(METRIC_NAMES)
def initialize_metric_result_map(cells: list[AnalysisGridCell]) -> dict[str, dict[str, Any]]:
    """Map each cell_code to an empty (all-None) metric payload."""
    result_map: dict[str, dict[str, Any]] = {}
    for cell in cells:
        result_map[cell.cell_code] = build_empty_metric_payload()
    return result_map
def compute_remote_sensing_metrics(
    cells: list[AnalysisGridCell],
    *,
    temporal_start: date | str,
    temporal_end: date | str,
    connection=None,
) -> dict[str, Any]:
    """
    Compute all requested remote sensing metrics in batch mode per metric.
    Returns a normalized structure keyed by `cell_code`, plus execution metadata
    that can be stored by Celery tasks and Django models.

    Any metric failure aborts with OpenEOExecutionError, EXCEPT slope_deg,
    which is best-effort: its failure is recorded in metadata["failed_metrics"]
    and the computation continues.
    """
    if not cells:
        # Short-circuit: empty result shaped like the success path so callers
        # can persist it uniformly.
        return {
            "results": {},
            "metadata": {
                "backend": DEFAULT_OPENEO_PROVIDER,
                "collections_used": [],
                "slope_supported": False,
                "job_refs": {},
                "failed_metrics": [],
            },
        }
    connection = connection or connect_openeo()
    feature_collection = build_feature_collection(cells)
    spatial_extent = build_spatial_extent(cells)
    results = initialize_metric_result_map(cells)
    metadata = {
        "backend": DEFAULT_OPENEO_PROVIDER,
        "backend_url": DEFAULT_OPENEO_BACKEND_URL,
        "collections_used": [
            SENTINEL2_COLLECTION,
            SENTINEL3_LST_COLLECTION,
            SENTINEL1_COLLECTION,
            COPERNICUS_DEM_COLLECTION,
        ],
        "slope_supported": True,
        "job_refs": {},
        "failed_metrics": [],
    }
    # soil_vv_db has no runner of its own: it is derived from soil_vv below.
    metric_runners = [
        ("ndvi", compute_ndvi),
        ("ndwi", compute_ndwi),
        ("lst_c", compute_lst_c),
        ("soil_vv", compute_soil_vv),
        ("dem_m", compute_dem_m),
        ("slope_deg", compute_slope_deg),
    ]
    for metric_name, runner in metric_runners:
        try:
            metric_payload = runner(
                connection=connection,
                feature_collection=feature_collection,
                spatial_extent=spatial_extent,
                temporal_start=temporal_start,
                temporal_end=temporal_end,
            )
            merge_metric_results(results, metric_payload["results"])
            metadata["job_refs"][metric_name] = metric_payload.get("job_ref")
            # A runner can succeed yet report the metric unsupported
            # (compute_slope_deg's graceful-degradation path).
            if metric_name == "slope_deg" and not metric_payload.get("supported", True):
                metadata["slope_supported"] = False
        except Exception as exc:
            if metric_name == "slope_deg":
                # Non-fatal: record and move on.
                metadata["slope_supported"] = False
                metadata["failed_metrics"].append(
                    {"metric": metric_name, "error": str(exc), "non_fatal": True}
                )
                continue
            raise OpenEOExecutionError(f"Failed to compute metric `{metric_name}`: {exc}") from exc
    # Derive dB backscatter from the linear VV mean for every cell.
    for cell_code, payload in results.items():
        soil_vv = payload.get("soil_vv")
        payload["soil_vv_db"] = linear_to_db(soil_vv)
    return {"results": results, "metadata": metadata}
def compute_ndvi(*, connection, feature_collection, spatial_extent, temporal_start, temporal_end) -> dict[str, Any]:
    """Mean NDVI per cell from Sentinel-2 L2A over the given temporal window.

    Pixels whose SCL class is outside VALID_SCL_CLASSES are masked before the
    temporal mean and spatial aggregation.
    """
    cube = connection.load_collection(
        SENTINEL2_COLLECTION,
        spatial_extent=spatial_extent,
        temporal_extent=[_normalize_date(temporal_start), _normalize_date(temporal_end)],
        bands=["B03", "B04", "B08", "SCL"],
    )
    scl = cube.band("SCL")
    # Mask is True for pixels NOT in any of the three valid SCL classes.
    invalid_mask = (scl != VALID_SCL_CLASSES[0]) & (scl != VALID_SCL_CLASSES[1]) & (scl != VALID_SCL_CLASSES[2])
    # 1e-4 scaling — presumably DN-to-reflectance; confirm against backend scale.
    red = cube.band("B04") * 0.0001
    nir = cube.band("B08") * 0.0001
    ndvi = ((nir - red) / (nir + red)).mask(invalid_mask.resample_cube_spatial(red))
    aggregated = ndvi.mean_time().aggregate_spatial(geometries=feature_collection, reducer="mean").execute()
    return {"results": parse_aggregate_spatial_response(aggregated, "ndvi")}
def compute_ndwi(*, connection, feature_collection, spatial_extent, temporal_start, temporal_end) -> dict[str, Any]:
    """Mean NDWI (green/NIR) per cell from Sentinel-2 L2A.

    Uses the same SCL-based cloud/quality masking as compute_ndvi.
    """
    cube = connection.load_collection(
        SENTINEL2_COLLECTION,
        spatial_extent=spatial_extent,
        temporal_extent=[_normalize_date(temporal_start), _normalize_date(temporal_end)],
        bands=["B03", "B08", "SCL"],
    )
    scl = cube.band("SCL")
    # Mask is True for pixels NOT in any of the three valid SCL classes.
    invalid_mask = (scl != VALID_SCL_CLASSES[0]) & (scl != VALID_SCL_CLASSES[1]) & (scl != VALID_SCL_CLASSES[2])
    # 1e-4 scaling — presumably DN-to-reflectance; confirm against backend scale.
    green = cube.band("B03") * 0.0001
    nir = cube.band("B08") * 0.0001
    ndwi = ((green - nir) / (green + nir)).mask(invalid_mask.resample_cube_spatial(green))
    aggregated = ndwi.mean_time().aggregate_spatial(geometries=feature_collection, reducer="mean").execute()
    return {"results": parse_aggregate_spatial_response(aggregated, "ndwi")}
def compute_lst_c(*, connection, feature_collection, spatial_extent, temporal_start, temporal_end) -> dict[str, Any]:
    """Mean land-surface temperature per cell from Sentinel-3 SLSTR L2 LST.

    The -273.15 offset converts the collection's values to Celsius
    (assumes the backend delivers kelvin — confirm per collection docs).
    """
    cube = connection.load_collection(
        SENTINEL3_LST_COLLECTION,
        spatial_extent=spatial_extent,
        temporal_extent=[_normalize_date(temporal_start), _normalize_date(temporal_end)],
    )
    # Band naming varies per backend; fall back to the whole cube when unknown.
    # Fix: the original preferred-tuple listed "LST" twice; duplicate removed.
    band_name = infer_band_name(cube, preferred=("LST", "LST_in", "band_0"))
    lst_k = cube.band(band_name) if band_name else cube
    lst_c = lst_k - 273.15
    aggregated = lst_c.mean_time().aggregate_spatial(geometries=feature_collection, reducer="mean").execute()
    return {"results": parse_aggregate_spatial_response(aggregated, "lst_c")}
def compute_soil_vv(*, connection, feature_collection, spatial_extent, temporal_start, temporal_end) -> dict[str, Any]:
    """Mean Sentinel-1 GRD VV backscatter per cell.

    The value is kept as delivered; the dB conversion happens downstream in
    compute_remote_sensing_metrics via linear_to_db.
    """
    cube = connection.load_collection(
        SENTINEL1_COLLECTION,
        spatial_extent=spatial_extent,
        temporal_extent=[_normalize_date(temporal_start), _normalize_date(temporal_end)],
        bands=["VV"],
    )
    vv = cube.band("VV")
    aggregated = vv.mean_time().aggregate_spatial(geometries=feature_collection, reducer="mean").execute()
    return {"results": parse_aggregate_spatial_response(aggregated, "soil_vv")}
def compute_dem_m(*, connection, feature_collection, spatial_extent, temporal_start, temporal_end) -> dict[str, Any]:
    """Mean Copernicus DEM elevation per cell.

    The DEM is static, so unlike the optical metrics no mean_time reduction
    is applied before spatial aggregation.
    """
    cube = connection.load_collection(
        COPERNICUS_DEM_COLLECTION,
        spatial_extent=spatial_extent,
        temporal_extent=[_normalize_date(temporal_start), _normalize_date(temporal_end)],
    )
    # Band naming varies per backend; fall back to the whole cube when unknown.
    band_name = infer_band_name(cube, preferred=("DEM", "elevation", "band_0"))
    dem = cube.band(band_name) if band_name else cube
    aggregated = dem.aggregate_spatial(geometries=feature_collection, reducer="mean").execute()
    return {"results": parse_aggregate_spatial_response(aggregated, "dem_m")}
def compute_slope_deg(*, connection, feature_collection, spatial_extent, temporal_start, temporal_end) -> dict[str, Any]:
    """Mean terrain slope in degrees per cell, derived from the Copernicus DEM.

    Best-effort: not every backend exposes a `slope` process. On any failure,
    the metric is reported as unsupported with slope_deg=None for every
    feature, instead of raising.
    """
    cube = connection.load_collection(
        COPERNICUS_DEM_COLLECTION,
        spatial_extent=spatial_extent,
        temporal_extent=[_normalize_date(temporal_start), _normalize_date(temporal_end)],
    )
    # Band naming varies per backend; fall back to the whole cube when unknown.
    band_name = infer_band_name(cube, preferred=("DEM", "elevation", "band_0"))
    dem = cube.band(band_name) if band_name else cube
    try:
        # slope() is assumed to return radians — hence the rad-to-deg factor.
        slope_rad = dem.slope()
        slope_deg = slope_rad * (180.0 / math.pi)
        aggregated = slope_deg.aggregate_spatial(geometries=feature_collection, reducer="mean").execute()
        return {
            "results": parse_aggregate_spatial_response(aggregated, "slope_deg"),
            "supported": True,
        }
    except Exception:
        # Graceful degradation: every feature gets slope_deg=None.
        return {
            "results": {feature["id"]: {"slope_deg": None} for feature in feature_collection.get("features", [])},
            "supported": False,
        }
def parse_aggregate_spatial_response(payload: Any, metric_name: str) -> dict[str, dict[str, Any]]:
    """
    Parse different JSON shapes returned by openEO aggregate_spatial executions.

    Dispatches to the FeatureCollection, mapping or list parser; None yields
    an empty dict, anything else raises OpenEOExecutionError.
    """
    if payload is None:
        return {}
    if isinstance(payload, dict):
        if payload.get("type") == "FeatureCollection" or "features" in payload:
            return _parse_feature_collection_results(payload, metric_name)
        return _parse_mapping_results(payload, metric_name)
    if isinstance(payload, list):
        return _parse_list_results(payload, metric_name)
    raise OpenEOExecutionError(f"Unsupported openEO aggregate_spatial response type: {type(payload)!r}")
def _parse_feature_collection_results(payload: dict[str, Any], metric_name: str) -> dict[str, dict[str, Any]]:
    """Extract per-feature aggregate values from a GeoJSON FeatureCollection.

    Features without any usable identifier are skipped.
    """
    results: dict[str, dict[str, Any]] = {}
    for feature in payload.get("features", []):
        properties = feature.get("properties") or {}
        # Resolve the id BEFORE stringifying: the original wrapped the whole
        # fallback chain in str(), so a missing id became the truthy string
        # "None" and the skip-guard below never fired.
        raw_id = (
            feature.get("id")
            or properties.get("cell_code")
            or properties.get("id")
        )
        if not raw_id:
            continue
        value = _extract_aggregate_value(properties)
        results[str(raw_id)] = {metric_name: _coerce_float(value)}
    return results
def _parse_mapping_results(payload: dict[str, Any], metric_name: str) -> dict[str, dict[str, Any]]:
    """Parse a plain feature-id -> value mapping (or a {'data': ...} wrapper)."""
    wrapped = payload.get("data")
    if isinstance(wrapped, (dict, list)):
        # Unwrap and re-dispatch through the top-level parser.
        return parse_aggregate_spatial_response(wrapped, metric_name)
    reserved = {"type", "links", "meta"}
    return {
        str(feature_id): {metric_name: _coerce_float(_extract_aggregate_value(value))}
        for feature_id, value in payload.items()
        if feature_id not in reserved
    }
def _parse_list_results(payload: list[Any], metric_name: str) -> dict[str, dict[str, Any]]:
    """Parse a positional list response; dict items may carry their own ids."""
    parsed: dict[str, dict[str, Any]] = {}
    for position, item in enumerate(payload):
        if isinstance(item, dict):
            identifier = item.get("id") or item.get("cell_code") or item.get("feature_id") or position
            parsed[str(identifier)] = {metric_name: _coerce_float(_extract_aggregate_value(item))}
        else:
            # Bare scalars are keyed by their list position.
            parsed[str(position)] = {metric_name: _coerce_float(item)}
    return parsed
def _extract_aggregate_value(value: Any) -> Any:
if isinstance(value, dict):
for key in ("mean", "value", "result", "average"):
if key in value:
return _extract_aggregate_value(value[key])
if len(value) == 1:
return _extract_aggregate_value(next(iter(value.values())))
return None
if isinstance(value, list):
if not value:
return None
return _extract_aggregate_value(value[0])
return value
def merge_metric_results(target: dict[str, dict[str, Any]], updates: dict[str, dict[str, Any]]) -> None:
    """Merge per-cell metric updates into *target* in place.

    Cells missing from *target* are seeded with an empty metric payload before
    the update is applied.
    """
    for cell_code, metric_values in updates.items():
        bucket = target.setdefault(cell_code, build_empty_metric_payload())
        bucket.update(metric_values)
def linear_to_db(value: Any) -> float | None:
    """Convert a linear-scale power value to decibels, rounded to 6 places.

    Returns None for non-numeric or non-positive input (log10 undefined).
    """
    numeric = _coerce_float(value)
    if numeric is not None and numeric > 0:
        return round(10.0 * math.log10(numeric), 6)
    return None
def infer_band_name(cube, preferred: tuple[str, ...]) -> str | None:
"""
Best-effort band name selection for collections with backend-specific naming.
"""
metadata = getattr(cube, "metadata", None)
if metadata is None:
return None
band_dimension = getattr(metadata, "band_dimension", None)
bands = getattr(band_dimension, "bands", None)
if not bands:
return None
available = []
for band in bands:
name = getattr(band, "name", None) or str(band)
available.append(name)
for candidate in preferred:
if candidate in available:
return candidate
return available[0] if available else None
def _coerce_float(value: Any) -> float | None:
if value is None:
return None
if isinstance(value, Decimal):
return float(value)
try:
return float(value)
except (TypeError, ValueError):
return None
def _normalize_date(value: date | str) -> str:
if isinstance(value, date):
return value.isoformat()
return str(value)
+116
View File
@@ -0,0 +1,116 @@
from __future__ import annotations
from typing import Any
from django.db.models import Avg, QuerySet
from .models import AnalysisGridObservation, RemoteSensingRun, SoilLocation
# Observation model fields that are averaged when summarizing a
# remote-sensing run for a location/block snapshot.
SATELLITE_METRIC_FIELDS = (
    "ndvi",
    "ndwi",
    "lst_c",
    "soil_vv_db",
    "dem_m",
    "slope_deg",
)
def build_location_satellite_snapshot(
    location: SoilLocation,
    *,
    block_code: str = "",
) -> dict[str, Any]:
    """Build the latest remote-sensing summary payload for a location/block.

    Returns a "missing" placeholder when no successful run exists yet;
    otherwise returns run metadata, the averaged metrics, and per-metric
    provenance.
    """
    latest_run = get_latest_completed_remote_sensing_run(location, block_code=block_code)
    if latest_run is None:
        # No completed run yet — emit the empty-shape payload.
        return {
            "status": "missing",
            "block_code": block_code,
            "run_id": None,
            "temporal_extent": None,
            "cell_count": 0,
            "resolved_metrics": {},
            "metric_sources": {},
        }
    run_observations = get_run_observations(latest_run)
    metric_summary = summarize_observations(run_observations)
    temporal_extent = {
        "start_date": latest_run.temporal_start.isoformat() if latest_run.temporal_start else None,
        "end_date": latest_run.temporal_end.isoformat() if latest_run.temporal_end else None,
    }
    return {
        "status": "completed",
        "block_code": latest_run.block_code,
        "run_id": latest_run.id,
        "temporal_extent": temporal_extent,
        "cell_count": run_observations.count(),
        "resolved_metrics": metric_summary,
        # Every resolved metric came from the remote-sensing pipeline.
        "metric_sources": dict.fromkeys(metric_summary, "remote_sensing"),
    }
def build_location_block_satellite_snapshots(location: SoilLocation) -> list[dict[str, Any]]:
    """Return one satellite snapshot per defined block.

    Falls back to a single location-wide snapshot when the location has no
    block layout.
    """
    layout = location.block_layout or {}
    defined_blocks = layout.get("blocks") or []
    if not defined_blocks:
        return [build_location_satellite_snapshot(location)]
    return [
        build_location_satellite_snapshot(
            location,
            block_code=str(block.get("block_code") or "").strip(),
        )
        for block in defined_blocks
    ]
def get_latest_completed_remote_sensing_run(
    location: SoilLocation,
    *,
    block_code: str = "",
) -> RemoteSensingRun | None:
    """Fetch the most recent successful run for the location/block, if any.

    "Most recent" means newest temporal_end, with created_at and id as
    tie-breakers.
    """
    candidates = RemoteSensingRun.objects.filter(
        soil_location=location,
        block_code=block_code or "",
        status=RemoteSensingRun.STATUS_SUCCESS,
    )
    return candidates.order_by("-temporal_end", "-created_at", "-id").first()
def get_run_observations(run: RemoteSensingRun) -> QuerySet[AnalysisGridObservation]:
    """Return the grid observations matching the run's location, block, and
    temporal window, ordered by cell code (cell and run pre-fetched)."""
    matching = AnalysisGridObservation.objects.select_related("cell", "run").filter(
        cell__soil_location=run.soil_location,
        cell__block_code=run.block_code or "",
        temporal_start=run.temporal_start,
        temporal_end=run.temporal_end,
    )
    return matching.order_by("cell__cell_code")
def summarize_observations(
    observations: QuerySet[AnalysisGridObservation],
) -> dict[str, float]:
    """Average each satellite metric across the observations.

    Metrics whose aggregate is None (no non-null values) are omitted; present
    means are rounded to 6 decimal places.
    """
    aggregate_kwargs = {
        f"{metric_name}_mean": Avg(metric_name)
        for metric_name in SATELLITE_METRIC_FIELDS
    }
    aggregates = observations.aggregate(**aggregate_kwargs)
    summary: dict[str, float] = {}
    for metric_name in SATELLITE_METRIC_FIELDS:
        mean_value = aggregates.get(f"{metric_name}_mean")
        if mean_value is not None:
            summary[metric_name] = round(float(mean_value), 6)
    return summary
+280 -36
View File
@@ -1,42 +1,49 @@
from rest_framework import serializers from rest_framework import serializers
from .models import SoilDepthData, SoilLocation from .data_driven_subdivision import SUPPORTED_CLUSTER_FEATURES
from .soil_adapters import DEPTHS from .models import (
AnalysisGridObservation,
BlockSubdivision,
RemoteSensingRun,
RemoteSensingClusterAssignment,
RemoteSensingSubdivisionResult,
SoilLocation,
)
from .satellite_snapshot import build_location_block_satellite_snapshots
class SoilDataRequestSerializer(serializers.Serializer): class SoilDataRequestSerializer(serializers.Serializer):
"""سریالایزر ورودی: lon و lat برای درخواست داده خاک.""" """ورودی ثبت مزرعه و بلوک‌های تعریف‌شده توسط کشاورز."""
class BlockInputSerializer(serializers.Serializer):
block_code = serializers.CharField(max_length=64)
boundary = serializers.JSONField()
order = serializers.IntegerField(required=False, min_value=1)
lon = serializers.DecimalField(max_digits=9, decimal_places=6, required=True) lon = serializers.DecimalField(max_digits=9, decimal_places=6, required=True)
lat = serializers.DecimalField(max_digits=9, decimal_places=6, required=True) lat = serializers.DecimalField(max_digits=9, decimal_places=6, required=True)
block_count = serializers.IntegerField(required=False, min_value=1, default=1)
block_code = serializers.CharField(required=False, default="block-1", max_length=64)
farm_boundary = serializers.JSONField(required=False)
blocks = BlockInputSerializer(many=True, required=False)
def validate(self, attrs):
class SoilDepthDataSerializer(serializers.ModelSerializer): blocks = attrs.get("blocks") or []
"""سریالایزر خروجی برای هر عمق خاک.""" if self.context.get("require_farm_boundary") and not attrs.get("farm_boundary"):
raise serializers.ValidationError(
class Meta: {"farm_boundary": ["مختصات گوشه‌های کل زمین باید ارسال شود."]}
model = SoilDepthData )
fields = [ if self.context.get("require_farm_boundary") and not blocks:
"depth_label", raise serializers.ValidationError(
"bdod", {"blocks": ["مختصات بلوک‌های تعریف‌شده توسط کشاورز باید ارسال شود."]}
"cec", )
"cfvo", if blocks:
"clay", attrs["block_count"] = len(blocks)
"nitrogen", return attrs
"ocd",
"ocs",
"phh2o",
"sand",
"silt",
"soc",
"wv0010",
"wv0033",
"wv1500",
]
class SoilLocationResponseSerializer(serializers.ModelSerializer): class SoilLocationResponseSerializer(serializers.ModelSerializer):
"""سریالایزر خروجی برای SoilLocation همراه با depths.""" """سریالایزر خروجی برای SoilLocation همراه با خلاصه سنجش‌ازدور."""
lon = serializers.DecimalField( lon = serializers.DecimalField(
source="longitude", source="longitude",
@@ -50,19 +57,51 @@ class SoilLocationResponseSerializer(serializers.ModelSerializer):
decimal_places=6, decimal_places=6,
read_only=True, read_only=True,
) )
depths = serializers.SerializerMethodField() input_block_count = serializers.IntegerField(read_only=True)
farm_boundary = serializers.JSONField(read_only=True)
block_layout = serializers.JSONField(read_only=True)
block_subdivisions = serializers.SerializerMethodField()
satellite_snapshots = serializers.SerializerMethodField()
class Meta: class Meta:
model = SoilLocation model = SoilLocation
fields = ["id", "lon", "lat", "depths"] fields = [
"id",
"lon",
"lat",
"input_block_count",
"farm_boundary",
"block_layout",
"block_subdivisions",
"satellite_snapshots",
]
def get_depths(self, obj): def get_block_subdivisions(self, obj):
depth_qs = obj.depths.all() subdivisions = obj.block_subdivisions.all().order_by("block_code", "id")
order = {d: i for i, d in enumerate(DEPTHS)} return BlockSubdivisionSerializer(subdivisions, many=True).data
sorted_depths = sorted(
depth_qs, key=lambda d: order.get(d.depth_label, 99) def get_satellite_snapshots(self, obj):
) return build_location_block_satellite_snapshots(obj)
return SoilDepthDataSerializer(sorted_depths, many=True).data
class BlockSubdivisionSerializer(serializers.ModelSerializer):
elbow_plot = serializers.ImageField(read_only=True)
class Meta:
model = BlockSubdivision
fields = [
"block_code",
"chunk_size_sqm",
"grid_points",
"centroid_points",
"grid_point_count",
"centroid_count",
"elbow_plot",
"status",
"metadata",
"created_at",
"updated_at",
]
class SoilDataTaskResponseSerializer(serializers.Serializer): class SoilDataTaskResponseSerializer(serializers.Serializer):
@@ -94,3 +133,208 @@ class NdviHealthResponseSerializer(serializers.Serializer):
observation_date = serializers.CharField(allow_null=True) observation_date = serializers.CharField(allow_null=True)
satellite_source = serializers.CharField(allow_null=True) satellite_source = serializers.CharField(allow_null=True)
healthData = NdviHealthDataItemSerializer(many=True) healthData = NdviHealthDataItemSerializer(many=True)
class RemoteSensingTriggerSerializer(serializers.Serializer):
    """Input payload for triggering a remote-sensing analysis run."""

    lon = serializers.DecimalField(max_digits=9, decimal_places=6, required=True)
    lat = serializers.DecimalField(max_digits=9, decimal_places=6, required=True)
    # An empty block_code means the whole farm rather than one block.
    block_code = serializers.CharField(required=False, allow_blank=True, default="", max_length=64)
    start_date = serializers.DateField(required=True)
    end_date = serializers.DateField(required=True)
    force_refresh = serializers.BooleanField(required=False, default=False)
    cluster_count = serializers.IntegerField(required=False, min_value=1, allow_null=True, default=None)
    selected_features = serializers.ListField(
        child=serializers.CharField(max_length=64),
        required=False,
        allow_empty=False,
    )

    def validate(self, attrs):
        """Reject inverted date ranges and unsupported clustering features.

        Error messages are Persian UI copy.
        """
        if attrs["start_date"] > attrs["end_date"]:
            raise serializers.ValidationError("start_date نمی‌تواند بعد از end_date باشد.")
        selected_features = attrs.get("selected_features") or []
        # Sorted so the error message is deterministic regardless of input order.
        invalid_features = sorted(
            feature_name
            for feature_name in selected_features
            if feature_name not in SUPPORTED_CLUSTER_FEATURES
        )
        if invalid_features:
            raise serializers.ValidationError(
                {
                    "selected_features": [
                        "ویژگی‌های نامعتبر برای خوشه‌بندی: "
                        + ", ".join(invalid_features)
                    ]
                }
            )
        return attrs
class RemoteSensingResultQuerySerializer(RemoteSensingTriggerSerializer):
    """Query parameters for fetching run results: trigger fields plus pagination."""

    page = serializers.IntegerField(required=False, min_value=1, default=1)
    page_size = serializers.IntegerField(required=False, min_value=1, max_value=200, default=100)
class RemoteSensingCellObservationSerializer(serializers.ModelSerializer):
    """One grid cell's observation, flattened with its cell's identity/geometry."""

    # Read-only projections of the related AnalysisGridCell.
    cell_code = serializers.CharField(source="cell.cell_code", read_only=True)
    block_code = serializers.CharField(source="cell.block_code", read_only=True)
    chunk_size_sqm = serializers.IntegerField(source="cell.chunk_size_sqm", read_only=True)
    centroid_lat = serializers.DecimalField(source="cell.centroid_lat", max_digits=9, decimal_places=6, read_only=True)
    centroid_lon = serializers.DecimalField(source="cell.centroid_lon", max_digits=9, decimal_places=6, read_only=True)
    geometry = serializers.JSONField(source="cell.geometry", read_only=True)

    class Meta:
        model = AnalysisGridObservation
        fields = [
            "cell_code",
            "block_code",
            "chunk_size_sqm",
            "centroid_lat",
            "centroid_lon",
            "geometry",
            "temporal_start",
            "temporal_end",
            "ndvi",
            "ndwi",
            "lst_c",
            "soil_vv",
            "soil_vv_db",
            "dem_m",
            "slope_deg",
            "metadata",
        ]
class RemoteSensingSummarySerializer(serializers.Serializer):
    """Aggregated (mean) metrics across a run's cells; nulls when no data."""

    cell_count = serializers.IntegerField()
    ndvi_mean = serializers.FloatField(allow_null=True)
    ndwi_mean = serializers.FloatField(allow_null=True)
    lst_c_mean = serializers.FloatField(allow_null=True)
    soil_vv_db_mean = serializers.FloatField(allow_null=True)
    dem_m_mean = serializers.FloatField(allow_null=True)
    slope_deg_mean = serializers.FloatField(allow_null=True)
class RemoteSensingRunSerializer(serializers.ModelSerializer):
    """Full representation of a RemoteSensingRun, including metadata-derived
    fields (stage, selected features, requested cluster count)."""

    status_label = serializers.SerializerMethodField()
    pipeline_status = serializers.SerializerMethodField()
    stage = serializers.SerializerMethodField()
    selected_features = serializers.SerializerMethodField()
    requested_cluster_count = serializers.SerializerMethodField()

    def get_status_label(self, obj):
        # Model-level normalization of the raw status value.
        return obj.normalized_status

    def get_pipeline_status(self, obj):
        # Intentionally the same value as status_label (kept for API shape).
        return obj.normalized_status

    def get_stage(self, obj):
        # Pipeline stage is tracked in run metadata, not a dedicated column.
        return (obj.metadata or {}).get("stage")

    def get_selected_features(self, obj):
        return (obj.metadata or {}).get("selected_features", [])

    def get_requested_cluster_count(self, obj):
        return (obj.metadata or {}).get("requested_cluster_count")

    class Meta:
        model = RemoteSensingRun
        fields = [
            "id",
            "block_code",
            "chunk_size_sqm",
            "temporal_start",
            "temporal_end",
            "status",
            "status_label",
            "pipeline_status",
            "stage",
            "selected_features",
            "requested_cluster_count",
            "metadata",
            "error_message",
            "started_at",
            "finished_at",
            "created_at",
            "updated_at",
        ]
class RemoteSensingClusterAssignmentSerializer(serializers.ModelSerializer):
    """A single cell's cluster assignment with its centroid coordinates."""

    cell_code = serializers.CharField(source="cell.cell_code", read_only=True)
    centroid_lat = serializers.DecimalField(source="cell.centroid_lat", max_digits=9, decimal_places=6, read_only=True)
    centroid_lon = serializers.DecimalField(source="cell.centroid_lon", max_digits=9, decimal_places=6, read_only=True)

    class Meta:
        model = RemoteSensingClusterAssignment
        fields = [
            "cell_code",
            "cluster_label",
            "centroid_lat",
            "centroid_lon",
            "raw_feature_values",
            "scaled_feature_values",
        ]
class RemoteSensingSubdivisionResultSerializer(serializers.ModelSerializer):
    """Clustering result for a block, with (optionally paginated) assignments."""

    assignments = serializers.SerializerMethodField()

    def get_assignments(self, obj):
        # The view may pass a pre-paginated page via serializer context;
        # otherwise serialize every assignment in deterministic order.
        assignments = self.context.get("paginated_assignments")
        if assignments is None:
            assignments = obj.assignments.all().order_by("cluster_label", "cell__cell_code")
        return RemoteSensingClusterAssignmentSerializer(assignments, many=True).data

    class Meta:
        model = RemoteSensingSubdivisionResult
        fields = [
            "id",
            "block_code",
            "chunk_size_sqm",
            "temporal_start",
            "temporal_end",
            "cluster_count",
            "selected_features",
            "skipped_cell_codes",
            "metadata",
            "assignments",
            "created_at",
            "updated_at",
        ]
class RemoteSensingResponseSerializer(serializers.Serializer):
    """Envelope for a remote-sensing analysis response (trigger endpoint)."""

    status = serializers.CharField()
    source = serializers.CharField()
    location = SoilLocationResponseSerializer()
    block_code = serializers.CharField(allow_blank=True)
    chunk_size_sqm = serializers.IntegerField(allow_null=True)
    temporal_extent = serializers.JSONField()
    summary = RemoteSensingSummarySerializer()
    cells = RemoteSensingCellObservationSerializer(many=True)
    # run/subdivision_result may be null when no pipeline run exists yet.
    run = RemoteSensingRunSerializer(allow_null=True)
    subdivision_result = RemoteSensingSubdivisionResultSerializer(allow_null=True)
    pagination = serializers.JSONField(required=False)
class RemoteSensingRunStatusResponseSerializer(serializers.Serializer):
    """Envelope for polling a run's status, with the Celery task id if known."""

    status = serializers.CharField()
    source = serializers.CharField()
    run = RemoteSensingRunSerializer()
    task_id = serializers.CharField(allow_blank=True, allow_null=True, required=False)
class RemoteSensingRunResultResponseSerializer(serializers.Serializer):
    """Envelope for a completed run's full result (run is always present here)."""

    status = serializers.CharField()
    source = serializers.CharField()
    location = SoilLocationResponseSerializer()
    block_code = serializers.CharField(allow_blank=True)
    chunk_size_sqm = serializers.IntegerField(allow_null=True)
    temporal_extent = serializers.JSONField()
    summary = RemoteSensingSummarySerializer()
    cells = RemoteSensingCellObservationSerializer(many=True)
    run = RemoteSensingRunSerializer()
    subdivision_result = RemoteSensingSubdivisionResultSerializer(allow_null=True)
    pagination = serializers.JSONField(required=False)
-286
View File
@@ -1,286 +0,0 @@
from __future__ import annotations
import hashlib
import math
import random
import time
from abc import ABC, abstractmethod
# "requests" is an optional dependency: the mock adapter works without it,
# and the live SoilGrids adapter raises a clear RuntimeError when missing.
try:
    import requests
except ImportError:  # pragma: no cover - handled when live adapter is used
    requests = None
# ISRIC SoilGrids v2.0 point-query endpoint used by the live adapter.
SOILGRIDS_BASE = "https://rest.isric.org/soilgrids/v2.0/properties/query"
# Soil properties requested from SoilGrids (and synthesized by the mock).
PROPERTIES = [
    "bdod",
    "cec",
    "cfvo",
    "clay",
    "nitrogen",
    "ocd",
    "ocs",
    "phh2o",
    "sand",
    "silt",
    "soc",
    "wv0010",
    "wv0033",
    "wv1500",
]
# Statistic names requested per property (quantiles, mean, uncertainty).
VALUES = ["Q0.5", "Q0.05", "Q0.95", "mean", "uncertainty"]
# Supported depth layers, shallowest first.
DEPTHS = ["0-5cm", "5-15cm", "15-30cm"]
# Depth label -> positional index, used by the mock's depth-dependent formulas.
DEPTH_INDEX = {depth: index for index, depth in enumerate(DEPTHS)}
def _clamp(value: float, lower: float, upper: float) -> float:
return max(lower, min(upper, value))
def _round_field(name: str, value: float) -> float:
if name in {"nitrogen", "soc", "ocs", "wv0010", "wv0033", "wv1500"}:
return round(value, 3)
return round(value, 2)
class BaseSoilDataAdapter(ABC):
    """Common interface for soil-data providers (live SoilGrids or mock)."""

    # Identifier of the provider that produced the data.
    source_name = "base"

    @abstractmethod
    def fetch_depth_fields(self, lon: float, lat: float, depth: str) -> dict:
        """Return normalized field values for a single soil depth."""
class SoilGridsAdapter(BaseSoilDataAdapter):
    """Live adapter that queries the ISRIC SoilGrids REST API."""

    source_name = "soilgrids"

    def __init__(self, base_url: str = SOILGRIDS_BASE, timeout: float = 60):
        self.base_url = base_url
        self.timeout = timeout

    def fetch_depth_fields(self, lon: float, lat: float, depth: str) -> dict:
        """Fetch mean values for all soil properties at one depth.

        Raises RuntimeError when the optional ``requests`` dependency is
        missing, and lets ``requests`` HTTP errors propagate on non-2xx
        responses.
        """
        if requests is None:
            raise RuntimeError("requests package is required for SoilGridsAdapter")
        # SoilGrids expects repeated "property"/"value" query parameters;
        # requests serializes a list value as repeated keys, so the previous
        # setdefault/append loops are replaced with direct list assignment.
        params = {
            "lon": lon,
            "lat": lat,
            "depth": depth,
            "property": list(PROPERTIES),
            "value": list(VALUES),
        }
        response = requests.get(
            self.base_url,
            params=params,
            headers={"accept": "application/json"},
            timeout=self.timeout,
        )
        response.raise_for_status()
        return self._parse_response_to_fields(response.json())

    def _parse_response_to_fields(self, data: dict) -> dict:
        """Map a SoilGrids JSON payload to ``{property: mean or None}``."""
        fields = {prop: None for prop in PROPERTIES}
        layers = data.get("properties", {}).get("layers", [])
        for layer in layers:
            name = layer.get("name")
            if name not in fields:
                continue
            depths_list = layer.get("depths", [])
            if not depths_list:
                continue
            # Only one depth is requested per query, so take the first entry.
            values = depths_list[0].get("values", {})
            mean_value = values.get("mean")
            if mean_value is not None:
                fields[name] = float(mean_value)
        return fields
class MockSoilDataAdapter(BaseSoilDataAdapter):
    """Deterministic offline adapter that synthesizes plausible soil values.

    All values derive from seeded, spatially-smooth noise fields, so the same
    (lon, lat, depth) always yields the same "measurements" for a given
    seed namespace.
    """

    source_name = "mock"

    def __init__(
        self,
        delay_seconds: float = 0.8,
        seed_namespace: str = "croplogic-soil",
    ):
        # Negative delays are clamped to zero (no sleep).
        self.delay_seconds = max(0.0, delay_seconds)
        # Namespace isolates the noise field between deployments/tests.
        self.seed_namespace = seed_namespace

    def fetch_depth_fields(self, lon: float, lat: float, depth: str) -> dict:
        """Synthesize all soil properties for one depth at (lon, lat).

        Raises ValueError for depths not listed in DEPTHS. All outputs are
        clamped to plausible agronomic ranges and rounded via _round_field.
        """
        if depth not in DEPTH_INDEX:
            raise ValueError(f"Unsupported soil depth: {depth}")
        if self.delay_seconds:
            # Simulate the latency of the live provider.
            time.sleep(self.delay_seconds)
        depth_index = DEPTH_INDEX[depth]
        # Independent smooth noise scores (0..1) drive each property family.
        texture_score = self._layered_noise(lon, lat, "texture")
        organic_score = self._layered_noise(lon, lat, "organic")
        moisture_score = self._layered_noise(lon, lat, "moisture")
        mineral_score = self._layered_noise(lon, lat, "mineral")
        stone_score = self._layered_noise(lon, lat, "stone")
        ph_score = self._layered_noise(lon, lat, "ph")
        sand, clay, silt = self._build_texture(
            texture_score=texture_score,
            organic_score=organic_score,
            depth_index=depth_index,
        )
        # Organic carbon: rises with organic score and clay, declines with depth.
        soc = _clamp(
            0.7
            + (organic_score * 1.9)
            + (clay * 0.012)
            - (depth_index * 0.28)
            + ((1 - moisture_score) * 0.08),
            0.45,
            4.2,
        )
        # Nitrogen tracks organic carbon and fine texture.
        nitrogen = _clamp(
            0.04
            + (soc * 0.085)
            + ((1 - (sand / 100.0)) * 0.025)
            + ((2 - depth_index) * 0.008),
            0.03,
            0.42,
        )
        ocd = _clamp(
            10.0 + (soc * 8.5) + (organic_score * 4.0) - (depth_index * 2.6),
            7.0,
            46.0,
        )
        ocs = _clamp(
            1.0 + (soc * 1.55) - (depth_index * 0.28) + (organic_score * 0.12),
            0.5,
            8.5,
        )
        # Cation exchange capacity grows with clay and organic matter.
        cec = _clamp(
            7.0
            + (clay * 0.33)
            + (soc * 1.7)
            + ((1 - (sand / 100.0)) * 2.6)
            + (mineral_score * 1.4),
            5.0,
            38.0,
        )
        # Coarse fragments increase with stoniness and depth.
        cfvo = _clamp(1.0 + (stone_score * 12.0) + (depth_index * 2.4), 0.0, 35.0)
        # Bulk density: sandier/deeper soils denser, organic matter loosens.
        bdod = _clamp(
            1.06
            + (sand * 0.0038)
            + (depth_index * 0.06)
            - (soc * 0.035)
            + (stone_score * 0.03),
            0.95,
            1.62,
        )
        phh2o = _clamp(
            6.2
            + ((ph_score - 0.5) * 1.1)
            + (depth_index * 0.08)
            - (organic_score * 0.12),
            5.6,
            8.1,
        )
        # Water retention: wilting point < field capacity < near saturation,
        # enforced by building each value on top of the previous one.
        wv1500 = _clamp(
            0.05
            + (clay * 0.0016)
            + (soc * 0.012)
            - (sand * 0.0003)
            + (depth_index * 0.004),
            0.05,
            0.22,
        )
        wv0033 = _clamp(
            wv1500 + 0.07 + (clay * 0.0015) + (soc * 0.01) - (sand * 0.0002),
            wv1500 + 0.04,
            0.38,
        )
        wv0010 = _clamp(
            wv0033 + 0.03 + (soc * 0.006) + (moisture_score * 0.01),
            wv0033 + 0.015,
            0.48,
        )
        fields = {
            "bdod": bdod,
            "cec": cec,
            "cfvo": cfvo,
            "clay": clay,
            "nitrogen": nitrogen,
            "ocd": ocd,
            "ocs": ocs,
            "phh2o": phh2o,
            "sand": sand,
            "silt": silt,
            "soc": soc,
            "wv0010": wv0010,
            "wv0033": wv0033,
            "wv1500": wv1500,
        }
        return {name: _round_field(name, value) for name, value in fields.items()}

    def _build_texture(
        self,
        texture_score: float,
        organic_score: float,
        depth_index: int,
    ) -> tuple[float, float, float]:
        """Derive a sand/clay/silt split (percentages summing to 100)."""
        sand = _clamp(
            30.0
            + (texture_score * 28.0)
            + ((organic_score - 0.5) * 3.5)
            - (depth_index * 2.5),
            18.0,
            72.0,
        )
        clay = _clamp(
            13.0
            + ((1 - texture_score) * 18.0)
            + (depth_index * 5.5)
            + ((organic_score - 0.5) * 2.0),
            8.0,
            42.0,
        )
        # Guarantee at least 12% silt by shaving sand/clay proportionally.
        minimum_silt = 12.0
        total = sand + clay
        if total > 100.0 - minimum_silt:
            excess = total - (100.0 - minimum_silt)
            sand -= excess * 0.65
            clay -= excess * 0.35
        silt = 100.0 - sand - clay
        return sand, clay, silt

    def _layered_noise(self, lon: float, lat: float, key: str) -> float:
        """Blend regional/local/micro noise octaves into a 0..1 score."""
        regional = self._smooth_noise(lon, lat, f"{key}:regional", scale=1.7)
        local = self._smooth_noise(lon, lat, f"{key}:local", scale=0.32)
        micro = self._smooth_noise(lon, lat, f"{key}:micro", scale=0.08)
        return _clamp((regional * 0.55) + (local * 0.3) + (micro * 0.15), 0.0, 1.0)

    def _smooth_noise(self, lon: float, lat: float, key: str, scale: float) -> float:
        """Bilinearly interpolate seeded grid noise with smoothstep easing."""
        grid_x = lon / scale
        grid_y = lat / scale
        x0 = math.floor(grid_x)
        y0 = math.floor(grid_y)
        tx = grid_x - x0
        ty = grid_y - y0
        # Corner values of the surrounding grid cell.
        v00 = self._cell_noise(key, x0, y0)
        v10 = self._cell_noise(key, x0 + 1, y0)
        v01 = self._cell_noise(key, x0, y0 + 1)
        v11 = self._cell_noise(key, x0 + 1, y0 + 1)
        # Smoothstep easing: 3t^2 - 2t^3.
        tx = tx * tx * (3.0 - (2.0 * tx))
        ty = ty * ty * (3.0 - (2.0 * ty))
        top = (v00 * (1 - tx)) + (v10 * tx)
        bottom = (v01 * (1 - tx)) + (v11 * tx)
        return (top * (1 - ty)) + (bottom * ty)

    def _cell_noise(self, key: str, grid_x: int, grid_y: int) -> float:
        """Deterministic 0..1 value for one grid corner, seeded by SHA-256."""
        seed_input = f"{self.seed_namespace}:{key}:{grid_x}:{grid_y}"
        digest = hashlib.sha256(seed_input.encode("ascii")).digest()
        seed = int.from_bytes(digest[:8], "big", signed=False)
        return random.Random(seed).random()
+584 -66
View File
@@ -1,15 +1,36 @@
""" """
تسکهای Celery برای واکشی دادههای خاک. تسکهای Celery برای pipeline سنجشازدور و subdivision دادهمحور.
""" """
from decimal import Decimal import logging
from typing import Any
from config.celery import app from config.celery import app
from django.apps import apps from django.conf import settings
from django.db import transaction from django.db import transaction
from django.utils import timezone
from django.utils.dateparse import parse_date
from .models import SoilDepthData, SoilLocation from .data_driven_subdivision import (
from .soil_adapters import DEPTHS DEFAULT_CLUSTER_FEATURES,
DataDrivenSubdivisionError,
create_remote_sensing_subdivision_result,
)
from .grid_analysis import create_or_get_analysis_grid_cells
from .models import (
AnalysisGridCell,
AnalysisGridObservation,
BlockSubdivision,
RemoteSensingRun,
RemoteSensingSubdivisionResult,
SoilLocation,
)
from .openeo_service import (
OpenEOAuthenticationError,
OpenEOExecutionError,
OpenEOServiceError,
compute_remote_sensing_metrics,
)
try: try:
import requests import requests
@@ -19,79 +40,576 @@ else:
RequestException = requests.RequestException RequestException = requests.RequestException
def fetch_soil_data_for_coordinates( logger = logging.getLogger(__name__)
latitude: float,
longitude: float,
def run_remote_sensing_analysis(
*,
soil_location_id: int,
block_code: str = "",
temporal_start: Any,
temporal_end: Any,
force_refresh: bool = False,
task_id: str = "", task_id: str = "",
progress_callback=None, run_id: int | None = None,
): cluster_count: int | None = None,
selected_features: list[str] | None = None,
) -> dict[str, Any]:
""" """
واکشی سنکرون داده خاک برای مختصات دادهشده و ذخیره در DB. اجرای سنکرون تحلیل سنجشازدور برای یک location/block.
این helper هم توسط Celery task و هم توسط endpointهای sync استفاده میشود. این helper برای Celery task و هر orchestration داخلی دیگر قابل استفاده است.
""" """
lat = Decimal(str(round(float(latitude), 6))) start_date = _normalize_temporal_date(temporal_start, "temporal_start")
lon = Decimal(str(round(float(longitude), 6))) end_date = _normalize_temporal_date(temporal_end, "temporal_end")
adapter = apps.get_app_config("location_data").get_soil_data_adapter() if start_date > end_date:
raise ValueError("temporal_start نمی‌تواند بعد از temporal_end باشد.")
with transaction.atomic(): location = SoilLocation.objects.filter(pk=soil_location_id).first()
location, created = SoilLocation.objects.select_for_update().get_or_create( if location is None:
latitude=lat, raise ValueError(f"SoilLocation با id={soil_location_id} پیدا نشد.")
longitude=lon,
defaults={"task_id": task_id}, resolved_block_code = str(block_code or "").strip()
subdivision = _resolve_block_subdivision(location, resolved_block_code)
run = _get_or_create_remote_sensing_run(
run_id=run_id,
location=location,
subdivision=subdivision,
block_code=resolved_block_code,
temporal_start=start_date,
temporal_end=end_date,
task_id=task_id,
cluster_count=cluster_count,
selected_features=selected_features or list(DEFAULT_CLUSTER_FEATURES),
) )
if not created and task_id: _mark_run_running(run)
location.task_id = task_id
location.save(update_fields=["task_id"])
for index, depth in enumerate(DEPTHS): try:
if progress_callback is not None: _record_run_stage(
progress_callback( run,
state="PROGRESS", "preparing_analysis_grid",
meta={ {
"current": index + 1, "block_code": resolved_block_code,
"total": len(DEPTHS), "temporal_extent": {
"message": f"در حال واکشی عمق {depth}...", "start_date": start_date.isoformat(),
"end_date": end_date.isoformat(),
},
}, },
) )
fields = adapter.fetch_depth_fields(float(lon), float(lat), depth) grid_summary = create_or_get_analysis_grid_cells(
with transaction.atomic(): location,
SoilDepthData.objects.update_or_create( block_code=resolved_block_code,
soil_location=location, block_subdivision=subdivision,
depth_label=depth, )
defaults=fields, _record_run_stage(run, "analysis_grid_ready", {"grid_summary": grid_summary})
all_cells = _load_grid_cells(location, resolved_block_code)
cells_to_process = _select_cells_for_processing(
all_cells=all_cells,
temporal_start=start_date,
temporal_end=end_date,
force_refresh=force_refresh,
)
_record_run_stage(
run,
"analysis_cells_selected",
{
"cell_selection": {
"total_cell_count": len(all_cells),
"cell_count_to_process": len(cells_to_process),
"existing_cell_count": len(all_cells) - len(cells_to_process),
"force_refresh": force_refresh,
}
},
) )
if task_id: if not cells_to_process:
with transaction.atomic(): _record_run_stage(
location.task_id = "" run,
location.save(update_fields=["task_id"]) "using_cached_observations",
{"source": "database"},
return { )
observations = _load_observations(
location=location,
block_code=resolved_block_code,
temporal_start=start_date,
temporal_end=end_date,
)
subdivision_result = _ensure_subdivision_result(
location=location,
run=run,
subdivision=subdivision,
block_code=resolved_block_code,
observations=observations,
cluster_count=cluster_count,
selected_features=selected_features,
)
_record_run_stage(
run,
"clustering_completed",
_build_clustering_stage_metadata(subdivision_result),
)
summary = {
"status": "completed", "status": "completed",
"location_id": location.id, "source": "database",
"depths": DEPTHS, "run_id": run.id,
"processed_cell_count": 0,
"created_observation_count": 0,
"updated_observation_count": 0,
"existing_observation_count": len(all_cells),
"failed_metric_count": 0,
"chunk_size_sqm": grid_summary["chunk_size_sqm"],
"block_code": resolved_block_code,
"cell_count": len(all_cells),
"subdivision_result_id": getattr(subdivision_result, "id", None),
"cluster_count": getattr(subdivision_result, "cluster_count", 0),
} }
_mark_run_success(run, summary)
return summary
_record_run_stage(
@app.task(bind=True) run,
def fetch_soil_data_task(self, latitude: float, longitude: float): "fetching_remote_metrics",
""" {"requested_cell_count": len(cells_to_process)},
واکشی دادههای خاک برای مختصات دادهشده و ذخیره در DB.
برای هر عمق (0-5cm, 5-15cm, 15-30cm) یک ریکوئست/شبیهسازی جدا انجام میشود.
"""
try:
return fetch_soil_data_for_coordinates(
latitude=latitude,
longitude=longitude,
task_id=self.request.id,
progress_callback=self.update_state,
) )
except RequestException as exc: remote_payload = compute_remote_sensing_metrics(
lat = Decimal(str(round(float(latitude), 6))) cells_to_process,
lon = Decimal(str(round(float(longitude), 6))) temporal_start=start_date,
location = SoilLocation.objects.filter(latitude=lat, longitude=lon).first() temporal_end=end_date,
return { )
"status": "error", _record_run_stage(
"location_id": getattr(location, "id", None), run,
"error": str(exc), "remote_metrics_fetched",
{
"failed_metric_count": len(remote_payload["metadata"].get("failed_metrics", [])),
"service_metadata": remote_payload["metadata"],
},
)
upsert_summary = _upsert_grid_observations(
cells=cells_to_process,
run=run,
temporal_start=start_date,
temporal_end=end_date,
metric_payload=remote_payload,
)
_record_run_stage(run, "observations_persisted", upsert_summary)
observations = _load_observations(
location=location,
block_code=resolved_block_code,
temporal_start=start_date,
temporal_end=end_date,
)
subdivision_result = _ensure_subdivision_result(
location=location,
run=run,
subdivision=subdivision,
block_code=resolved_block_code,
observations=observations,
cluster_count=cluster_count,
selected_features=selected_features,
)
_record_run_stage(
run,
"clustering_completed",
_build_clustering_stage_metadata(subdivision_result),
)
summary = {
"status": "completed",
"source": "openeo",
"run_id": run.id,
"processed_cell_count": len(cells_to_process),
"created_observation_count": upsert_summary["created_count"],
"updated_observation_count": upsert_summary["updated_count"],
"existing_observation_count": len(all_cells) - len(cells_to_process),
"failed_metric_count": len(remote_payload["metadata"].get("failed_metrics", [])),
"chunk_size_sqm": grid_summary["chunk_size_sqm"],
"block_code": resolved_block_code,
"cell_count": len(all_cells),
"subdivision_result_id": subdivision_result.id,
"cluster_count": subdivision_result.cluster_count,
}
_mark_run_success(run, summary, remote_payload["metadata"])
logger.info(
"Remote sensing analysis completed",
extra={
"run_id": run.id,
"soil_location_id": location.id,
"block_code": resolved_block_code,
"processed_cell_count": summary["processed_cell_count"],
},
)
return summary
except Exception as exc:
_mark_run_failure(run, str(exc))
raise
@app.task(bind=True, max_retries=3, default_retry_delay=60)
def run_remote_sensing_analysis_task(
    self,
    soil_location_id: int,
    block_code: str = "",
    temporal_start: Any = "",
    temporal_end: Any = "",
    force_refresh: bool = False,
    run_id: int | None = None,
    cluster_count: int | None = None,
    selected_features: list[str] | None = None,
):
    """
    Run the remote-sensing analysis for a location/block asynchronously and
    persist the results to the database.

    Auth failures are fatal (no retry); execution/service/network/subdivision
    errors are retried up to max_retries with the default delay.
    """
    logger.info(
        "Starting remote sensing analysis task",
        extra={
            "task_id": self.request.id,
            "soil_location_id": soil_location_id,
            "block_code": block_code,
            "temporal_start": temporal_start,
            "temporal_end": temporal_end,
            "force_refresh": force_refresh,
        },
    )
    try:
        return run_remote_sensing_analysis(
            soil_location_id=soil_location_id,
            block_code=block_code,
            temporal_start=temporal_start,
            temporal_end=temporal_end,
            force_refresh=force_refresh,
            task_id=self.request.id,
            run_id=run_id,
            cluster_count=cluster_count,
            selected_features=selected_features,
        )
    except OpenEOAuthenticationError:
        # Credentials problems won't be fixed by retrying — surface immediately.
        logger.exception(
            "Remote sensing auth failure",
            extra={"task_id": self.request.id, "soil_location_id": soil_location_id},
        )
        raise
    except (OpenEOExecutionError, OpenEOServiceError, RequestException, DataDrivenSubdivisionError) as exc:
        # Treated as transient: re-queue via Celery's retry mechanism.
        logger.warning(
            "Transient remote sensing failure, retrying task",
            extra={
                "task_id": self.request.id,
                "soil_location_id": soil_location_id,
                "block_code": block_code,
                "retry_count": self.request.retries,
                "error": str(exc),
            },
        )
        raise self.retry(exc=exc)
def _normalize_temporal_date(value: Any, field_name: str):
    """Coerce *value* to a date, accepting date-like objects or ISO strings.

    Raises ValueError (Persian message) when the string cannot be parsed.
    """
    looks_like_date = hasattr(value, "isoformat") and not isinstance(value, str)
    if looks_like_date:
        return value
    parsed = parse_date(str(value))
    if parsed is None:
        raise ValueError(f"{field_name} نامعتبر است.")
    return parsed
def _resolve_block_subdivision(location: SoilLocation, block_code: str) -> BlockSubdivision | None:
    """Return the most recently updated subdivision for *block_code*, or None.

    An empty block code short-circuits to None (location-wide analysis has no
    subdivision).
    """
    if not block_code:
        return None
    candidates = BlockSubdivision.objects.filter(
        soil_location=location,
        block_code=block_code,
    )
    # Prefer the newest record when duplicates exist.
    return candidates.order_by("-updated_at", "-id").first()
def _get_or_create_remote_sensing_run(
    *,
    run_id: int | None,
    location: SoilLocation,
    subdivision: BlockSubdivision | None,
    block_code: str,
    temporal_start,
    temporal_end,
    task_id: str,
    cluster_count: int | None,
    selected_features: list[str],
) -> RemoteSensingRun:
    """Re-queue an existing RemoteSensingRun or create a fresh pending one.

    When *run_id* refers to a run owned by *location*, that run is reset to
    the "queued" stage and its scheduling metadata refreshed in place;
    otherwise a new PENDING run is created with equivalent metadata.
    """
    queued_at = timezone.now().isoformat()
    # Both branches need the same chunk size and pipeline descriptor; compute
    # them once instead of duplicating the literals in each branch.
    chunk_size_sqm = int(getattr(settings, "SUBDIVISION_CHUNK_SQM", 900) or 900)
    pipeline_info = {
        "name": "remote_sensing_subdivision",
        "version": 2,
    }
    if run_id is not None:
        run = RemoteSensingRun.objects.filter(pk=run_id, soil_location=location).first()
        if run is not None:
            metadata = dict(run.metadata or {})
            if task_id:
                metadata["task_id"] = task_id
            # Keep a previously recorded status label if one exists.
            metadata.setdefault("status_label", "pending")
            metadata["stage"] = "queued"
            metadata["selected_features"] = selected_features
            metadata["requested_cluster_count"] = cluster_count
            metadata["pipeline"] = pipeline_info
            metadata["timestamps"] = {
                **dict(metadata.get("timestamps") or {}),
                "queued_at": queued_at,
            }
            run.block_subdivision = subdivision
            run.block_code = block_code
            run.chunk_size_sqm = chunk_size_sqm
            run.temporal_start = temporal_start
            run.temporal_end = temporal_end
            run.metadata = metadata
            run.save(
                update_fields=[
                    "block_subdivision",
                    "block_code",
                    "chunk_size_sqm",
                    "temporal_start",
                    "temporal_end",
                    "metadata",
                    "updated_at",
                ]
            )
            return run
    metadata = {
        "status_label": "pending",
        "stage": "queued",
        "selected_features": selected_features,
        "requested_cluster_count": cluster_count,
        "pipeline": pipeline_info,
        "timestamps": {"queued_at": queued_at},
    }
    if task_id:
        metadata["task_id"] = task_id
    return RemoteSensingRun.objects.create(
        soil_location=location,
        block_subdivision=subdivision,
        block_code=block_code,
        chunk_size_sqm=chunk_size_sqm,
        temporal_start=temporal_start,
        temporal_end=temporal_end,
        status=RemoteSensingRun.STATUS_PENDING,
        metadata=metadata,
    )
def _mark_run_running(run: RemoteSensingRun) -> None:
    """Transition *run* to the RUNNING state and stamp its start time."""
    # Capture a single timestamp so run.started_at and the metadata
    # "started_at" entry agree exactly (previously timezone.now() was called
    # twice and the two recorded values could differ by microseconds).
    now = timezone.now()
    metadata = dict(run.metadata or {})
    metadata["status_label"] = "running"
    metadata["stage"] = "running"
    metadata["timestamps"] = {
        **dict(metadata.get("timestamps") or {}),
        "started_at": now.isoformat(),
    }
    run.status = RemoteSensingRun.STATUS_RUNNING
    run.started_at = now
    run.metadata = metadata
    run.save(update_fields=["status", "started_at", "metadata", "updated_at"])
def _mark_run_success(
    run: RemoteSensingRun,
    summary: dict[str, Any],
    service_metadata: dict[str, Any] | None = None,
) -> None:
    """Transition *run* to the SUCCESS state, storing the summary payload.

    *service_metadata*, when provided, is recorded under the "service" key;
    any previous error message is cleared.
    """
    # Single timestamp so finished_at and the metadata "completed_at" entry
    # agree exactly (previously timezone.now() was called twice).
    now = timezone.now()
    metadata = dict(run.metadata or {})
    metadata["summary"] = summary
    metadata["status_label"] = "completed"
    metadata["stage"] = "completed"
    metadata["timestamps"] = {
        **dict(metadata.get("timestamps") or {}),
        "completed_at": now.isoformat(),
    }
    if service_metadata:
        metadata["service"] = service_metadata
    run.status = RemoteSensingRun.STATUS_SUCCESS
    run.finished_at = now
    run.error_message = ""
    run.metadata = metadata
    run.save(
        update_fields=[
            "status",
            "finished_at",
            "error_message",
            "metadata",
            "updated_at",
        ]
    )
def _mark_run_failure(run: RemoteSensingRun, error_message: str) -> None:
    """Transition *run* to the FAILURE state and record the error message."""
    # Single timestamp so finished_at and the metadata "failed_at" entry agree
    # exactly; truncate the message once instead of slicing it twice.
    now = timezone.now()
    truncated_error = error_message[:4000]
    metadata = dict(run.metadata or {})
    metadata["status_label"] = "failed"
    metadata["failure_reason"] = truncated_error
    metadata["timestamps"] = {
        **dict(metadata.get("timestamps") or {}),
        "failed_at": now.isoformat(),
    }
    run.status = RemoteSensingRun.STATUS_FAILURE
    run.finished_at = now
    run.error_message = truncated_error
    run.metadata = metadata
    run.save(
        update_fields=[
            "status",
            "finished_at",
            "error_message",
            "metadata",
            "updated_at",
        ]
    )
    # Invoked from the caller's except block, so logger.exception attaches the
    # active exception's traceback to the log record.
    logger.exception(
        "Remote sensing analysis failed",
        extra={"run_id": run.id, "soil_location_id": run.soil_location_id, "block_code": run.block_code},
    )
def _load_grid_cells(location: SoilLocation, block_code: str) -> list[AnalysisGridCell]:
    """Return the location's grid cells for *block_code*, ordered by cell code."""
    cells = AnalysisGridCell.objects.filter(
        soil_location=location,
        block_code=block_code or "",
    )
    return list(cells.order_by("cell_code"))
def _load_observations(
    *,
    location: SoilLocation,
    block_code: str,
    temporal_start,
    temporal_end,
) -> list[AnalysisGridObservation]:
    """Return stored observations for the block and period, ordered by cell code.

    ``select_related`` pulls the cell and run rows in the same query to avoid
    per-observation lookups downstream.
    """
    observations = AnalysisGridObservation.objects.select_related("cell", "run").filter(
        cell__soil_location=location,
        cell__block_code=block_code or "",
        temporal_start=temporal_start,
        temporal_end=temporal_end,
    )
    return list(observations.order_by("cell__cell_code"))
def _select_cells_for_processing(
    *,
    all_cells: list[AnalysisGridCell],
    temporal_start,
    temporal_end,
    force_refresh: bool,
) -> list[AnalysisGridCell]:
    """Return the subset of *all_cells* that still needs processing.

    With *force_refresh* every cell is reprocessed; otherwise cells that
    already have an observation for the exact period are skipped.
    """
    if force_refresh:
        return all_cells
    already_observed = set(
        AnalysisGridObservation.objects.filter(
            cell__in=all_cells,
            temporal_start=temporal_start,
            temporal_end=temporal_end,
        ).values_list("cell_id", flat=True)
    )
    return [candidate for candidate in all_cells if candidate.id not in already_observed]
def _upsert_grid_observations(
    *,
    cells: list[AnalysisGridCell],
    run: RemoteSensingRun,
    temporal_start,
    temporal_end,
    metric_payload: dict[str, Any],
) -> dict[str, int]:
    """Create or refresh one observation per cell for the given period.

    Metric values come from ``metric_payload["results"]`` keyed by cell code;
    shared backend details are copied into each observation's metadata.
    Returns created/updated row counts. Raises KeyError if the payload has no
    "metadata" entry.
    """
    source_metadata = metric_payload["metadata"]
    metadata_template = {
        "backend_name": source_metadata.get("backend"),
        "backend_url": source_metadata.get("backend_url"),
        "collections_used": source_metadata.get("collections_used", []),
        "slope_supported": source_metadata.get("slope_supported", False),
        "job_refs": source_metadata.get("job_refs", {}),
        "failed_metrics": source_metadata.get("failed_metrics", []),
        "run_id": run.id,
    }
    result_by_cell = metric_payload.get("results", {})
    counts = {"created_count": 0, "updated_count": 0}
    # One transaction so a partial batch never persists.
    with transaction.atomic():
        for cell in cells:
            values = result_by_cell.get(cell.cell_code, {})
            _, created = AnalysisGridObservation.objects.update_or_create(
                cell=cell,
                temporal_start=temporal_start,
                temporal_end=temporal_end,
                defaults={
                    "run": run,
                    "ndvi": values.get("ndvi"),
                    "ndwi": values.get("ndwi"),
                    "lst_c": values.get("lst_c"),
                    "soil_vv": values.get("soil_vv"),
                    "soil_vv_db": values.get("soil_vv_db"),
                    "dem_m": values.get("dem_m"),
                    "slope_deg": values.get("slope_deg"),
                    "metadata": metadata_template,
                },
            )
            counts["created_count" if created else "updated_count"] += 1
    return counts
def _ensure_subdivision_result(
    *,
    location: SoilLocation,
    run: RemoteSensingRun,
    subdivision: BlockSubdivision | None,
    block_code: str,
    observations: list[AnalysisGridObservation],
    cluster_count: int | None,
    selected_features: list[str] | None,
) -> RemoteSensingSubdivisionResult:
    """Build the data-driven subdivision result for *run* from its observations.

    Raises DataDrivenSubdivisionError when no observations are available,
    since clustering needs at least one data point. Falls back to the default
    feature set when *selected_features* is empty or None.
    """
    if not observations:
        raise DataDrivenSubdivisionError("هیچ observation برای ساخت subdivision داده‌محور پیدا نشد.")
    return create_remote_sensing_subdivision_result(
        location=location,
        run=run,
        observations=observations,
        block_subdivision=subdivision,
        block_code=block_code,
        selected_features=selected_features or list(DEFAULT_CLUSTER_FEATURES),
        explicit_k=cluster_count,
    )
def _record_run_stage(run: RemoteSensingRun, stage: str, details: dict[str, Any] | None = None) -> None:
    """Record *stage* (with optional details and a timestamp) in run metadata."""
    metadata = dict(run.metadata or {})
    metadata["stage"] = stage
    stage_details = dict(metadata.get("stage_details") or {})
    stage_details[stage] = details or {}
    metadata["stage_details"] = stage_details
    timestamps = dict(metadata.get("timestamps") or {})
    timestamps[f"{stage}_at"] = timezone.now().isoformat()
    metadata["timestamps"] = timestamps
    run.metadata = metadata
    run.save(update_fields=["metadata", "updated_at"])
def _build_clustering_stage_metadata(
    result: RemoteSensingSubdivisionResult,
) -> dict[str, Any]:
    """Summarize a subdivision result for the clustering stage record."""
    result_metadata = dict(result.metadata or {})
    return {
        "subdivision_result_id": result.id,
        "cluster_count": result.cluster_count,
        "selected_features": result.selected_features,
        "used_cell_count": result_metadata.get("used_cell_count", 0),
        "skipped_cell_count": result_metadata.get("skipped_cell_count", 0),
        "skipped_cell_codes": result.skipped_cell_codes,
        "kmeans_params": result_metadata.get("kmeans_params", {}),
    }
+44
View File
@@ -0,0 +1,44 @@
from django.test import SimpleTestCase, override_settings
from location_data.block_subdivision import (
build_block_subdivision_payload,
detect_elbow_point,
)
@override_settings(SUBDIVISION_CHUNK_SQM=900)
class BlockSubdivisionServiceTests(SimpleTestCase):
    """Unit tests for the block-subdivision helpers (no database access)."""

    def test_detect_elbow_point_from_sse_curve(self):
        # The sharpest SSE drop is between k=1 and k=2, so the elbow is k=2.
        inertia_curve = [
            {"k": 1, "sse": 1000.0},
            {"k": 2, "sse": 400.0},
            {"k": 3, "sse": 220.0},
            {"k": 4, "sse": 180.0},
        ]
        optimal_k = detect_elbow_point(inertia_curve)
        self.assertEqual(optimal_k, 2)

    def test_build_block_subdivision_payload_returns_grid_and_centroids(self):
        # Small rectangular GeoJSON polygon; coordinates are (lon, lat) pairs.
        boundary = {
            "type": "Polygon",
            "coordinates": [
                [
                    [51.3890, 35.6890],
                    [51.3902, 35.6890],
                    [51.3902, 35.6900],
                    [51.3890, 35.6900],
                    [51.3890, 35.6890],
                ]
            ],
        }
        result = build_block_subdivision_payload(boundary, block_code="block-1")
        self.assertEqual(result["block_code"], "block-1")
        # Chunk size comes from the overridden SUBDIVISION_CHUNK_SQM setting.
        self.assertEqual(result["chunk_size_sqm"], 900)
        self.assertGreater(result["grid_point_count"], 0)
        self.assertGreater(result["centroid_count"], 0)
        self.assertIn("optimal_k", result["metadata"])
        self.assertTrue(result["metadata"]["inertia_curve"])
@@ -0,0 +1,135 @@
from datetime import date
from django.test import TestCase
from location_data.data_driven_subdivision import sync_block_subdivision_with_result
from location_data.models import (
AnalysisGridCell,
AnalysisGridObservation,
BlockSubdivision,
RemoteSensingRun,
RemoteSensingSubdivisionResult,
SoilLocation,
)
class DataDrivenSubdivisionSyncTests(TestCase):
    """Integration tests for syncing a clustering result onto its BlockSubdivision."""

    def setUp(self):
        # Shared square GeoJSON boundary (lon/lat order) reused for the farm,
        # the block, and every grid-cell geometry in these tests.
        self.boundary = {
            "type": "Polygon",
            "coordinates": [
                [
                    [51.3890, 35.6890],
                    [51.3900, 35.6890],
                    [51.3900, 35.6900],
                    [51.3890, 35.6900],
                    [51.3890, 35.6890],
                ]
            ],
        }
        self.location = SoilLocation.objects.create(
            latitude="35.689200",
            longitude="51.389000",
            farm_boundary=self.boundary,
        )
        self.subdivision = BlockSubdivision.objects.create(
            soil_location=self.location,
            block_code="block-1",
            source_boundary=self.boundary,
            chunk_size_sqm=900,
            status="defined",
        )
        self.run = RemoteSensingRun.objects.create(
            soil_location=self.location,
            block_subdivision=self.subdivision,
            block_code="block-1",
            chunk_size_sqm=900,
            temporal_start=date(2025, 1, 1),
            temporal_end=date(2025, 1, 31),
            status=RemoteSensingRun.STATUS_SUCCESS,
        )

    def test_sync_block_subdivision_with_result_updates_saved_sub_blocks(self):
        # Two cells with one NDVI observation each — the minimum needed for a
        # two-cluster result.
        cell_1 = AnalysisGridCell.objects.create(
            soil_location=self.location,
            block_subdivision=self.subdivision,
            block_code="block-1",
            cell_code="cell-1",
            chunk_size_sqm=900,
            geometry=self.boundary,
            centroid_lat="35.689200",
            centroid_lon="51.389200",
        )
        cell_2 = AnalysisGridCell.objects.create(
            soil_location=self.location,
            block_subdivision=self.subdivision,
            block_code="block-1",
            cell_code="cell-2",
            chunk_size_sqm=900,
            geometry=self.boundary,
            centroid_lat="35.689700",
            centroid_lon="51.389700",
        )
        observation_1 = AnalysisGridObservation.objects.create(
            cell=cell_1,
            run=self.run,
            temporal_start=date(2025, 1, 1),
            temporal_end=date(2025, 1, 31),
            ndvi=0.5,
        )
        observation_2 = AnalysisGridObservation.objects.create(
            cell=cell_2,
            run=self.run,
            temporal_start=date(2025, 1, 1),
            temporal_end=date(2025, 1, 31),
            ndvi=0.7,
        )
        result = RemoteSensingSubdivisionResult.objects.create(
            soil_location=self.location,
            run=self.run,
            block_subdivision=self.subdivision,
            block_code="block-1",
            chunk_size_sqm=900,
            temporal_start=date(2025, 1, 1),
            temporal_end=date(2025, 1, 31),
            cluster_count=2,
            selected_features=["ndvi"],
            metadata={
                "used_cell_count": 2,
                "skipped_cell_count": 0,
                "inertia_curve": [{"k": 1, "sse": 1.0}, {"k": 2, "sse": 0.1}],
            },
        )
        # Each cluster summary carries one cell; the sync should map these
        # onto the subdivision's grid/centroid payloads.
        sync_block_subdivision_with_result(
            block_subdivision=self.subdivision,
            result=result,
            observations=[observation_1, observation_2],
            cluster_summaries=[
                {
                    "cluster_label": 0,
                    "centroid_lat": 35.6892,
                    "centroid_lon": 51.3892,
                    "cell_count": 1,
                    "cell_codes": ["cell-1"],
                },
                {
                    "cluster_label": 1,
                    "centroid_lat": 35.6897,
                    "centroid_lon": 51.3897,
                    "cell_count": 1,
                    "cell_codes": ["cell-2"],
                },
            ],
        )
        self.subdivision.refresh_from_db()
        # Subdivision should now reflect the clustered layout and counts.
        self.assertEqual(self.subdivision.status, "subdivided")
        self.assertEqual(self.subdivision.grid_point_count, 2)
        self.assertEqual(self.subdivision.centroid_count, 2)
        self.assertEqual(self.subdivision.grid_points[0]["cell_code"], "cell-1")
        self.assertEqual(self.subdivision.centroid_points[0]["sub_block_code"], "cluster-0")
        self.assertEqual(
            self.subdivision.metadata["data_driven_subdivision"]["cluster_count"],
            2,
        )
+114
View File
@@ -0,0 +1,114 @@
from django.test import TestCase, override_settings
from location_data.grid_analysis import create_or_get_analysis_grid_cells
from location_data.models import AnalysisGridCell, BlockSubdivision, SoilLocation
@override_settings(SUBDIVISION_CHUNK_SQM=900)
class AnalysisGridServiceTests(TestCase):
    """Tests for create_or_get_analysis_grid_cells: persistence, idempotency,
    and the fallback to the location boundary when no subdivision is given."""

    def setUp(self):
        # Rectangular GeoJSON boundary (lon/lat order) used as both the farm
        # boundary and the block's source boundary.
        self.boundary = {
            "type": "Polygon",
            "coordinates": [
                [
                    [51.389000, 35.689000],
                    [51.389760, 35.689000],
                    [51.389760, 35.689620],
                    [51.389000, 35.689620],
                    [51.389000, 35.689000],
                ]
            ],
        }
        self.location = SoilLocation.objects.create(
            latitude="35.689310",
            longitude="51.389380",
            farm_boundary=self.boundary,
        )
        self.location.set_input_block_count(1)
        self.location.save(update_fields=["input_block_count", "block_layout", "updated_at"])
        self.subdivision = BlockSubdivision.objects.create(
            soil_location=self.location,
            block_code="block-1",
            source_boundary=self.boundary,
            chunk_size_sqm=900,
            status="created",
        )

    def test_create_analysis_grid_cells_persists_30x30_cells(self):
        result = create_or_get_analysis_grid_cells(
            self.location,
            block_code="block-1",
            block_subdivision=self.subdivision,
        )
        self.assertTrue(result["created"])
        self.assertEqual(result["chunk_size_sqm"], 900)
        self.assertGreater(result["created_count"], 0)
        # First invocation creates every cell it reports.
        self.assertEqual(result["created_count"], result["total_count"])
        cells = list(
            AnalysisGridCell.objects.filter(
                soil_location=self.location,
                block_code="block-1",
                chunk_size_sqm=900,
            ).order_by("cell_code")
        )
        self.assertEqual(len(cells), result["total_count"])
        self.assertTrue(all(cell.block_subdivision_id == self.subdivision.id for cell in cells))
        # Every cell geometry should be a closed polygon ring (5 coordinates).
        self.assertTrue(all(cell.geometry.get("type") == "Polygon" for cell in cells))
        self.assertTrue(all(len(cell.geometry.get("coordinates", [[]])[0]) == 5 for cell in cells))
        self.subdivision.refresh_from_db()
        self.location.refresh_from_db()
        # Grid summaries are mirrored onto the subdivision and block layout.
        self.assertEqual(
            self.subdivision.metadata["analysis_grid"]["chunk_size_sqm"],
            900,
        )
        self.assertEqual(
            self.subdivision.metadata["analysis_grid"]["cell_count"],
            result["total_count"],
        )
        self.assertEqual(
            self.location.block_layout["blocks"][0]["analysis_grid_summary"]["chunk_size_sqm"],
            900,
        )

    def test_create_analysis_grid_cells_is_idempotent(self):
        first = create_or_get_analysis_grid_cells(
            self.location,
            block_code="block-1",
            block_subdivision=self.subdivision,
        )
        second = create_or_get_analysis_grid_cells(
            self.location,
            block_code="block-1",
            block_subdivision=self.subdivision,
        )
        self.assertTrue(first["created"])
        # Second call must reuse the stored grid rather than duplicate cells.
        self.assertFalse(second["created"])
        self.assertEqual(second["created_count"], 0)
        self.assertEqual(second["existing_count"], first["total_count"])
        self.assertEqual(
            AnalysisGridCell.objects.filter(
                soil_location=self.location,
                block_code="block-1",
                chunk_size_sqm=900,
            ).count(),
            first["total_count"],
        )

    def test_create_analysis_grid_cells_uses_location_boundary_without_subdivision(self):
        # With no block code/subdivision the farm boundary drives the grid.
        result = create_or_get_analysis_grid_cells(
            self.location,
            block_code="",
        )
        self.assertGreater(result["total_count"], 0)
        self.assertTrue(
            AnalysisGridCell.objects.filter(
                soil_location=self.location,
                block_code="",
                chunk_size_sqm=900,
            ).exists()
        )
+66
View File
@@ -0,0 +1,66 @@
from decimal import Decimal
from django.test import SimpleTestCase
from location_data.openeo_service import (
build_empty_metric_payload,
linear_to_db,
merge_metric_results,
parse_aggregate_spatial_response,
)
class OpenEOServiceParsingTests(SimpleTestCase):
    """Unit tests for the openEO response-parsing and unit-conversion helpers."""

    def test_parse_feature_collection_results(self):
        # aggregate_spatial may return a GeoJSON FeatureCollection keyed by
        # feature id; each feature's "mean" becomes the metric value.
        payload = {
            "type": "FeatureCollection",
            "features": [
                {
                    "type": "Feature",
                    "id": "cell-1",
                    "properties": {"mean": 0.61},
                },
                {
                    "type": "Feature",
                    "id": "cell-2",
                    "properties": {"mean": 0.47},
                },
            ],
        }
        result = parse_aggregate_spatial_response(payload, "ndvi")
        self.assertEqual(result["cell-1"]["ndvi"], 0.61)
        self.assertEqual(result["cell-2"]["ndvi"], 0.47)

    def test_parse_mapping_results(self):
        # The backend may also return a plain {cell_code: {"mean": ...}} mapping.
        payload = {
            "cell-1": {"mean": 12.4},
            "cell-2": {"mean": 15.1},
        }
        result = parse_aggregate_spatial_response(payload, "lst_c")
        self.assertEqual(result["cell-1"]["lst_c"], 12.4)
        self.assertEqual(result["cell-2"]["lst_c"], 15.1)

    def test_linear_to_db(self):
        # 10*log10: 10.0 -> 10 dB, 1.0 -> 0 dB; non-positive input has no dB value.
        self.assertEqual(linear_to_db(10.0), 10.0)
        self.assertEqual(linear_to_db(Decimal("1.0")), 0.0)
        self.assertIsNone(linear_to_db(0))
        self.assertIsNone(linear_to_db(-1))

    def test_merge_metric_results(self):
        # Merging should update known cells in place and create a full empty
        # payload (all metric keys present) for previously unseen cells.
        target = {"cell-1": build_empty_metric_payload()}
        merge_metric_results(
            target,
            {
                "cell-1": {"ndvi": 0.5},
                "cell-2": {"ndwi": 0.2},
            },
        )
        self.assertEqual(target["cell-1"]["ndvi"], 0.5)
        self.assertEqual(target["cell-2"]["ndwi"], 0.2)
        self.assertIn("soil_vv_db", target["cell-2"])
+265
View File
@@ -0,0 +1,265 @@
from datetime import date
from types import SimpleNamespace
from unittest.mock import patch
from django.test import TestCase, override_settings
from rest_framework.test import APIClient
from location_data.models import (
AnalysisGridCell,
AnalysisGridObservation,
BlockSubdivision,
RemoteSensingClusterAssignment,
RemoteSensingRun,
RemoteSensingSubdivisionResult,
SoilLocation,
)
@override_settings(ROOT_URLCONF="location_data.urls")
class RemoteSensingApiTests(TestCase):
    """API tests for the /remote-sensing/ endpoints: enqueueing, cached reads,
    and the run status/result endpoints."""

    def setUp(self):
        self.client = APIClient()
        # Shared square GeoJSON boundary (lon/lat order) for the farm, the
        # block, and every grid-cell geometry used below.
        self.boundary = {
            "type": "Polygon",
            "coordinates": [
                [
                    [51.3890, 35.6890],
                    [51.3900, 35.6890],
                    [51.3900, 35.6900],
                    [51.3890, 35.6900],
                    [51.3890, 35.6890],
                ]
            ],
        }
        self.location = SoilLocation.objects.create(
            latitude="35.689200",
            longitude="51.389000",
            farm_boundary=self.boundary,
        )
        self.location.set_input_block_count(1)
        self.location.save(update_fields=["input_block_count", "block_layout", "updated_at"])
        self.subdivision = BlockSubdivision.objects.create(
            soil_location=self.location,
            block_code="block-1",
            source_boundary=self.boundary,
            chunk_size_sqm=900,
            status="created",
        )

    def test_post_remote_sensing_returns_404_when_location_missing(self):
        # Coordinates that do not match any stored SoilLocation.
        response = self.client.post(
            "/remote-sensing/",
            data={
                "lat": 35.7000,
                "lon": 51.4000,
                "start_date": "2025-01-01",
                "end_date": "2025-01-31",
            },
            format="json",
        )
        self.assertEqual(response.status_code, 404)
        self.assertEqual(response.json()["msg"], "location پیدا نشد.")

    @patch("location_data.views.run_remote_sensing_analysis_task.delay")
    def test_post_remote_sensing_enqueues_task_and_returns_processing(self, mock_delay):
        # SimpleNamespace mimics the AsyncResult returned by Celery's delay().
        mock_delay.return_value = SimpleNamespace(id="task-123")
        response = self.client.post(
            "/remote-sensing/",
            data={
                "lat": 35.6892,
                "lon": 51.3890,
                "block_code": "block-1",
                "start_date": "2025-01-01",
                "end_date": "2025-01-31",
                "force_refresh": False,
            },
            format="json",
        )
        # 202 Accepted: the analysis runs asynchronously.
        self.assertEqual(response.status_code, 202)
        payload = response.json()["data"]
        self.assertEqual(payload["status"], "processing")
        self.assertEqual(payload["source"], "processing")
        self.assertEqual(payload["task_id"], "task-123")
        self.assertEqual(payload["block_code"], "block-1")
        self.assertEqual(payload["summary"]["cell_count"], 0)
        # A pending run record is created and queued before the task starts.
        run = RemoteSensingRun.objects.get(id=payload["run"]["id"])
        self.assertEqual(run.block_code, "block-1")
        self.assertEqual(run.status, RemoteSensingRun.STATUS_PENDING)
        self.assertEqual(run.metadata["stage"], "queued")
        self.assertEqual(run.metadata["selected_features"], [])
        mock_delay.assert_called_once()

    def test_get_remote_sensing_returns_processing_when_run_exists_without_results(self):
        # A running run with no observations should read back as "processing".
        RemoteSensingRun.objects.create(
            soil_location=self.location,
            block_subdivision=self.subdivision,
            block_code="block-1",
            chunk_size_sqm=900,
            temporal_start=date(2025, 1, 1),
            temporal_end=date(2025, 1, 31),
            status=RemoteSensingRun.STATUS_RUNNING,
            metadata={"task_id": "task-123"},
        )
        response = self.client.get(
            "/remote-sensing/",
            data={
                "lat": 35.6892,
                "lon": 51.3890,
                "block_code": "block-1",
                "start_date": "2025-01-01",
                "end_date": "2025-01-31",
            },
        )
        self.assertEqual(response.status_code, 200)
        payload = response.json()["data"]
        self.assertEqual(payload["status"], "processing")
        self.assertEqual(payload["source"], "processing")
        self.assertEqual(payload["cells"], [])
        self.assertEqual(payload["run"]["status"], RemoteSensingRun.STATUS_RUNNING)

    def test_get_remote_sensing_returns_cached_results(self):
        # Successful run plus a stored observation → served from the database.
        run = RemoteSensingRun.objects.create(
            soil_location=self.location,
            block_subdivision=self.subdivision,
            block_code="block-1",
            chunk_size_sqm=900,
            temporal_start=date(2025, 1, 1),
            temporal_end=date(2025, 1, 31),
            status=RemoteSensingRun.STATUS_SUCCESS,
        )
        cell = AnalysisGridCell.objects.create(
            soil_location=self.location,
            block_subdivision=self.subdivision,
            block_code="block-1",
            cell_code="cell-1",
            chunk_size_sqm=900,
            geometry=self.boundary,
            centroid_lat="35.689500",
            centroid_lon="51.389500",
        )
        AnalysisGridObservation.objects.create(
            cell=cell,
            run=run,
            temporal_start=date(2025, 1, 1),
            temporal_end=date(2025, 1, 31),
            ndvi=0.61,
            ndwi=0.22,
            lst_c=24.5,
            soil_vv=0.13,
            soil_vv_db=-8.860566,
            dem_m=1550.0,
            slope_deg=4.2,
            metadata={"backend_name": "openeo"},
        )
        response = self.client.get(
            "/remote-sensing/",
            data={
                "lat": 35.6892,
                "lon": 51.3890,
                "block_code": "block-1",
                "start_date": "2025-01-01",
                "end_date": "2025-01-31",
            },
        )
        self.assertEqual(response.status_code, 200)
        payload = response.json()["data"]
        self.assertEqual(payload["status"], "success")
        self.assertEqual(payload["source"], "database")
        # Summary means are computed over the single stored observation.
        self.assertEqual(payload["summary"]["cell_count"], 1)
        self.assertEqual(payload["summary"]["ndvi_mean"], 0.61)
        self.assertEqual(payload["summary"]["soil_vv_db_mean"], -8.860566)
        self.assertEqual(len(payload["cells"]), 1)
        self.assertEqual(payload["cells"][0]["cell_code"], "cell-1")

    def test_run_status_endpoint_returns_normalized_status(self):
        run = RemoteSensingRun.objects.create(
            soil_location=self.location,
            block_subdivision=self.subdivision,
            block_code="block-1",
            chunk_size_sqm=900,
            temporal_start=date(2025, 1, 1),
            temporal_end=date(2025, 1, 31),
            status=RemoteSensingRun.STATUS_SUCCESS,
            metadata={"stage": "completed", "selected_features": ["ndvi"]},
        )
        response = self.client.get(f"/remote-sensing/runs/{run.id}/status/")
        self.assertEqual(response.status_code, 200)
        payload = response.json()["data"]
        # DB success status is surfaced as the normalized "completed" label.
        self.assertEqual(payload["status"], "completed")
        self.assertEqual(payload["run"]["pipeline_status"], "completed")
        self.assertEqual(payload["run"]["stage"], "completed")
        self.assertEqual(payload["run"]["selected_features"], ["ndvi"])

    def test_run_result_endpoint_returns_paginated_assignments(self):
        # Full chain: run → cell → observation → subdivision result → assignment.
        run = RemoteSensingRun.objects.create(
            soil_location=self.location,
            block_subdivision=self.subdivision,
            block_code="block-1",
            chunk_size_sqm=900,
            temporal_start=date(2025, 1, 1),
            temporal_end=date(2025, 1, 31),
            status=RemoteSensingRun.STATUS_SUCCESS,
            metadata={"stage": "completed"},
        )
        cell = AnalysisGridCell.objects.create(
            soil_location=self.location,
            block_subdivision=self.subdivision,
            block_code="block-1",
            cell_code="cell-1",
            chunk_size_sqm=900,
            geometry=self.boundary,
            centroid_lat="35.689500",
            centroid_lon="51.389500",
        )
        AnalysisGridObservation.objects.create(
            cell=cell,
            run=run,
            temporal_start=date(2025, 1, 1),
            temporal_end=date(2025, 1, 31),
            ndvi=0.61,
            ndwi=0.22,
            lst_c=24.5,
            soil_vv=0.13,
            soil_vv_db=-8.860566,
            dem_m=1550.0,
            slope_deg=4.2,
            metadata={"backend_name": "openeo"},
        )
        result = RemoteSensingSubdivisionResult.objects.create(
            soil_location=self.location,
            run=run,
            block_subdivision=self.subdivision,
            block_code="block-1",
            chunk_size_sqm=900,
            temporal_start=date(2025, 1, 1),
            temporal_end=date(2025, 1, 31),
            cluster_count=1,
            selected_features=["ndvi"],
            metadata={"used_cell_count": 1, "skipped_cell_count": 0},
        )
        RemoteSensingClusterAssignment.objects.create(
            result=result,
            cell=cell,
            cluster_label=0,
            raw_feature_values={"ndvi": 0.61},
            scaled_feature_values={"ndvi": 0.0},
        )
        response = self.client.get(f"/remote-sensing/runs/{run.id}/result/", data={"page": 1, "page_size": 10})
        self.assertEqual(response.status_code, 200)
        payload = response.json()["data"]
        self.assertEqual(payload["status"], "completed")
        self.assertEqual(payload["subdivision_result"]["cluster_count"], 1)
        self.assertEqual(len(payload["subdivision_result"]["assignments"]), 1)
        self.assertEqual(payload["pagination"]["assignments"]["total_items"], 1)
-92
View File
@@ -1,92 +0,0 @@
from __future__ import annotations
from django.apps import apps
from django.test import SimpleTestCase, TestCase, override_settings
from location_data.models import SoilDepthData, SoilLocation
from location_data.soil_adapters import (
DEPTHS,
MockSoilDataAdapter,
SoilGridsAdapter,
)
from location_data.tasks import fetch_soil_data_for_coordinates
class MockSoilDataAdapterTests(SimpleTestCase):
    """Property tests for the deterministic mock soil-data adapter."""

    def setUp(self):
        # Zero delay keeps the test fast; values are coordinate-derived.
        self.adapter = MockSoilDataAdapter(delay_seconds=0)

    def test_same_coordinate_returns_same_values(self):
        # Determinism: identical inputs must produce identical outputs.
        first = self.adapter.fetch_depth_fields(51.4, 35.71, "0-5cm")
        second = self.adapter.fetch_depth_fields(51.4, 35.71, "0-5cm")
        self.assertEqual(first, second)

    def test_nearby_coordinates_produce_nearby_values(self):
        # Spatial smoothness: small coordinate shifts yield small value shifts.
        first = self.adapter.fetch_depth_fields(51.4, 35.71, "0-5cm")
        second = self.adapter.fetch_depth_fields(51.405, 35.715, "0-5cm")
        self.assertLess(abs(first["sand"] - second["sand"]), 4.5)
        self.assertLess(abs(first["clay"] - second["clay"]), 4.5)
        self.assertLess(abs(first["phh2o"] - second["phh2o"]), 0.35)
        self.assertLess(abs(first["wv1500"] - second["wv1500"]), 0.03)

    def test_depth_profiles_follow_expected_trend(self):
        # Bulk density grows and organic carbon shrinks with depth.
        shallow = self.adapter.fetch_depth_fields(51.4, 35.71, "0-5cm")
        medium = self.adapter.fetch_depth_fields(51.4, 35.71, "5-15cm")
        deep = self.adapter.fetch_depth_fields(51.4, 35.71, "15-30cm")
        self.assertGreaterEqual(deep["bdod"], medium["bdod"])
        self.assertGreaterEqual(medium["bdod"], shallow["bdod"])
        self.assertLessEqual(deep["soc"], medium["soc"])
        self.assertLessEqual(medium["soc"], shallow["soc"])
class SoilDataAdapterSelectionTests(SimpleTestCase):
    """Tests that the app config picks the adapter matching SOIL_DATA_PROVIDER."""

    def tearDown(self):
        # The adapter is cached on the AppConfig instance; drop the cache so
        # each test resolves it fresh under its own settings override.
        apps.get_app_config("location_data").__dict__.pop("soil_data_adapter", None)

    @override_settings(SOIL_DATA_PROVIDER="mock", SOIL_MOCK_DELAY_SECONDS=0)
    def test_app_config_returns_mock_adapter(self):
        config = apps.get_app_config("location_data")
        config.__dict__.pop("soil_data_adapter", None)
        adapter = config.get_soil_data_adapter()
        self.assertIsInstance(adapter, MockSoilDataAdapter)

    @override_settings(SOIL_DATA_PROVIDER="soilgrids", SOILGRIDS_TIMEOUT_SECONDS=12)
    def test_app_config_returns_live_adapter(self):
        config = apps.get_app_config("location_data")
        config.__dict__.pop("soil_data_adapter", None)
        adapter = config.get_soil_data_adapter()
        self.assertIsInstance(adapter, SoilGridsAdapter)
        # Timeout setting must propagate onto the live adapter.
        self.assertEqual(adapter.timeout, 12)
@override_settings(SOIL_DATA_PROVIDER="mock", SOIL_MOCK_DELAY_SECONDS=0)
class SoilDataFetchTests(TestCase):
    """End-to-end fetch using the mock provider, persisting all depth rows."""

    def test_fetch_soil_data_for_coordinates_persists_three_depths(self):
        result = fetch_soil_data_for_coordinates(latitude=35.71, longitude=51.4)
        self.assertEqual(result["status"], "completed")
        self.assertEqual(result["depths"], DEPTHS)
        # Coordinates are stored normalized to six decimal places.
        location = SoilLocation.objects.get(latitude="35.710000", longitude="51.400000")
        self.assertEqual(location.depths.count(), 3)
        self.assertTrue(location.is_complete)
        self.assertCountEqual(
            list(location.depths.values_list("depth_label", flat=True)),
            DEPTHS,
        )
        # The shallow depth row must carry populated texture/moisture fields.
        self.assertTrue(
            SoilDepthData.objects.filter(
                soil_location=location,
                depth_label="0-5cm",
                sand__isnull=False,
                clay__isnull=False,
                wv1500__isnull=False,
            ).exists()
        )
+257
View File
@@ -0,0 +1,257 @@
from django.test import TestCase, override_settings
from rest_framework.test import APIClient
from location_data.models import AnalysisGridCell, BlockSubdivision, RemoteSensingRun, SoilLocation
@override_settings(ROOT_URLCONF="location_data.urls")
class SoilDataApiTests(TestCase):
def setUp(self):
self.client = APIClient()
self.boundary = {
"type": "Polygon",
"coordinates": [
[
[51.3890, 35.6890],
[51.3902, 35.6890],
[51.3902, 35.6900],
[51.3890, 35.6900],
[51.3890, 35.6890],
]
],
}
self.block_boundary = {
"type": "Polygon",
"coordinates": [
[
[51.3890, 35.6890],
[51.3896, 35.6890],
[51.3896, 35.6900],
[51.3890, 35.6900],
[51.3890, 35.6890],
]
],
}
def test_post_creates_default_single_block_layout(self):
response = self.client.post(
"/",
data={
"lat": 35.6892,
"lon": 51.3890,
"farm_boundary": self.boundary,
"blocks": [
{
"block_code": "block-1",
"boundary": self.block_boundary,
}
],
},
format="json",
)
self.assertEqual(response.status_code, 200)
payload = response.json()["data"]
self.assertEqual(payload["source"], "created")
self.assertEqual(payload["input_block_count"], 1)
self.assertEqual(len(payload["block_layout"]["blocks"]), 1)
self.assertEqual(payload["block_layout"]["blocks"][0]["boundary"], self.block_boundary)
self.assertEqual(payload["block_layout"]["algorithm_status"], "pending")
self.assertEqual(len(payload["block_subdivisions"]), 1)
self.assertEqual(payload["block_subdivisions"][0]["status"], "defined")
self.assertEqual(payload["satellite_snapshots"][0]["status"], "missing")
def test_post_updates_block_layout_from_input(self):
SoilLocation.objects.create(
latitude="35.689200",
longitude="51.389000",
)
response = self.client.post(
"/",
data={
"lat": 35.6892,
"lon": 51.3890,
"farm_boundary": self.boundary,
"blocks": [
{"block_code": "block-a", "boundary": self.block_boundary},
{"block_code": "block-b", "boundary": self.block_boundary},
],
},
format="json",
)
self.assertEqual(response.status_code, 200)
payload = response.json()["data"]
self.assertEqual(payload["input_block_count"], 2)
self.assertEqual(len(payload["block_layout"]["blocks"]), 2)
self.assertEqual(len(payload["block_subdivisions"]), 2)
location = SoilLocation.objects.get(latitude="35.689200", longitude="51.389000")
self.assertEqual(location.input_block_count, 2)
self.assertEqual(len(location.block_layout["blocks"]), 2)
self.assertEqual(location.block_layout["algorithm_status"], "pending")
self.assertTrue(
BlockSubdivision.objects.filter(
soil_location=location,
block_code="block-a",
status="defined",
).exists()
)
def test_get_returns_stored_subdivisions_without_processing(self):
self.client.post(
"/",
data={
"lat": 35.6892,
"lon": 51.3890,
"farm_boundary": self.boundary,
"blocks": [
{
"block_code": "block-1",
"boundary": self.block_boundary,
}
],
},
format="json",
)
response = self.client.get(
"/",
data={"lat": 35.6892, "lon": 51.3890},
)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.json()["data"]["source"], "database")
self.assertEqual(len(response.json()["data"]["block_subdivisions"]), 1)
def test_post_removes_blocks_not_present_in_latest_farmer_input(self):
self.client.post(
"/",
data={
"lat": 35.6892,
"lon": 51.3890,
"farm_boundary": self.boundary,
"blocks": [
{"block_code": "block-a", "boundary": self.block_boundary},
{"block_code": "block-b", "boundary": self.block_boundary},
],
},
format="json",
)
response = self.client.post(
"/",
data={
"lat": 35.6892,
"lon": 51.3890,
"farm_boundary": self.boundary,
"blocks": [
{"block_code": "block-a", "boundary": self.block_boundary},
],
},
format="json",
)
self.assertEqual(response.status_code, 200)
payload = response.json()["data"]
self.assertEqual(len(payload["block_subdivisions"]), 1)
self.assertEqual(payload["block_subdivisions"][0]["block_code"], "block-a")
location = SoilLocation.objects.get(latitude="35.689200", longitude="51.389000")
self.assertTrue(
BlockSubdivision.objects.filter(soil_location=location, block_code="block-a").exists()
)
self.assertFalse(
BlockSubdivision.objects.filter(soil_location=location, block_code="block-b").exists()
)
def test_post_clears_cached_grid_and_run_when_block_boundary_changes(self):
    """When a block is re-posted with a different boundary, every cached
    artifact derived from the old geometry must be invalidated: the
    subdivision's cached grid/centroid points, its AnalysisGridCell rows,
    and its RemoteSensingRun rows.
    """
    # Seed the farm with one block so a subdivision record exists.
    self.client.post(
        "/",
        data={
            "lat": 35.6892,
            "lon": 51.3890,
            "farm_boundary": self.boundary,
            "blocks": [
                {"block_code": "block-1", "boundary": self.block_boundary},
            ],
        },
        format="json",
    )
    # Lat/lon appear to be persisted as 6-decimal strings — lookup keys
    # must match that normalized form.
    location = SoilLocation.objects.get(latitude="35.689200", longitude="51.389000")
    subdivision = BlockSubdivision.objects.get(soil_location=location, block_code="block-1")
    # Simulate a completed pipeline: a stored grid cell...
    AnalysisGridCell.objects.create(
        soil_location=location,
        block_subdivision=subdivision,
        block_code="block-1",
        cell_code="cell-1",
        chunk_size_sqm=900,
        geometry=self.block_boundary,
        centroid_lat="35.689500",
        centroid_lon="51.389300",
    )
    # ...a successful remote-sensing run...
    RemoteSensingRun.objects.create(
        soil_location=location,
        block_subdivision=subdivision,
        block_code="block-1",
        chunk_size_sqm=900,
        temporal_start="2025-01-01",
        temporal_end="2025-01-31",
        status=RemoteSensingRun.STATUS_SUCCESS,
    )
    # ...and cached grid/centroid data on the subdivision itself.
    subdivision.grid_points = [{"cell_code": "cell-1"}]
    subdivision.centroid_points = [{"sub_block_code": "cluster-0"}]
    subdivision.grid_point_count = 1
    subdivision.centroid_count = 1
    subdivision.status = "subdivided"
    subdivision.save(
        update_fields=[
            "grid_points",
            "centroid_points",
            "grid_point_count",
            "centroid_count",
            "status",
            "updated_at",
        ]
    )
    # A geometrically different polygon for the same block code.
    updated_boundary = {
        "type": "Polygon",
        "coordinates": [
            [
                [51.3892, 35.6890],
                [51.3898, 35.6890],
                [51.3898, 35.6900],
                [51.3892, 35.6900],
                [51.3892, 35.6890],
            ]
        ],
    }
    # Re-post the same block with the new boundary.
    response = self.client.post(
        "/",
        data={
            "lat": 35.6892,
            "lon": 51.3890,
            "farm_boundary": self.boundary,
            "blocks": [
                {"block_code": "block-1", "boundary": updated_boundary},
            ],
        },
        format="json",
    )
    self.assertEqual(response.status_code, 200)
    subdivision.refresh_from_db()
    # Subdivision reverts to "defined" with the new boundary and all
    # cached grid/centroid data cleared.
    self.assertEqual(subdivision.status, "defined")
    self.assertEqual(subdivision.source_boundary, updated_boundary)
    self.assertEqual(subdivision.grid_points, [])
    self.assertEqual(subdivision.centroid_points, [])
    self.assertEqual(subdivision.grid_point_count, 0)
    self.assertEqual(subdivision.centroid_count, 0)
    # Dependent rows for the stale geometry must be gone as well.
    self.assertFalse(
        AnalysisGridCell.objects.filter(soil_location=location, block_code="block-1").exists()
    )
    self.assertFalse(
        RemoteSensingRun.objects.filter(soil_location=location, block_code="block-1").exists()
    )
+10 -2
View File
@@ -1,9 +1,17 @@
from django.urls import path from django.urls import path
from .views import NdviHealthView, SoilDataTaskStatusView, SoilDataView from .views import (
NdviHealthView,
RemoteSensingAnalysisView,
RemoteSensingRunResultView,
RemoteSensingRunStatusView,
SoilDataView,
)
urlpatterns = [ urlpatterns = [
path("", SoilDataView.as_view(), name="soil-data"), path("", SoilDataView.as_view(), name="soil-data"),
path("remote-sensing/", RemoteSensingAnalysisView.as_view(), name="remote-sensing"),
path("remote-sensing/runs/<int:run_id>/status/", RemoteSensingRunStatusView.as_view(), name="remote-sensing-run-status"),
path("remote-sensing/runs/<int:run_id>/result/", RemoteSensingRunResultView.as_view(), name="remote-sensing-run-result"),
path("ndvi-health/", NdviHealthView.as_view(), name="ndvi-health"), path("ndvi-health/", NdviHealthView.as_view(), name="ndvi-health"),
path("tasks/<str:task_id>/status/", SoilDataTaskStatusView.as_view(), name="soil-data-task-status"),
] ]
+856 -113
View File
File diff suppressed because it is too large Load Diff
-1
View File
@@ -1 +0,0 @@
2026-03-27 08:38:35,473 [INFO] django.utils.autoreload: Watching for file changes with StatReloader
-23
View File
@@ -1,23 +0,0 @@
2026-04-02 11:49:29,344 [INFO] django.utils.autoreload: Watching for file changes with StatReloader
2026-04-02 12:11:41,087 [INFO] django.utils.autoreload: Watching for file changes with StatReloader
2026-04-02 12:14:51,907 [INFO] rag.api_provider: gapgpt
2026-04-02 12:14:51,907 [INFO] rag.api_provider: sk-ZeFmDwROcQ2rYOFmUxHLjIwMTSUdo2qNc3Uraug9dOK2ihn5 https://api.gapgpt.app/v1
2026-04-02 12:16:13,420 [INFO] django.utils.autoreload: /app/rag/api_provider.py changed, reloading.
2026-04-02 12:16:15,842 [INFO] django.utils.autoreload: Watching for file changes with StatReloader
2026-04-02 12:16:41,558 [INFO] rag.api_provider: embedding provider=gapgpt
2026-04-02 12:16:41,559 [INFO] rag.api_provider: embedding base_url=https://api.gapgpt.app/v1 api_key=sk-Z...ihn5
2026-04-02 12:23:15,783 [INFO] django.utils.autoreload: Watching for file changes with StatReloader
2026-04-02 12:45:46,635 [INFO] rag.api_provider: embedding provider=gapgpt
2026-04-02 12:45:46,635 [INFO] rag.api_provider: embedding base_url=https://api.gapgpt.app/v1 api_key=sk-Z...ihn5
2026-04-02 12:46:00,212 [INFO] httpx: HTTP Request: POST https://api.gapgpt.app/v1/embeddings "HTTP/1.1 500 Internal Server Error"
2026-04-02 12:46:00,214 [INFO] openai._base_client: Retrying request to /embeddings in 0.836547 seconds
2026-04-02 12:46:01,336 [INFO] httpx: HTTP Request: POST https://api.gapgpt.app/v1/embeddings "HTTP/1.1 500 Internal Server Error"
2026-04-02 12:46:01,337 [INFO] openai._base_client: Retrying request to /embeddings in 1.855433 seconds
2026-04-02 12:46:03,485 [INFO] httpx: HTTP Request: POST https://api.gapgpt.app/v1/embeddings "HTTP/1.1 500 Internal Server Error"
2026-04-02 12:46:09,716 [INFO] rag.api_provider: embedding provider=gapgpt
2026-04-02 12:46:09,716 [INFO] rag.api_provider: embedding base_url=https://api.gapgpt.app/v1 api_key=sk-Z...ihn5
2026-04-02 12:46:10,114 [INFO] httpx: HTTP Request: POST https://api.gapgpt.app/v1/embeddings "HTTP/1.1 500 Internal Server Error"
2026-04-02 12:46:10,114 [INFO] openai._base_client: Retrying request to /embeddings in 0.908246 seconds
2026-04-02 12:46:11,326 [INFO] httpx: HTTP Request: POST https://api.gapgpt.app/v1/embeddings "HTTP/1.1 500 Internal Server Error"
2026-04-02 12:46:11,326 [INFO] openai._base_client: Retrying request to /embeddings in 1.841081 seconds
2026-04-02 12:46:13,570 [INFO] httpx: HTTP Request: POST https://api.gapgpt.app/v1/embeddings "HTTP/1.1 500 Internal Server Error"
-9
View File
@@ -1,9 +0,0 @@
2026-04-05 18:53:10,339 [INFO] django.utils.autoreload: Watching for file changes with StatReloader
2026-04-05 18:53:20,171 [INFO] django.server: "GET /api/docs/ HTTP/1.1" 200 4633
2026-04-05 18:53:20,293 [INFO] django.server: "GET /static/drf_spectacular_sidecar/swagger-ui-dist/swagger-ui.css HTTP/1.1" 200 152072
2026-04-05 18:53:20,298 [INFO] django.server: "GET /static/drf_spectacular_sidecar/swagger-ui-dist/swagger-ui-standalone-preset.js HTTP/1.1" 200 230007
2026-04-05 18:53:20,345 [INFO] django.server: "GET /static/drf_spectacular_sidecar/swagger-ui-dist/swagger-ui-bundle.js HTTP/1.1" 200 1426050
2026-04-05 18:53:20,679 [INFO] django.server: "GET /api/schema/ HTTP/1.1" 200 146697
2026-04-05 18:53:20,690 [INFO] django.server: "GET /static/drf_spectacular_sidecar/swagger-ui-dist/favicon-32x32.png HTTP/1.1" 200 628
2026-04-05 19:26:12,454 [INFO] django.utils.autoreload: /app/location_data/urls.py changed, reloading.
2026-04-05 19:26:14,602 [INFO] django.utils.autoreload: Watching for file changes with StatReloader
-122
View File
@@ -1,122 +0,0 @@
2026-04-06 11:26:32,124 [INFO] django.utils.autoreload: /app/location_data/urls.py changed, reloading.
2026-04-06 11:26:34,398 [INFO] django.utils.autoreload: Watching for file changes with StatReloader
2026-04-06 11:39:14,251 [INFO] django.utils.autoreload: /app/sensor_data/urls.py changed, reloading.
2026-04-06 11:39:16,822 [INFO] django.utils.autoreload: Watching for file changes with StatReloader
2026-04-06 11:42:41,500 [INFO] django.utils.autoreload: /app/sensor_data/views.py changed, reloading.
2026-04-06 11:42:43,947 [INFO] django.utils.autoreload: Watching for file changes with StatReloader
2026-04-06 11:43:26,823 [INFO] django.utils.autoreload: /app/sensor_data/models.py changed, reloading.
2026-04-06 11:43:29,150 [INFO] django.utils.autoreload: Watching for file changes with StatReloader
2026-04-06 11:45:54,258 [INFO] django.utils.autoreload: /app/sensor_data/apps.py changed, reloading.
2026-04-06 11:45:56,525 [INFO] django.utils.autoreload: Watching for file changes with StatReloader
2026-04-06 11:46:06,483 [INFO] django.utils.autoreload: /app/sensor_data/models.py changed, reloading.
2026-04-06 11:46:09,070 [INFO] django.utils.autoreload: Watching for file changes with StatReloader
2026-04-06 11:46:11,650 [INFO] django.utils.autoreload: /app/location_data/models.py changed, reloading.
2026-04-06 11:46:14,185 [INFO] django.utils.autoreload: Watching for file changes with StatReloader
2026-04-06 12:18:40,264 [INFO] django.utils.autoreload: /app/rag/user_data.py changed, reloading.
2026-04-06 12:18:41,538 [INFO] django.utils.autoreload: Watching for file changes with StatReloader
2026-04-06 12:24:14,956 [INFO] django.utils.autoreload: /app/location_data/models.py changed, reloading.
2026-04-06 12:24:16,211 [INFO] django.utils.autoreload: Watching for file changes with StatReloader
2026-04-06 12:24:42,989 [INFO] django.utils.autoreload: /app/sensor_data/models.py changed, reloading.
2026-04-06 12:24:44,253 [INFO] django.utils.autoreload: Watching for file changes with StatReloader
2026-04-06 12:25:21,172 [INFO] django.utils.autoreload: /app/sensor_data/serializers.py changed, reloading.
2026-04-06 12:25:22,430 [INFO] django.utils.autoreload: Watching for file changes with StatReloader
2026-04-06 12:27:52,025 [INFO] django.utils.autoreload: /app/sensor_data/views.py changed, reloading.
2026-04-06 12:27:53,320 [INFO] django.utils.autoreload: Watching for file changes with StatReloader
2026-04-06 12:29:13,931 [INFO] django.utils.autoreload: /app/dashboard_data/context.py changed, reloading.
2026-04-06 12:29:15,202 [INFO] django.utils.autoreload: Watching for file changes with StatReloader
2026-04-06 12:29:52,134 [INFO] django.utils.autoreload: /app/dashboard_data/services.py changed, reloading.
2026-04-06 12:29:53,502 [INFO] django.utils.autoreload: Watching for file changes with StatReloader
2026-04-06 12:30:32,458 [INFO] django.utils.autoreload: /app/dashboard_data/cards/soil_moisture_heatmap.py changed, reloading.
2026-04-06 12:30:33,743 [INFO] django.utils.autoreload: Watching for file changes with StatReloader
2026-04-06 12:31:21,793 [INFO] django.utils.autoreload: /app/rag/user_data.py changed, reloading.
2026-04-06 12:31:23,054 [INFO] django.utils.autoreload: Watching for file changes with StatReloader
2026-04-06 12:32:42,612 [INFO] django.utils.autoreload: /app/sensor_data/admin.py changed, reloading.
2026-04-06 12:32:43,862 [INFO] django.utils.autoreload: Watching for file changes with StatReloader
2026-04-06 12:37:47,634 [INFO] django.utils.autoreload: /app/sensor_data/models.py changed, reloading.
2026-04-06 12:37:48,919 [INFO] django.utils.autoreload: Watching for file changes with StatReloader
2026-04-06 12:38:37,993 [INFO] django.utils.autoreload: /app/sensor_data/serializers.py changed, reloading.
2026-04-06 12:38:39,289 [INFO] django.utils.autoreload: Watching for file changes with StatReloader
2026-04-06 12:39:26,803 [INFO] django.server: "GET /api/docs/ HTTP/1.1" 200 4633
2026-04-06 12:39:26,837 [INFO] django.server: "GET /static/drf_spectacular_sidecar/swagger-ui-dist/swagger-ui.css HTTP/1.1" 304 0
2026-04-06 12:39:26,839 [INFO] django.server: "GET /static/drf_spectacular_sidecar/swagger-ui-dist/swagger-ui-bundle.js HTTP/1.1" 304 0
2026-04-06 12:39:26,840 [INFO] django.server: "GET /static/drf_spectacular_sidecar/swagger-ui-dist/swagger-ui-standalone-preset.js HTTP/1.1" 304 0
2026-04-06 12:39:27,022 [INFO] django.server: "GET /api/schema/ HTTP/1.1" 200 146171
2026-04-06 12:39:27,033 [INFO] django.server: "GET /static/drf_spectacular_sidecar/swagger-ui-dist/favicon-32x32.png HTTP/1.1" 304 0
2026-04-06 12:39:31,562 [INFO] django.utils.autoreload: /app/sensor_data/views.py changed, reloading.
2026-04-06 12:39:32,855 [INFO] django.utils.autoreload: Watching for file changes with StatReloader
2026-04-06 12:40:00,782 [INFO] django.utils.autoreload: /app/sensor_data/admin.py changed, reloading.
2026-04-06 12:40:02,109 [INFO] django.utils.autoreload: Watching for file changes with StatReloader
2026-04-06 12:44:16,444 [INFO] django.utils.autoreload: /app/sensor_data/serializers.py changed, reloading.
2026-04-06 12:44:17,780 [INFO] django.utils.autoreload: Watching for file changes with StatReloader
2026-04-06 12:45:00,763 [INFO] django.utils.autoreload: /app/sensor_data/views.py changed, reloading.
2026-04-06 12:45:02,047 [INFO] django.utils.autoreload: Watching for file changes with StatReloader
2026-04-06 12:45:10,529 [INFO] django.utils.autoreload: /app/sensor_data/urls.py changed, reloading.
2026-04-06 12:45:11,817 [INFO] django.utils.autoreload: Watching for file changes with StatReloader
2026-04-06 12:51:28,201 [INFO] django.utils.autoreload: /app/config/urls.py changed, reloading.
2026-04-06 12:51:29,679 [INFO] django.utils.autoreload: Watching for file changes with StatReloader
2026-04-06 12:54:04,072 [INFO] django.utils.autoreload: /app/dashboard_data/cards/soil_moisture_heatmap.py changed, reloading.
2026-04-06 12:54:05,375 [INFO] django.utils.autoreload: Watching for file changes with StatReloader
2026-04-06 19:42:33,233 [INFO] django.utils.autoreload: Watching for file changes with StatReloader
2026-04-06 19:45:04,902 [INFO] django.utils.autoreload: /app/farm_data/models.py changed, reloading.
2026-04-06 19:45:07,113 [INFO] django.utils.autoreload: Watching for file changes with StatReloader
2026-04-06 19:49:48,250 [INFO] django.utils.autoreload: /app/location_data/models.py changed, reloading.
2026-04-06 19:49:50,545 [INFO] django.utils.autoreload: Watching for file changes with StatReloader
2026-04-06 19:51:47,708 [INFO] django.server: "GET /api/docs/ HTTP/1.1" 200 4633
2026-04-06 19:51:48,041 [INFO] django.server: "GET /api/schema/ HTTP/1.1" 200 151490
2026-04-06 19:52:16,066 [ERROR] django.request: Internal Server Error: /api/farm-data/11111111-1111-1111-1111-111111111111/detail/
Traceback (most recent call last):
File "/usr/local/lib/python3.10/site-packages/django/core/handlers/exception.py", line 55, in inner
response = get_response(request)
File "/usr/local/lib/python3.10/site-packages/django/core/handlers/base.py", line 197, in _get_response
response = wrapped_callback(request, *callback_args, **callback_kwargs)
File "/usr/local/lib/python3.10/site-packages/django/views/decorators/csrf.py", line 65, in _view_wrapper
return view_func(request, *args, **kwargs)
File "/usr/local/lib/python3.10/site-packages/django/views/generic/base.py", line 105, in view
return self.dispatch(request, *args, **kwargs)
File "/usr/local/lib/python3.10/site-packages/rest_framework/views.py", line 509, in dispatch
response = self.handle_exception(exc)
File "/usr/local/lib/python3.10/site-packages/rest_framework/views.py", line 469, in handle_exception
self.raise_uncaught_exception(exc)
File "/usr/local/lib/python3.10/site-packages/rest_framework/views.py", line 480, in raise_uncaught_exception
raise exc
File "/usr/local/lib/python3.10/site-packages/rest_framework/views.py", line 506, in dispatch
response = handler(request, *args, **kwargs)
File "/app/farm_data/views.py", line 257, in get
data = get_farm_details(str(farm_uuid))
File "/app/farm_data/services.py", line 47, in get_farm_details
"ideal_sensor_profile": center_location.ideal_sensor_profile,
AttributeError: 'SoilLocation' object has no attribute 'ideal_sensor_profile'
2026-04-06 19:52:16,072 [ERROR] django.server: "GET /api/farm-data/11111111-1111-1111-1111-111111111111/detail/ HTTP/1.1" 500 18299
2026-04-06 19:53:58,048 [INFO] django.utils.autoreload: /app/farm_data/services.py changed, reloading.
2026-04-06 19:54:00,346 [INFO] django.utils.autoreload: Watching for file changes with StatReloader
2026-04-06 19:54:30,811 [INFO] django.utils.autoreload: /app/dashboard_data/cards/sensor_radar_chart.py changed, reloading.
2026-04-06 19:54:33,051 [INFO] django.utils.autoreload: Watching for file changes with StatReloader
2026-04-06 19:56:59,327 [INFO] django.utils.autoreload: /app/farm_data/serializers.py changed, reloading.
2026-04-06 19:57:01,484 [INFO] django.utils.autoreload: Watching for file changes with StatReloader
2026-04-06 19:57:23,727 [INFO] django.utils.autoreload: /app/farm_data/services.py changed, reloading.
2026-04-06 19:57:25,926 [INFO] django.utils.autoreload: Watching for file changes with StatReloader
2026-04-06 19:57:52,204 [INFO] django.utils.autoreload: /app/farm_data/views.py changed, reloading.
2026-04-06 19:57:54,397 [INFO] django.utils.autoreload: Watching for file changes with StatReloader
2026-04-06 19:59:14,870 [INFO] django.server: "GET /api/docs/ HTTP/1.1" 200 4633
2026-04-06 19:59:15,305 [INFO] django.server: "GET /api/schema/ HTTP/1.1" 200 153398
2026-04-06 19:59:38,242 [INFO] django.utils.autoreload: /app/farm_data/services.py changed, reloading.
2026-04-06 19:59:40,459 [INFO] django.utils.autoreload: Watching for file changes with StatReloader
2026-04-06 20:01:11,430 [INFO] django.utils.autoreload: /app/farm_data/services.py changed, reloading.
2026-04-06 20:01:13,631 [INFO] django.utils.autoreload: Watching for file changes with StatReloader
2026-04-06 20:05:55,681 [INFO] django.utils.autoreload: Watching for file changes with StatReloader
2026-04-06 20:06:01,544 [INFO] django.server: "GET /api/docs/ HTTP/1.1" 200 4633
2026-04-06 20:06:01,906 [INFO] django.server: "GET /api/schema/ HTTP/1.1" 200 153398
2026-04-06 20:06:22,223 [INFO] django.server: "GET /api/farm-data/11111111-1111-1111-1111-111111111111/detail/ HTTP/1.1" 200 2088
2026-04-06 20:09:19,767 [INFO] django.utils.autoreload: /app/farm_data/serializers.py changed, reloading.
2026-04-06 20:09:21,982 [INFO] django.utils.autoreload: Watching for file changes with StatReloader
2026-04-06 20:09:27,799 [INFO] django.utils.autoreload: /app/farm_data/services.py changed, reloading.
2026-04-06 20:09:29,995 [INFO] django.utils.autoreload: Watching for file changes with StatReloader
2026-04-06 20:09:45,822 [INFO] django.server: "GET /api/farm-data/11111111-1111-1111-1111-111111111111/detail/ HTTP/1.1" 200 3125
2026-04-06 20:10:27,291 [INFO] django.server: "POST /api/farm-data/ HTTP/1.1" 201 407
2026-04-06 20:11:07,920 [INFO] django.server: "GET /api/farm-data/550e8400-e29b-41d4-a716-446655440000/detail/ HTTP/1.1" 200 919
2026-04-06 20:26:57,235 [INFO] django.utils.autoreload: /app/location_data/tasks.py changed, reloading.
2026-04-06 20:26:59,781 [INFO] django.utils.autoreload: Watching for file changes with StatReloader
2026-04-06 20:27:32,325 [INFO] django.utils.autoreload: /app/farm_data/views.py changed, reloading.
2026-04-06 20:27:34,615 [INFO] django.utils.autoreload: Watching for file changes with StatReloader
2026-04-06 21:09:58,465 [ERROR] django.request: Bad Gateway: /api/farm-data/
2026-04-06 21:09:58,465 [ERROR] django.server: "POST /api/farm-data/ HTTP/1.1" 502 654
+14 -18
View File
@@ -38,13 +38,13 @@ def _model_to_data_fields(instance: Model, exclude: set[str] | None = None) -> d
def build_user_soil_text(sensor_uuid: str) -> str | None: def build_user_soil_text(sensor_uuid: str) -> str | None:
""" """
ساخت متن قابل embed برای یک سنسور (کاربر). ساخت متن قابل embed برای یک سنسور (کاربر).
از SensorData SoilLocation SoilDepthData خوانده میشود. از SensorData SoilLocation latest remote sensing snapshots خوانده میشود.
Returns: Returns:
متن متنی قابل چانک، یا None اگر سنسور یافت نشد. متن متنی قابل چانک، یا None اگر سنسور یافت نشد.
""" """
from farm_data.models import SensorData from farm_data.models import SensorData
from location_data.models import SoilDepthData from location_data.satellite_snapshot import build_location_block_satellite_snapshots
try: try:
sensor = SensorData.objects.select_related("center_location").get( sensor = SensorData.objects.select_related("center_location").get(
@@ -72,23 +72,19 @@ def build_user_soil_text(sensor_uuid: str) -> str | None:
sensor_lines = [f" {k}: {v}" for k, v in sorted(sensor_fields.items())] sensor_lines = [f" {k}: {v}" for k, v in sorted(sensor_fields.items())]
parts.append("خوانش‌های سنسور:\n" + "\n".join(sensor_lines)) parts.append("خوانش‌های سنسور:\n" + "\n".join(sensor_lines))
# داده‌های خاک به تفکیک عمق snapshots = build_location_block_satellite_snapshots(loc)
depths = ( if snapshots:
SoilDepthData.objects.filter(soil_location=loc) snapshot_lines = []
.order_by("depth_label") for snapshot in snapshots:
.all() metrics = snapshot.get("resolved_metrics") or {}
if not metrics:
continue
lines = [f" {k}: {v}" for k, v in sorted(metrics.items())]
snapshot_lines.append(
f" بلوک {snapshot.get('block_code') or 'farm'}:\n" + "\n".join(lines)
) )
if depths: if snapshot_lines:
depth_parts = [] parts.append("داده‌های ماهواره‌ای:\n" + "\n".join(snapshot_lines))
for d in depths:
d_data = _model_to_data_fields(
d, exclude={"soil_location", "soil_location_id"}
)
if d_data:
lines = [f" {k}: {v}" for k, v in sorted(d_data.items())]
depth_parts.append(f" عمق {d.depth_label}:\n" + "\n".join(lines))
if depth_parts:
parts.append("داده‌های خاک:\n" + "\n".join(depth_parts))
return "\n\n".join(parts) if len(parts) > 1 else None return "\n\n".join(parts) if len(parts) > 1 else None
+4
View File
@@ -26,9 +26,13 @@ redis>=5.0,<5.1
requests>=2.31,<2.32 requests>=2.31,<2.32
httpx>=0.27,<0.28 httpx>=0.27,<0.28
openai>=1.0,<1.40 openai>=1.0,<1.40
openeo>=0.29,<0.40
# === NumPy (pinned for Python 3.10 compatibility) === # === NumPy (pinned for Python 3.10 compatibility) ===
numpy>=1.23,<1.27 numpy>=1.23,<1.27
scikit-learn>=1.3,<1.6
matplotlib>=3.7,<3.9
Pillow>=10.0,<11.0
pcse pcse
# === Vector Databases === # === Vector Databases ===
+20 -13
View File
@@ -9,6 +9,7 @@ from django.utils import timezone
from farm_data.context import load_farm_context from farm_data.context import load_farm_context
from farm_data.models import SensorData from farm_data.models import SensorData
from location_data.satellite_snapshot import build_location_satellite_snapshot
from rag.services import get_soil_anomaly_insight from rag.services import get_soil_anomaly_insight
from .anomaly_detection import build_anomaly_detection_card from .anomaly_detection import build_anomaly_detection_card
@@ -198,7 +199,6 @@ def _load_sensor_network(current_sensor: Any) -> list[Any]:
) )
queryset = SensorData.objects.select_related("center_location").prefetch_related( queryset = SensorData.objects.select_related("center_location").prefetch_related(
"plant_assignments__plant", "plant_assignments__plant",
"center_location__depths",
) )
if plant_ids: if plant_ids:
queryset = queryset.filter( queryset = queryset.filter(
@@ -208,20 +208,27 @@ def _load_sensor_network(current_sensor: Any) -> list[Any]:
def _soil_profile(sensor: Any) -> list[dict[str, Any]]: def _soil_profile(sensor: Any) -> list[dict[str, Any]]:
depths = sensor.center_location.depths.all() snapshot = build_location_satellite_snapshot(sensor.center_location)
metrics = snapshot.get("resolved_metrics") or {}
if not metrics:
return []
return [ return [
{ {
"depth_label": depth.depth_label, "depth_label": "surface_30x30_remote_sensing",
"field_capacity": depth.wv0033, "field_capacity": metrics.get("ndwi"),
"wilting_point": depth.wv1500, "wilting_point": None,
"saturation": depth.wv0010, "saturation": None,
"nitrogen": depth.nitrogen, "nitrogen": None,
"ph": depth.phh2o, "ph": None,
"sand": depth.sand, "sand": None,
"silt": depth.silt, "silt": None,
"clay": depth.clay, "clay": None,
"ndvi": metrics.get("ndvi"),
"lst_c": metrics.get("lst_c"),
"soil_vv_db": metrics.get("soil_vv_db"),
"dem_m": metrics.get("dem_m"),
"slope_deg": metrics.get("slope_deg"),
} }
for depth in depths
] ]
@@ -277,7 +284,7 @@ class SoilMoistureHeatmapService:
def get_heatmap(self, *, farm_uuid: str) -> dict[str, Any]: def get_heatmap(self, *, farm_uuid: str) -> dict[str, Any]:
current_sensor = ( current_sensor = (
SensorData.objects.select_related("center_location") SensorData.objects.select_related("center_location")
.prefetch_related("plant_assignments__plant", "center_location__depths") .prefetch_related("plant_assignments__plant")
.filter(farm_uuid=farm_uuid) .filter(farm_uuid=farm_uuid)
.first() .first()
) )