This commit is contained in:
2026-05-13 22:28:56 +03:30
parent 46fe62fa04
commit 45fee1dfd3
26 changed files with 2329 additions and 878 deletions
-1
View File
@@ -15,7 +15,6 @@ urlpatterns = [
path("api/farm-data/", include("farm_data.urls")), path("api/farm-data/", include("farm_data.urls")),
path("api/weather/", include("weather.urls")), path("api/weather/", include("weather.urls")),
path("api/economy/", include("economy.urls")), path("api/economy/", include("economy.urls")),
path("api/plants/", include("plant.urls")),
path("api/pest-disease/", include("pest_disease.urls")), path("api/pest-disease/", include("pest_disease.urls")),
path("api/irrigation/", include("irrigation.urls")), path("api/irrigation/", include("irrigation.urls")),
path("api/fertilization/", include("fertilization.urls")), path("api/fertilization/", include("fertilization.urls")),
+7 -5
View File
@@ -10,8 +10,8 @@ import logging
from django.apps import apps from django.apps import apps
from django.core.paginator import EmptyPage, Paginator from django.core.paginator import EmptyPage, Paginator
from farm_data.models import SensorData from farm_data.models import PlantCatalogSnapshot, SensorData
from farm_data.services import get_canonical_farm_record, get_runtime_plant_for_farm from farm_data.services import clone_snapshot_as_runtime_plant, get_canonical_farm_record, get_runtime_plant_for_farm
from location_data.satellite_snapshot import build_location_satellite_snapshot from location_data.satellite_snapshot import build_location_satellite_snapshot
from plant.gdd import calculate_daily_gdd, resolve_growth_profile from plant.gdd import calculate_daily_gdd, resolve_growth_profile
from weather.models import WeatherForecast from weather.models import WeatherForecast
@@ -277,9 +277,11 @@ def _resolve_plant_simulation_defaults(plant: Any) -> tuple[dict[str, Any] | Non
def build_growth_context(payload: dict[str, Any]) -> GrowthSimulationContext: def build_growth_context(payload: dict[str, Any]) -> GrowthSimulationContext:
plant_name = apps.get_app_config("plant").resolve_plant_name(payload["plant_name"]) or payload["plant_name"] plant_name = apps.get_app_config("plant").resolve_plant_name(payload["plant_name"]) or payload["plant_name"]
from plant.models import Plant snapshot = (
PlantCatalogSnapshot.objects.filter(name=plant_name).first()
plant = Plant.objects.filter(name=plant_name).first() or PlantCatalogSnapshot.objects.filter(name__iexact=plant_name).first()
)
plant = clone_snapshot_as_runtime_plant(snapshot)
if plant is None: if plant is None:
raise GrowthSimulationError("Plant not found.") raise GrowthSimulationError("Plant not found.")
@@ -6,7 +6,7 @@ from unittest.mock import patch
from django.test import TestCase, override_settings from django.test import TestCase, override_settings
from rest_framework.test import APIClient from rest_framework.test import APIClient
from plant.models import Plant from farm_data.models import PlantCatalogSnapshot
from .growth_simulation import paginate_growth_stages, run_growth_simulation from .growth_simulation import paginate_growth_stages, run_growth_simulation
@@ -15,7 +15,8 @@ from .growth_simulation import paginate_growth_stages, run_growth_simulation
class PlantGrowthSimulationApiTests(TestCase): class PlantGrowthSimulationApiTests(TestCase):
def setUp(self): def setUp(self):
self.client = APIClient() self.client = APIClient()
self.plant = Plant.objects.create( self.plant = PlantCatalogSnapshot.objects.create(
backend_plant_id=301,
name="گوجه‌فرنگی", name="گوجه‌فرنگی",
growth_profile={ growth_profile={
"base_temperature": 10, "base_temperature": 10,
+2 -1
View File
@@ -12,6 +12,7 @@ from rest_framework.test import APIRequestFactory
from .models import SimulationRun, SimulationScenario from .models import SimulationRun, SimulationScenario
from farm_data.models import PlantCatalogSnapshot, SensorData from farm_data.models import PlantCatalogSnapshot, SensorData
from farm_data.services import assign_farm_plants_from_backend_ids
from irrigation.models import IrrigationMethod from irrigation.models import IrrigationMethod
from location_data.models import SoilLocation from location_data.models import SoilLocation
from weather.models import WeatherForecast from weather.models import WeatherForecast
@@ -393,7 +394,7 @@ class CropSimulationCanonicalSnapshotTests(TestCase):
weather_forecast=self.weather, weather_forecast=self.weather,
irrigation_method=self.irrigation_method, irrigation_method=self.irrigation_method,
) )
self.farm.plants.add(self.plant) assign_farm_plants_from_backend_ids(self.farm, [self.plant.backend_plant_id])
@patch("crop_simulation.services.build_ai_farm_snapshot") @patch("crop_simulation.services.build_ai_farm_snapshot")
def test_build_simulation_payload_from_farm_uses_aggregated_metrics(self, mock_snapshot): def test_build_simulation_payload_from_farm_uses_aggregated_metrics(self, mock_snapshot):
+2
View File
@@ -61,6 +61,7 @@ services:
PROXYCHAINS_PROXY_HOST: ${PROXYCHAINS_PROXY_HOST:-host.docker.internal} PROXYCHAINS_PROXY_HOST: ${PROXYCHAINS_PROXY_HOST:-host.docker.internal}
PROXYCHAINS_PROXY_PORT: ${PROXYCHAINS_PROXY_PORT:-10808} PROXYCHAINS_PROXY_PORT: ${PROXYCHAINS_PROXY_PORT:-10808}
PROXYCHAINS_CHAIN_MODE: ${PROXYCHAINS_CHAIN_MODE:-strict_chain} PROXYCHAINS_CHAIN_MODE: ${PROXYCHAINS_CHAIN_MODE:-strict_chain}
OPENEO_VERIFY_ON_STARTUP: ${OPENEO_VERIFY_ON_STARTUP:-0}
depends_on: depends_on:
db: db:
condition: service_healthy condition: service_healthy
@@ -101,6 +102,7 @@ services:
PROXYCHAINS_PROXY_HOST: ${PROXYCHAINS_PROXY_HOST:-host.docker.internal} PROXYCHAINS_PROXY_HOST: ${PROXYCHAINS_PROXY_HOST:-host.docker.internal}
PROXYCHAINS_PROXY_PORT: ${PROXYCHAINS_PROXY_PORT:-10808} PROXYCHAINS_PROXY_PORT: ${PROXYCHAINS_PROXY_PORT:-10808}
PROXYCHAINS_CHAIN_MODE: ${PROXYCHAINS_CHAIN_MODE:-strict_chain} PROXYCHAINS_CHAIN_MODE: ${PROXYCHAINS_CHAIN_MODE:-strict_chain}
OPENEO_VERIFY_ON_STARTUP: ${OPENEO_VERIFY_ON_STARTUP:-0}
depends_on: depends_on:
db: db:
condition: service_healthy condition: service_healthy
+2
View File
@@ -81,6 +81,7 @@ services:
PROXYCHAINS_PROXY_HOST: ${PROXYCHAINS_PROXY_HOST:-host.docker.internal} PROXYCHAINS_PROXY_HOST: ${PROXYCHAINS_PROXY_HOST:-host.docker.internal}
PROXYCHAINS_PROXY_PORT: ${PROXYCHAINS_PROXY_PORT:-10808} PROXYCHAINS_PROXY_PORT: ${PROXYCHAINS_PROXY_PORT:-10808}
PROXYCHAINS_CHAIN_MODE: ${PROXYCHAINS_CHAIN_MODE:-strict_chain} PROXYCHAINS_CHAIN_MODE: ${PROXYCHAINS_CHAIN_MODE:-strict_chain}
OPENEO_VERIFY_ON_STARTUP: ${OPENEO_VERIFY_ON_STARTUP:-0}
depends_on: depends_on:
db: db:
condition: service_healthy condition: service_healthy
@@ -118,6 +119,7 @@ services:
PROXYCHAINS_PROXY_HOST: ${PROXYCHAINS_PROXY_HOST:-host.docker.internal} PROXYCHAINS_PROXY_HOST: ${PROXYCHAINS_PROXY_HOST:-host.docker.internal}
PROXYCHAINS_PROXY_PORT: ${PROXYCHAINS_PROXY_PORT:-10808} PROXYCHAINS_PROXY_PORT: ${PROXYCHAINS_PROXY_PORT:-10808}
PROXYCHAINS_CHAIN_MODE: ${PROXYCHAINS_CHAIN_MODE:-strict_chain} PROXYCHAINS_CHAIN_MODE: ${PROXYCHAINS_CHAIN_MODE:-strict_chain}
OPENEO_VERIFY_ON_STARTUP: ${OPENEO_VERIFY_ON_STARTUP:-0}
depends_on: depends_on:
db: db:
condition: service_healthy condition: service_healthy
File diff suppressed because it is too large Load Diff
+291
View File
@@ -0,0 +1,291 @@
# ارتباط سرویس‌ها با Plant و گیاهان
این سند توضیح می‌دهد که در پروژه، داده‌ی گیاه از کجا می‌آید، چطور در `farm_data` نگه‌داری می‌شود، چگونه به سرویس‌های `crop_simulation` می‌رسد و در نهایت چطور در `location_data` برای پیشنهاد گیاه هر کلاستر استفاده می‌شود.
## نمای کلی
در این پروژه سه لایه اصلی برای کار با گیاه وجود دارد:
1. `plant`
2. `farm_data`
3. `crop_simulation` و `location_data`
نقش هر لایه:
- `plant`: مرجع canonical نام گیاه و aliasها است.
- `farm_data`: نسخه snapshot شده‌ی گیاهان Backend و assignment هر مزرعه به گیاه‌ها را نگه می‌دارد.
- `crop_simulation`: از گیاه انتخاب‌شده برای ساخت ورودی شبیه‌سازی استفاده می‌کند.
- `location_data`: داده‌ی کلاسترهای KMeans را با گیاه‌های مزرعه ترکیب می‌کند و پیشنهاد گیاه می‌سازد.
## 1) لایه plant
اپ `plant` مرجع اصلی برای resolve کردن نام گیاه است.
فایل مهم:
- `plant/apps.py`
تابع مهم:
- `resolve_plant_name`
رفتار این تابع:
- نام ورودی را می‌گیرد.
- اگر همان نام در جدول `plant.Plant` وجود داشته باشد، همان را برمی‌گرداند.
- اگر alias برای آن تعریف شده باشد، alias را به نام canonical تبدیل می‌کند.
- اگر از نظر نرمال‌سازی متنی با یک گیاه match شود، همان نام canonical را برمی‌گرداند.
در نتیجه:
- ورودی‌هایی مثل `plant_name`، `crop` یا `crop_name` قبل از ورود به شبیه‌سازی، به نام استاندارد تبدیل می‌شوند.
## 2) لایه farm_data
اپ `farm_data` مدل read-model مربوط به گیاهان هر مزرعه را نگه می‌دارد.
مدل‌های اصلی:
- `farm_data.models.PlantCatalogSnapshot`
- `farm_data.models.FarmPlantAssignment`
- `farm_data.models.SensorData`
### PlantCatalogSnapshot
این مدل کپی محلی و خواندنی از کاتالوگ گیاه Backend است.
اطلاعاتی که در آن نگه‌داری می‌شود:
- نام گیاه
- توضیحات
- `growth_profile`
- `irrigation_profile`
- `health_profile`
- فصل کاشت، زمان برداشت، فاصله کاشت، کود و ...
این مدل منبع اصلی هوش مصنوعی برای خواندن پروفایل گیاه است، نه relation قدیمی `SensorData.plants`.
### FarmPlantAssignment
این مدل مشخص می‌کند هر مزرعه چه گیاه‌هایی دارد.
فیلدهای مهم:
- `farm`
- `plant`
- `position`
- `stage`
- `metadata`
یعنی هر مزرعه می‌تواند چند گیاه داشته باشد و ترتیب و مرحله رشد هرکدام هم ثبت می‌شود.
### توابع مهم در farm_data/services.py
فایل مهم:
- `farm_data/services.py`
توابع کلیدی:
- `sync_plant_catalog_from_backend`
- `assign_farm_plants_from_backend_ids`
- `get_farm_plant_assignments`
- `get_farm_plant_snapshots`
- `get_primary_plant_snapshot`
- `get_farm_plant_snapshot_by_name`
- `clone_snapshot_as_runtime_plant`
- `get_runtime_plant_for_farm`
- `list_runtime_plants_for_farm`
### جریان داده در farm_data
1. کاتالوگ گیاه از Backend خوانده می‌شود و داخل `PlantCatalogSnapshot` ذخیره می‌شود.
2. گیاه‌های انتخاب‌شده‌ی هر مزرعه با `FarmPlantAssignment` ثبت می‌شوند.
3. اگر سرویس شبیه‌سازی یک `plant_name` مشخص بگیرد، همان گیاه از assignmentها پیدا می‌شود.
4. اگر `plant_name` ارسال نشود، گیاه اول مزرعه به عنوان پیش‌فرض انتخاب می‌شود.
### Runtime Plant
تابع `get_runtime_plant_for_farm` یک snapshot را به یک object سبک runtime تبدیل می‌کند تا downstream serviceها بدون وابستگی مستقیم به مدل DB از آن استفاده کنند.
این object شامل فیلدهایی مثل:
- `name`
- `growth_profile`
- `irrigation_profile`
- `health_profile`
- `planting_season`
- `harvest_time`
است.
## 3) ورود گیاه به crop_simulation
فایل‌های مهم:
- `crop_simulation/services.py`
- `crop_simulation/growth_simulation.py`
- `crop_simulation/harvest_prediction.py`
- `crop_simulation/yield_prediction.py`
### build_simulation_payload_from_farm
مهم‌ترین نقطه اتصال بین `farm_data` و `crop_simulation` این تابع است:
- `crop_simulation.services.build_simulation_payload_from_farm`
این تابع:
1. مزرعه را با `get_canonical_farm_record` پیدا می‌کند.
2. گیاه را با `get_runtime_plant_for_farm` resolve می‌کند.
3. snapshot هوش مصنوعی مزرعه را می‌خواند.
4. weather, soil, site_parameters را می‌سازد.
5. از پروفایل گیاه، `crop_parameters` و در صورت وجود `agromanagement` پیش‌فرض را استخراج می‌کند.
خروجی این تابع شامل این بخش‌هاست:
- `plant`
- `runtime_plants`
- `weather`
- `soil`
- `site_parameters`
- `crop_parameters`
- `agromanagement`
یعنی تمام چیزی که موتور شبیه‌سازی لازم دارد.
### استفاده در Growth Simulation
در `crop_simulation/growth_simulation.py` اگر `farm_uuid` داده شود:
- `build_growth_context` از `build_simulation_payload_from_farm` استفاده می‌کند.
- گیاه انتخاب‌شده وارد context می‌شود.
- سپس شبیه‌سازی PCSE یا fallback projection روی همان گیاه اجرا می‌شود.
### استفاده در Harvest Prediction
در `crop_simulation/harvest_prediction.py` اگر `plant_name` ارسال نشود:
- سرویس با `get_runtime_plant_for_farm` گیاه پیش‌فرض مزرعه را پیدا می‌کند.
- سپس از همان گیاه برای محاسبه‌ی GDD و پیش‌بینی برداشت استفاده می‌شود.
### استفاده در Yield Prediction
در `crop_simulation/yield_prediction.py`:
- سرویس chart فعلی مزرعه را صدا می‌زند.
- chart هم قبلاً گیاه را از مسیر canonical مزرعه resolve کرده است.
- بنابراین yield همیشه روی یک گیاه مشخص از assignmentهای مزرعه محاسبه می‌شود.
## 4) نقش serializerها در resolve کردن نام گیاه
فایل مهم:
- `crop_simulation/serializers.py`
کلاس مهم:
- `PlantNameAliasMixin`
این mixin:
- `plant_name`
- `crop`
- `crop_name`
را قبول می‌کند و با `apps.get_app_config("plant").resolve_plant_name(...)` آن را canonical می‌کند.
پس حتی اگر کلاینت نام گیاه را با alias بفرستد، سرویس شبیه‌سازی با نام استاندارد کار می‌کند.
## 5) ارتباط location_data با گیاه‌ها
فایل مهم:
- `location_data/cluster_recommendation.py`
تابع اصلی:
- `build_cluster_crop_recommendations`
این تابع ارتباط بین کلاسترهای KMeans و گیاه‌های مزرعه را می‌سازد.
### ورودی
- `farm_uuid`
### کارهایی که انجام می‌دهد
1. مزرعه را از `farm_data` پیدا می‌کند.
2. لیست گیاه‌های ثبت‌شده را با `get_farm_plant_assignments` می‌خواند.
3. snapshot کلاسترهای `location_data` را می‌گیرد.
4. برای هر گیاه ثبت‌شده، با `build_simulation_payload_from_farm` یک payload پایه می‌سازد.
5. برای هر کلاستر:
- متریک‌های همان کلاستر مثل `ndvi`, `ndwi`, `soil_vv`, `soil_vv_db` را جمع می‌کند.
- پارامترهای soil/site را با داده همان کلاستر override می‌کند.
- برای تک‌تک گیاه‌های مزرعه شبیه‌سازی اجرا می‌کند.
- خروجی‌ها را بر اساس `yield_estimate` رتبه‌بندی می‌کند.
6. بهترین گیاه را به عنوان `suggested_plant` برمی‌گرداند.
### نتیجه
`location_data` خودش مرجع گیاه نیست؛ فقط:
- گیاه‌ها را از `farm_data`
- نام canonical را از `plant`
- منطق شبیه‌سازی را از `crop_simulation`
می‌گیرد و روی داده‌های کلاستر اعمال می‌کند.
## 6) ترتیب مسئولیت‌ها
برای جلوگیری از ابهام، مسئولیت هر بخش این است:
- `plant`:
- canonical name
- alias resolving
- `farm_data`:
- snapshot گیاه
- assignment گیاه به مزرعه
- تبدیل snapshot به runtime plant
- `crop_simulation`:
- ساخت payload شبیه‌سازی از مزرعه و گیاه
- اجرای شبیه‌سازی رشد، عملکرد و برداشت
- `location_data`:
- خواندن کلاسترهای KMeans
- مقایسه گیاه‌های مزرعه برای هر کلاستر
- پیشنهاد گیاه برای sub-block
## 7) نکات مهم طراحی
- relation قدیمی `SensorData.plants` مسیر legacy است و منبع canonical نیست.
- مسیر canonical برای گیاه‌های مزرعه، `FarmPlantAssignment` و `PlantCatalogSnapshot` است.
- سرویس‌های شبیه‌سازی نباید مستقیم از `plant.Plant` برای گیاه مزرعه استفاده کنند؛ مسیر درست، `farm_data.services.get_runtime_plant_for_farm` است.
- اگر `plant_name` صریح داده نشود، معمولاً گیاه اول assignmentهای مزرعه انتخاب می‌شود.
- اگر چند گیاه برای مزرعه ثبت شده باشد، endpoint پیشنهاد کلاستر همه‌ی آن‌ها را compare می‌کند.
## 8) خلاصه جریان end-to-end
جریان کامل به این صورت است:
1. Backend plant catalog -> `PlantCatalogSnapshot`
2. farm selected plants -> `FarmPlantAssignment`
3. client request with `farm_uuid`
4. farm -> runtime plant resolution
5. runtime plant + farm metrics -> simulation payload
6. simulation payload -> PCSE/projection
7. cluster metrics + plant candidates -> recommended crop per cluster
## 9) فایل‌های کلیدی برای مرور سریع
- `plant/apps.py`
- `farm_data/models.py`
- `farm_data/services.py`
- `crop_simulation/services.py`
- `crop_simulation/growth_simulation.py`
- `crop_simulation/harvest_prediction.py`
- `crop_simulation/yield_prediction.py`
- `location_data/cluster_recommendation.py`
+41 -6
View File
@@ -2,6 +2,33 @@
set -e set -e
PROXYCHAINS_CONFIG_FILE="${PROXYCHAINS_CONFIG_FILE:-/etc/proxychains.conf}" PROXYCHAINS_CONFIG_FILE="${PROXYCHAINS_CONFIG_FILE:-/etc/proxychains.conf}"
OPENEO_VERIFY_ON_STARTUP="${OPENEO_VERIFY_ON_STARTUP:-1}"
disable_proxy_mode() {
reason="$1"
echo "Proxy support disabled: ${reason}" >&2
ENABLE_PROXYCHAINS=0
export ENABLE_PROXYCHAINS
export OPENEO_PROXY_URL=""
export OPENEO_VERIFY_ON_STARTUP=0
}
proxy_endpoint_reachable() {
proxy_host="$1"
proxy_port="$2"
python - "$proxy_host" "$proxy_port" <<'PY'
import socket
import sys
host = sys.argv[1]
port = int(sys.argv[2])
try:
with socket.create_connection((host, port), timeout=2):
sys.exit(0)
except OSError:
sys.exit(1)
PY
}
setup_proxychains() { setup_proxychains() {
if [ "${ENABLE_PROXYCHAINS}" != "1" ]; then if [ "${ENABLE_PROXYCHAINS}" != "1" ]; then
@@ -10,8 +37,8 @@ setup_proxychains() {
fi fi
if ! command -v proxychains4 >/dev/null 2>&1; then if ! command -v proxychains4 >/dev/null 2>&1; then
echo "proxychains4 is not installed but ENABLE_PROXYCHAINS=1 was set." >&2 disable_proxy_mode "proxychains4 is not installed but ENABLE_PROXYCHAINS=1 was set."
exit 1 return 0
fi fi
proxy_type="${PROXYCHAINS_PROXY_TYPE:-socks4}" proxy_type="${PROXYCHAINS_PROXY_TYPE:-socks4}"
@@ -21,8 +48,13 @@ setup_proxychains() {
proxy_ip="$(getent hosts "${proxy_host}" | awk 'NR==1 {print $1}')" proxy_ip="$(getent hosts "${proxy_host}" | awk 'NR==1 {print $1}')"
if [ -z "${proxy_ip}" ]; then if [ -z "${proxy_ip}" ]; then
echo "Could not resolve proxy host: ${proxy_host}" >&2 disable_proxy_mode "could not resolve proxy host ${proxy_host}"
exit 1 return 0
fi
if ! proxy_endpoint_reachable "${proxy_host}" "${proxy_port}"; then
disable_proxy_mode "proxy ${proxy_host}:${proxy_port} is unreachable"
return 0
fi fi
cat > "${PROXYCHAINS_CONFIG_FILE}" <<EOF cat > "${PROXYCHAINS_CONFIG_FILE}" <<EOF
@@ -88,18 +120,21 @@ if [ "${SKIP_MIGRATE}" != "1" ]; then
fi fi
if [ -n "${DEVELOP}" ] && [ "${SKIP_MIGRATE}" != "1" ]; then if [ -n "${DEVELOP}" ] && [ "${SKIP_MIGRATE}" != "1" ]; then
echo "DEVELOP is set. Seeding demo location_data, plant, weather_data, and farm_data..." echo "DEVELOP is set. Seeding demo location_data, weather_data, and farm_data..."
run_cmd python manage.py seed_location_data run_cmd python manage.py seed_location_data
run_cmd python manage.py seed_plants
run_cmd python manage.py seed_weather_data run_cmd python manage.py seed_weather_data
run_cmd python manage.py seed_farm_data run_cmd python manage.py seed_farm_data
echo "Demo seeders done." echo "Demo seeders done."
fi fi
if [ "${OPENEO_VERIFY_ON_STARTUP}" = "1" ]; then
echo "Checking openEO authentication..." echo "Checking openEO authentication..."
if ! run_cmd python manage.py verify_openeo_auth --skip-if-unconfigured; then if ! run_cmd python manage.py verify_openeo_auth --skip-if-unconfigured; then
echo "openEO authentication failed; continuing startup with degraded openEO-dependent features." >&2 echo "openEO authentication failed; continuing startup with degraded openEO-dependent features." >&2
fi fi
else
echo "Skipping openEO authentication during startup."
fi
echo "Collecting static files..." echo "Collecting static files..."
run_cmd python manage.py collectstatic --noinput run_cmd python manage.py collectstatic --noinput
+2 -10
View File
@@ -7,7 +7,6 @@ import uuid
import warnings import warnings
from django.conf import settings from django.conf import settings
from django.apps import apps
from django.db import transaction from django.db import transaction
from django.utils.dateparse import parse_datetime from django.utils.dateparse import parse_datetime
from django.utils import timezone from django.utils import timezone
@@ -217,16 +216,9 @@ def reconcile_legacy_farm_plants_relation(
farm: SensorData, farm: SensorData,
snapshots: list[PlantCatalogSnapshot] | None = None, snapshots: list[PlantCatalogSnapshot] | None = None,
) -> None: ) -> None:
snapshots = list(snapshots if snapshots is not None else get_farm_plant_snapshots(farm)) # AI no longer mirrors canonical plant rows locally; the legacy relation is cleared
Plant = apps.get_model("plant", "Plant") # so downstream services cannot accidentally read stale plant data.
if Plant is None:
return
names = [snapshot.name for snapshot in snapshots if snapshot and snapshot.name]
if not names:
farm.plants.clear() farm.plants.clear()
return
legacy_plants = list(Plant.objects.filter(name__in=names).order_by("name", "id"))
farm.plants.set(legacy_plants)
def get_canonical_farm_record(farm_uuid: str) -> SensorData | None: def get_canonical_farm_record(farm_uuid: str) -> SensorData | None:
+5 -2
View File
@@ -86,8 +86,11 @@ class FarmDetailApiTests(TestCase):
self.assertEqual([plant.name for plant in list_runtime_plants_for_farm(farm)], ["خیار", "گوجه‌فرنگی"]) self.assertEqual([plant.name for plant in list_runtime_plants_for_farm(farm)], ["خیار", "گوجه‌فرنگی"])
self.assertEqual(get_runtime_plant_for_farm(farm).name, "خیار") self.assertEqual(get_runtime_plant_for_farm(farm).name, "خیار")
def test_assignment_sync_reconciles_legacy_relation_for_transition(self): def test_assignment_sync_uses_backend_snapshots_as_canonical_source(self):
self.assertEqual(list(self.farm.plants.values_list("name", flat=True)), ["خیار", "گوجه‌فرنگی"]) self.assertEqual(
list(self.farm.plant_assignments.values_list("plant__name", flat=True)),
["خیار", "گوجه‌فرنگی"],
)
def test_runtime_plant_lookup_resolves_by_name_from_canonical_assignments(self): def test_runtime_plant_lookup_resolves_by_name_from_canonical_assignments(self):
farm = get_canonical_farm_record(str(self.farm_uuid)) farm = get_canonical_farm_record(str(self.farm_uuid))
+30 -3
View File
@@ -7,6 +7,7 @@ import uuid
from django.test import TransactionTestCase from django.test import TransactionTestCase
from rest_framework.test import APIClient from rest_framework.test import APIClient
from farm_data.models import PlantCatalogSnapshot
from location_data.models import NdviObservation, SoilLocation from location_data.models import NdviObservation, SoilLocation
from weather.models import WeatherForecast from weather.models import WeatherForecast
@@ -40,6 +41,7 @@ class IntegrationAPITestCase(TransactionTestCase):
def setUp(self) -> None: def setUp(self) -> None:
super().setUp() super().setUp()
self.client = APIClient() self.client = APIClient()
self._next_backend_plant_id = 100
self.primary_boundary = square_boundary(self.primary_lat, self.primary_lon) self.primary_boundary = square_boundary(self.primary_lat, self.primary_lon)
self.primary_location = self.create_complete_location( self.primary_location = self.create_complete_location(
lat=self.primary_lat, lat=self.primary_lat,
@@ -55,6 +57,7 @@ class IntegrationAPITestCase(TransactionTestCase):
lat: float, lat: float,
lon: float, lon: float,
boundary: dict[str, Any] | None = None, boundary: dict[str, Any] | None = None,
**_ignored: Any,
) -> SoilLocation: ) -> SoilLocation:
location = SoilLocation.objects.create( location = SoilLocation.objects.create(
latitude=f"{lat:.6f}", latitude=f"{lat:.6f}",
@@ -126,22 +129,46 @@ class IntegrationAPITestCase(TransactionTestCase):
return response.json()["data"] return response.json()["data"]
def create_plant_via_api(self, name: str, **overrides: Any) -> dict[str, Any]: def create_plant_via_api(self, name: str, **overrides: Any) -> dict[str, Any]:
backend_plant_id = int(overrides.pop("id", self._next_backend_plant_id))
self._next_backend_plant_id = max(self._next_backend_plant_id, backend_plant_id + 1)
payload = { payload = {
"id": backend_plant_id,
"name": name, "name": name,
"icon": "leaf",
"light": "full sun", "light": "full sun",
"watering": "every 2 days", "watering": "every 2 days",
"soil": "loamy", "soil": "loamy",
"temperature": "20-28C", "temperature": "20-28C",
"growth_stage": "vegetative", "growth_stage": "vegetative",
"growth_stages": ["vegetative"],
"planting_season": "spring", "planting_season": "spring",
"harvest_time": "90 days", "harvest_time": "90 days",
"spacing": "50 cm", "spacing": "50 cm",
"fertilizer": "balanced NPK", "fertilizer": "balanced NPK",
} }
payload.update(overrides) payload.update(overrides)
response = self.client.post("/api/plants/", data=payload, format="json") if "growth_stages" not in overrides:
self.assertEqual(response.status_code, 201, response.json()) payload["growth_stages"] = [payload["growth_stage"]] if payload.get("growth_stage") else []
return response.json()["data"] response = self.client.post("/api/farm-data/plants/sync/", data=[payload], format="json")
self.assertEqual(response.status_code, 200, response.json())
snapshot = PlantCatalogSnapshot.objects.get(backend_plant_id=backend_plant_id)
return {
"id": snapshot.backend_plant_id,
"backend_plant_id": snapshot.backend_plant_id,
"name": snapshot.name,
"icon": snapshot.icon,
"light": snapshot.light,
"watering": snapshot.watering,
"soil": snapshot.soil,
"temperature": snapshot.temperature,
"growth_stage": snapshot.growth_stage,
"growth_stages": list(snapshot.growth_stages or []),
"planting_season": snapshot.planting_season,
"harvest_time": snapshot.harvest_time,
"spacing": snapshot.spacing,
"fertilizer": snapshot.fertilizer,
}
def create_sensor_parameter_via_api(self, **overrides: Any) -> dict[str, Any]: def create_sensor_parameter_via_api(self, **overrides: Any) -> dict[str, Any]:
payload = { payload = {
+33 -75
View File
@@ -5,9 +5,8 @@ from unittest.mock import patch
from django.test import override_settings from django.test import override_settings
from farm_data.models import ParameterUpdateLog, SensorData, SensorParameter from farm_data.models import ParameterUpdateLog, PlantCatalogSnapshot, SensorData, SensorParameter
from integration_tests.base import IntegrationAPITestCase from integration_tests.base import IntegrationAPITestCase
from plant.models import Plant
@override_settings(ROOT_URLCONF="config.urls") @override_settings(ROOT_URLCONF="config.urls")
@@ -44,88 +43,44 @@ class FarmManagementJourneyTests(IntegrationAPITestCase):
tomato = self.create_plant_via_api("Tomato") tomato = self.create_plant_via_api("Tomato")
cucumber = self.create_plant_via_api("Cucumber", watering="daily") cucumber = self.create_plant_via_api("Cucumber", watering="daily")
removable_plant = self.create_plant_via_api("Remove Plant")
plants_list_response = self.client.get("/api/plants/")
self.assertEqual(plants_list_response.status_code, 200)
returned_names = {item["name"] for item in plants_list_response.json()["data"]}
self.assertTrue({"Tomato", "Cucumber", "Remove Plant"}.issubset(returned_names))
plant_catalog = self.create_plant_via_api( plant_catalog = self.create_plant_via_api(
"Pepper", "Pepper",
growth_stage="", growth_stage="",
icon="sprout", icon="",
)
Plant.objects.filter(pk=plant_catalog["id"]).update(growth_stage="", icon="")
plant_names_response = self.client.get("/api/plants/names/")
self.assertEqual(plant_names_response.status_code, 200)
plant_names_payload = {
item["name"]: item for item in plant_names_response.json()["data"]
}
self.assertEqual(plant_names_payload["Pepper"]["icon"], "leaf")
self.assertEqual(
plant_names_payload["Pepper"]["growth_stages"],
["initial", "vegetative", "flowering", "fruiting", "maturity"],
)
pepper = Plant.objects.get(pk=plant_catalog["id"])
self.assertEqual(
pepper.growth_stage,
"initial, vegetative, flowering, fruiting, maturity",
) )
pepper = PlantCatalogSnapshot.objects.get(backend_plant_id=plant_catalog["id"])
self.assertEqual(pepper.icon, "leaf") self.assertEqual(pepper.icon, "leaf")
self.assertEqual(pepper.growth_stages, [])
plant_patch_response = self.client.patch( updated_tomato = self.create_plant_via_api(
f"/api/plants/{tomato['id']}/", "Tomato",
data={"growth_stage": "flowering", "watering": "daily"}, id=tomato["id"],
format="json", growth_stage="flowering",
growth_stages=["flowering"],
watering="daily",
) )
self.assertEqual(plant_patch_response.status_code, 200) self.assertEqual(updated_tomato["growth_stage"], "flowering")
self.assertEqual(Plant.objects.get(pk=tomato["id"]).growth_stage, "flowering") self.assertEqual(
PlantCatalogSnapshot.objects.get(backend_plant_id=tomato["id"]).growth_stage,
plant_put_response = self.client.put( "flowering",
f"/api/plants/{cucumber['id']}/",
data={
"name": "Cucumber",
"light": "full sun",
"watering": "every day",
"soil": "sandy loam",
"temperature": "18-30C",
"growth_stage": "fruiting",
"planting_season": "spring",
"harvest_time": "70 days",
"spacing": "40 cm",
"fertilizer": "potassium rich",
},
format="json",
) )
self.assertEqual(plant_put_response.status_code, 200)
with patch( updated_cucumber = self.create_plant_via_api(
"plant.views.fetch_plant_info_from_api", "Cucumber",
return_value={ id=cucumber["id"],
"name": "Tomato", light="full sun",
"light": "full sun", watering="every day",
"watering": "daily", soil="sandy loam",
"soil": "loamy", temperature="18-30C",
"temperature": "20-28C", growth_stage="fruiting",
"growth_stage": "flowering", growth_stages=["fruiting"],
"planting_season": "spring", planting_season="spring",
"harvest_time": "90 days", harvest_time="70 days",
"spacing": "50 cm", spacing="40 cm",
"fertilizer": "balanced NPK", fertilizer="potassium rich",
},
):
plant_fetch_response = self.client.post(
"/api/plants/fetch-info/",
data={"name": "Tomato"},
format="json",
) )
self.assertEqual(plant_fetch_response.status_code, 200) self.assertEqual(updated_cucumber["watering"], "every day")
self.assertEqual(plant_fetch_response.json()["data"]["name"], "Tomato")
plant_delete_response = self.client.delete(f"/api/plants/{removable_plant['id']}/")
self.assertEqual(plant_delete_response.status_code, 200)
self.assertFalse(Plant.objects.filter(pk=removable_plant["id"]).exists())
farm_uuid = uuid.uuid4() farm_uuid = uuid.uuid4()
created_farm = self.upsert_farm_via_api( created_farm = self.upsert_farm_via_api(
@@ -147,7 +102,7 @@ class FarmManagementJourneyTests(IntegrationAPITestCase):
self.assertEqual(created_farm["farm_uuid"], str(farm_uuid)) self.assertEqual(created_farm["farm_uuid"], str(farm_uuid))
farm_record = SensorData.objects.get(farm_uuid=farm_uuid) farm_record = SensorData.objects.get(farm_uuid=farm_uuid)
self.assertCountEqual( self.assertCountEqual(
list(farm_record.plants.values_list("id", flat=True)), list(farm_record.plant_assignments.values_list("plant__backend_plant_id", flat=True)),
[tomato["id"], cucumber["id"]], [tomato["id"], cucumber["id"]],
) )
self.assertEqual(farm_record.irrigation_method_id, primary_method["id"]) self.assertEqual(farm_record.irrigation_method_id, primary_method["id"])
@@ -172,7 +127,10 @@ class FarmManagementJourneyTests(IntegrationAPITestCase):
farm_record.refresh_from_db() farm_record.refresh_from_db()
self.assertEqual(farm_record.irrigation_method_id, backup_method["id"]) self.assertEqual(farm_record.irrigation_method_id, backup_method["id"])
self.assertCountEqual(list(farm_record.plants.values_list("id", flat=True)), [tomato["id"]]) self.assertCountEqual(
list(farm_record.plant_assignments.values_list("plant__backend_plant_id", flat=True)),
[tomato["id"]],
)
self.assertEqual(farm_record.sensor_payload["sensor-7-1"]["soil_temperature"], 23.4) self.assertEqual(farm_record.sensor_payload["sensor-7-1"]["soil_temperature"], 23.4)
self.assertEqual(farm_record.sensor_payload["sensor-7-1"]["soil_moisture"], 44.0) self.assertEqual(farm_record.sensor_payload["sensor-7-1"]["soil_moisture"], 44.0)
self.assertEqual(farm_record.sensor_payload["sensor-7-1"]["nitrogen"], 19.5) self.assertEqual(farm_record.sensor_payload["sensor-7-1"]["nitrogen"], 19.5)
@@ -11,6 +11,7 @@ from django.test import override_settings
from crop_simulation.models import SimulationRun, SimulationScenario from crop_simulation.models import SimulationRun, SimulationScenario
from farm_alerts.models import FarmAlertNotification from farm_alerts.models import FarmAlertNotification
from farm_data.models import SensorData from farm_data.models import SensorData
from farm_data.services import assign_farm_plants_from_backend_ids
from integration_tests.base import IntegrationAPITestCase, square_boundary from integration_tests.base import IntegrationAPITestCase, square_boundary
@@ -79,7 +80,7 @@ class ReportingAndAiJourneyTests(IntegrationAPITestCase):
} }
}, },
) )
neighbor_sensor.plants.set([self.primary_plant["id"]]) assign_farm_plants_from_backend_ids(neighbor_sensor, [self.primary_plant["id"]])
def test_reporting_endpoints_read_from_persisted_farm_context(self) -> None: def test_reporting_endpoints_read_from_persisted_farm_context(self) -> None:
soil_response = self.client.get( soil_response = self.client.get(
+101 -2
View File
@@ -2,6 +2,8 @@ from __future__ import annotations
from copy import deepcopy from copy import deepcopy
from dataclasses import dataclass from dataclasses import dataclass
from datetime import date, datetime
from decimal import Decimal
from typing import Any from typing import Any
from django.db.models import Avg from django.db.models import Avg
@@ -9,7 +11,7 @@ from django.db.models import Avg
from crop_simulation.growth_simulation import GrowthSimulationContext, _run_projection_engine from crop_simulation.growth_simulation import GrowthSimulationContext, _run_projection_engine
from crop_simulation.services import PcseSimulationManager, build_simulation_payload_from_farm from crop_simulation.services import PcseSimulationManager, build_simulation_payload_from_farm
from farm_data.services import get_canonical_farm_record, get_farm_plant_assignments from farm_data.services import get_canonical_farm_record, get_farm_plant_assignments
from .models import AnalysisGridObservation, RemoteSensingClusterBlock from .models import AnalysisGridObservation, RemoteSensingClusterBlock, RemoteSensingSubdivisionResult
from .satellite_snapshot import build_location_block_satellite_snapshots from .satellite_snapshot import build_location_block_satellite_snapshots
@@ -70,6 +72,23 @@ def _clamp(value: float, minimum: float, maximum: float) -> float:
return max(minimum, min(value, maximum)) return max(minimum, min(value, maximum))
def _json_safe(value: Any) -> Any:
if isinstance(value, Decimal):
return float(value)
if isinstance(value, datetime):
formatted = value.isoformat()
if formatted.endswith("+00:00"):
return formatted[:-6] + "Z"
return formatted
if isinstance(value, date):
return value.isoformat()
if isinstance(value, dict):
return {str(key): _json_safe(item) for key, item in value.items()}
if isinstance(value, (list, tuple)):
return [_json_safe(item) for item in value]
return value
def _build_cluster_entries( def _build_cluster_entries(
snapshots: list[dict[str, Any]], snapshots: list[dict[str, Any]],
*, *,
@@ -353,6 +372,21 @@ def build_cluster_crop_recommendations(farm_uuid: str) -> dict[str, Any]:
if not cluster_entries: if not cluster_entries:
raise ClusterRecommendationNotFound("برای این مزرعه هنوز کلاستر قابل استفاده پیدا نشد.") raise ClusterRecommendationNotFound("برای این مزرعه هنوز کلاستر قابل استفاده پیدا نشد.")
recommendation_result_ids = sorted(
{
int(cluster_block.result_id)
for cluster_block in cluster_blocks_by_uuid.values()
if cluster_block.result_id
}
)
cached_payload = _load_cached_cluster_recommendations(
farm_uuid=str(farm.farm_uuid),
result_ids=recommendation_result_ids,
plant_assignments=plant_assignments,
)
if cached_payload is not None:
return cached_payload
base_payloads: dict[str, dict[str, Any]] = {} base_payloads: dict[str, dict[str, Any]] = {}
for assignment in plant_assignments: for assignment in plant_assignments:
plant_name = str(getattr(assignment.plant, "name", "") or "").strip() plant_name = str(getattr(assignment.plant, "name", "") or "").strip()
@@ -392,7 +426,7 @@ def build_cluster_crop_recommendations(farm_uuid: str) -> dict[str, Any]:
} }
) )
return { payload = {
"farm_uuid": str(farm.farm_uuid), "farm_uuid": str(farm.farm_uuid),
"location_id": location.id, "location_id": location.id,
"evaluated_plant_count": len(base_payloads), "evaluated_plant_count": len(base_payloads),
@@ -413,3 +447,68 @@ def build_cluster_crop_recommendations(farm_uuid: str) -> dict[str, Any]:
"snapshot_block_count": len(snapshots), "snapshot_block_count": len(snapshots),
}, },
} }
_store_cached_cluster_recommendations(
farm_uuid=str(farm.farm_uuid),
result_ids=recommendation_result_ids,
plant_assignments=plant_assignments,
payload=payload,
)
return payload
def _build_assignment_cache_signature(plant_assignments: list[Any]) -> list[dict[str, Any]]:
return [
{
"plant_id": getattr(assignment.plant, "backend_plant_id", None),
"position": int(assignment.position or 0),
"stage": str(assignment.stage or ""),
}
for assignment in plant_assignments
]
def _load_cached_cluster_recommendations(
*,
farm_uuid: str,
result_ids: list[int],
plant_assignments: list[Any],
) -> dict[str, Any] | None:
if not result_ids:
return None
cache_key = f"farm::{farm_uuid}"
assignment_signature = _build_assignment_cache_signature(plant_assignments)
for result in RemoteSensingSubdivisionResult.objects.filter(id__in=result_ids):
metadata = dict(result.metadata or {})
recommendation_cache = dict(metadata.get("cluster_recommendations") or {})
cached_entry = recommendation_cache.get(cache_key)
if not isinstance(cached_entry, dict):
continue
if cached_entry.get("assignment_signature") != assignment_signature:
continue
payload = cached_entry.get("payload")
if isinstance(payload, dict):
return payload
return None
def _store_cached_cluster_recommendations(
*,
farm_uuid: str,
result_ids: list[int],
plant_assignments: list[Any],
payload: dict[str, Any],
) -> None:
if not result_ids:
return
cache_key = f"farm::{farm_uuid}"
assignment_signature = _build_assignment_cache_signature(plant_assignments)
for result in RemoteSensingSubdivisionResult.objects.filter(id__in=result_ids):
metadata = dict(result.metadata or {})
recommendation_cache = dict(metadata.get("cluster_recommendations") or {})
recommendation_cache[cache_key] = {
"assignment_signature": assignment_signature,
"payload": _json_safe(payload),
}
metadata["cluster_recommendations"] = recommendation_cache
result.metadata = metadata
result.save(update_fields=["metadata", "updated_at"])
@@ -8,6 +8,7 @@ from rest_framework.test import APIClient
from location_data.models import ( from location_data.models import (
AnalysisGridCell, AnalysisGridCell,
AnalysisGridObservation,
BlockSubdivision, BlockSubdivision,
RemoteSensingClusterBlock, RemoteSensingClusterBlock,
RemoteSensingRun, RemoteSensingRun,
@@ -193,3 +194,61 @@ class RemoteSensingClusterBlockLiveApiTests(TestCase):
expected_start = expected_end - timedelta(days=6) expected_start = expected_end - timedelta(days=6)
self.assertEqual(kwargs["temporal_start"], expected_start) self.assertEqual(kwargs["temporal_start"], expected_start)
self.assertEqual(kwargs["temporal_end"], expected_end) self.assertEqual(kwargs["temporal_end"], expected_end)
@patch("location_data.views.compute_remote_sensing_metrics")
def test_get_cluster_block_live_uses_database_cache_for_matching_window(self, compute_mock):
cell_1 = AnalysisGridCell.objects.create(
soil_location=self.location,
block_subdivision=self.subdivision,
block_code="block-1",
cell_code="cell-1",
chunk_size_sqm=900,
geometry=self.boundary,
centroid_lat="35.689250",
centroid_lon="51.389250",
)
cell_2 = AnalysisGridCell.objects.create(
soil_location=self.location,
block_subdivision=self.subdivision,
block_code="block-1",
cell_code="cell-2",
chunk_size_sqm=900,
geometry=self.boundary,
centroid_lat="35.689750",
centroid_lon="51.389750",
)
AnalysisGridObservation.objects.create(
cell=cell_1,
run=self.run,
temporal_start=date(2025, 1, 1),
temporal_end=date(2025, 1, 31),
ndvi=0.44,
ndwi=0.12,
soil_vv=0.09,
soil_vv_db=-11.0,
metadata={"backend_name": "openeo"},
)
AnalysisGridObservation.objects.create(
cell=cell_2,
run=self.run,
temporal_start=date(2025, 1, 1),
temporal_end=date(2025, 1, 31),
ndvi=0.64,
ndwi=0.22,
soil_vv=0.19,
soil_vv_db=-7.0,
metadata={"backend_name": "openeo"},
)
response = self.client.get(
f"/remote-sensing/cluster-blocks/{self.cluster_block.uuid}/live/",
data={"temporal_start": "2025-01-01", "temporal_end": "2025-01-31"},
)
self.assertEqual(response.status_code, 200)
payload = response.json()["data"]
self.assertEqual(payload["source"], "database")
self.assertTrue(payload["metadata"]["cache_hit"])
self.assertEqual(payload["summary"]["ndvi_mean"], 0.54)
self.assertEqual(payload["metrics"]["soil_vv_db"], -9.0)
compute_mock.assert_not_called()
@@ -279,3 +279,37 @@ class RemoteSensingClusterRecommendationApiTests(TestCase):
response.json()["msg"], response.json()["msg"],
"برای این مزرعه هنوز هیچ گیاهی در farm_data ثبت نشده است.", "برای این مزرعه هنوز هیچ گیاهی در farm_data ثبت نشده است.",
) )
@patch("location_data.cluster_recommendation._simulate_candidate")
def test_cluster_recommendations_use_cached_payload_for_same_farm_assignments(self, simulate_mock):
simulate_mock.return_value = (
{
"engine": "pcse",
"model_name": "Wofost81_NWLP_CWB_CNB",
"metrics": {
"yield_estimate": 100.0,
"biomass": 200.0,
"max_lai": 3.1,
},
},
None,
)
first_response = self.client.get(
"/remote-sensing/cluster-recommendations/",
data={"farm_uuid": str(self.farm.farm_uuid)},
)
self.assertEqual(first_response.status_code, 200)
self.assertGreater(simulate_mock.call_count, 0)
simulate_mock.reset_mock()
simulate_mock.side_effect = AssertionError("cached recommendations should skip simulation")
second_response = self.client.get(
"/remote-sensing/cluster-recommendations/",
data={"farm_uuid": str(self.farm.farm_uuid)},
)
self.assertEqual(second_response.status_code, 200)
self.assertEqual(first_response.json()["data"], second_response.json()["data"])
simulate_mock.assert_not_called()
+236 -1
View File
@@ -46,7 +46,7 @@ class RemoteSensingApiTests(TestCase):
self.farm = SensorData.objects.create( self.farm = SensorData.objects.create(
farm_uuid="11111111-1111-1111-1111-111111111111", farm_uuid="11111111-1111-1111-1111-111111111111",
center_location=self.location, center_location=self.location,
payload={}, sensor_payload={},
) )
self.temporal_end = timezone.localdate() - timedelta(days=1) self.temporal_end = timezone.localdate() - timedelta(days=1)
self.temporal_start = self.temporal_end - timedelta(days=30) self.temporal_start = self.temporal_end - timedelta(days=30)
@@ -176,6 +176,241 @@ class RemoteSensingApiTests(TestCase):
self.assertEqual(len(payload["cells"]), 1) self.assertEqual(len(payload["cells"]), 1)
self.assertEqual(payload["cells"][0]["cell_code"], "cell-1") self.assertEqual(payload["cells"][0]["cell_code"], "cell-1")
@patch("location_data.views.run_remote_sensing_analysis_task.delay")
def test_post_remote_sensing_reuses_latest_completed_farm_cache_when_window_differs(self, mock_delay):
fallback_start = self.temporal_start - timedelta(days=1)
fallback_end = self.temporal_end - timedelta(days=1)
run = RemoteSensingRun.objects.create(
soil_location=self.location,
block_subdivision=self.subdivision,
block_code="",
chunk_size_sqm=900,
temporal_start=fallback_start,
temporal_end=fallback_end,
status=RemoteSensingRun.STATUS_SUCCESS,
metadata={"farm_uuid": str(self.farm.farm_uuid), "stage": "completed"},
)
cell = AnalysisGridCell.objects.create(
soil_location=self.location,
block_subdivision=self.subdivision,
block_code="",
cell_code="cell-seeded-1",
chunk_size_sqm=900,
geometry=self.boundary,
centroid_lat="35.689500",
centroid_lon="51.389500",
)
AnalysisGridObservation.objects.create(
cell=cell,
run=run,
temporal_start=fallback_start,
temporal_end=fallback_end,
ndvi=0.49,
ndwi=0.17,
soil_vv=0.10,
soil_vv_db=-9.8,
metadata={"backend_name": "openeo"},
)
response = self.client.post(
"/remote-sensing/",
data={"farm_uuid": str(self.farm.farm_uuid), "force_refresh": False},
format="json",
)
self.assertEqual(response.status_code, 200)
payload = response.json()["data"]
self.assertEqual(payload["status"], "success")
self.assertEqual(payload["source"], "database")
self.assertEqual(payload["temporal_extent"]["start_date"], fallback_start.isoformat())
self.assertEqual(payload["temporal_extent"]["end_date"], fallback_end.isoformat())
self.assertEqual(payload["metadata"]["cache_match"], "latest_completed_for_farm")
self.assertEqual(payload["cells"][0]["cell_code"], "cell-seeded-1")
self.assertEqual(payload["run"]["id"], run.id)
self.assertNotIn("task_id", payload)
mock_delay.assert_not_called()
@patch("location_data.views.run_remote_sensing_analysis_task.delay")
def test_post_remote_sensing_returns_cached_results_without_enqueuing(self, mock_delay):
run = RemoteSensingRun.objects.create(
soil_location=self.location,
block_subdivision=self.subdivision,
block_code="",
chunk_size_sqm=900,
temporal_start=self.temporal_start,
temporal_end=self.temporal_end,
status=RemoteSensingRun.STATUS_SUCCESS,
metadata={"farm_uuid": str(self.farm.farm_uuid), "stage": "completed"},
)
cell = AnalysisGridCell.objects.create(
soil_location=self.location,
block_subdivision=self.subdivision,
block_code="",
cell_code="cell-cache-1",
chunk_size_sqm=900,
geometry=self.boundary,
centroid_lat="35.689500",
centroid_lon="51.389500",
)
AnalysisGridObservation.objects.create(
cell=cell,
run=run,
temporal_start=self.temporal_start,
temporal_end=self.temporal_end,
ndvi=0.52,
ndwi=0.18,
soil_vv=0.11,
soil_vv_db=-9.2,
metadata={"backend_name": "openeo"},
)
response = self.client.post(
"/remote-sensing/",
data={"farm_uuid": str(self.farm.farm_uuid), "force_refresh": False},
format="json",
)
self.assertEqual(response.status_code, 200)
payload = response.json()["data"]
self.assertEqual(payload["status"], "success")
self.assertEqual(payload["source"], "database")
self.assertTrue(payload["metadata"]["cache_hit"])
self.assertEqual(payload["cells"][0]["cell_code"], "cell-cache-1")
self.assertEqual(payload["run"]["id"], run.id)
self.assertEqual(payload["run"]["status"], RemoteSensingRun.STATUS_SUCCESS)
self.assertNotIn("task_id", payload)
self.assertEqual(RemoteSensingRun.objects.count(), 1)
mock_delay.assert_not_called()
@patch("location_data.views.run_remote_sensing_analysis_task.delay")
def test_post_remote_sensing_cached_results_do_not_create_status_run(self, mock_delay):
source_run = RemoteSensingRun.objects.create(
soil_location=self.location,
block_subdivision=self.subdivision,
block_code="",
chunk_size_sqm=900,
temporal_start=self.temporal_start,
temporal_end=self.temporal_end,
status=RemoteSensingRun.STATUS_SUCCESS,
metadata={"farm_uuid": str(self.farm.farm_uuid), "stage": "completed"},
)
cell = AnalysisGridCell.objects.create(
soil_location=self.location,
block_subdivision=self.subdivision,
block_code="",
cell_code="cell-status-cache-1",
chunk_size_sqm=900,
geometry=self.boundary,
centroid_lat="35.689500",
centroid_lon="51.389500",
)
AnalysisGridObservation.objects.create(
cell=cell,
run=source_run,
temporal_start=self.temporal_start,
temporal_end=self.temporal_end,
ndvi=0.57,
ndwi=0.19,
soil_vv=0.12,
soil_vv_db=-8.7,
metadata={"backend_name": "openeo"},
)
post_response = self.client.post(
"/remote-sensing/",
data={"farm_uuid": str(self.farm.farm_uuid), "force_refresh": False},
format="json",
)
self.assertEqual(post_response.status_code, 200)
payload = post_response.json()["data"]
self.assertEqual(payload["status"], "success")
self.assertEqual(payload["run"]["id"], source_run.id)
self.assertEqual(payload["summary"]["cell_count"], 1)
self.assertEqual(payload["cells"][0]["cell_code"], "cell-status-cache-1")
self.assertNotIn("task_id", payload)
self.assertEqual(RemoteSensingRun.objects.count(), 1)
mock_delay.assert_not_called()
@patch("location_data.views.run_remote_sensing_analysis_task.delay")
def test_post_remote_sensing_returns_existing_processing_run_without_enqueuing(self, mock_delay):
run = RemoteSensingRun.objects.create(
soil_location=self.location,
block_subdivision=self.subdivision,
block_code="",
chunk_size_sqm=900,
temporal_start=self.temporal_start,
temporal_end=self.temporal_end,
status=RemoteSensingRun.STATUS_PENDING,
metadata={
"farm_uuid": str(self.farm.farm_uuid),
"task_id": "e723ba3e-c53c-401b-b3a0-5f7013c7b401",
"stage": "queued",
},
)
response = self.client.post(
"/remote-sensing/",
data={"farm_uuid": str(self.farm.farm_uuid), "force_refresh": False},
format="json",
)
self.assertEqual(response.status_code, 202)
payload = response.json()["data"]
self.assertEqual(payload["status"], "processing")
self.assertEqual(payload["source"], "processing")
self.assertEqual(payload["run"]["id"], run.id)
mock_delay.assert_not_called()
@patch("location_data.views.run_remote_sensing_analysis_task.delay")
def test_post_remote_sensing_ignores_other_farm_cache_on_same_location(self, mock_delay):
other_farm_uuid = "33333333-3333-3333-3333-333333333333"
mock_delay.return_value = SimpleNamespace(id="f723ba3e-c53c-401b-b3a0-5f7013c7b402")
other_run = RemoteSensingRun.objects.create(
soil_location=self.location,
block_subdivision=self.subdivision,
block_code="",
chunk_size_sqm=900,
temporal_start=self.temporal_start,
temporal_end=self.temporal_end,
status=RemoteSensingRun.STATUS_SUCCESS,
metadata={"farm_uuid": other_farm_uuid, "stage": "completed"},
)
other_cell = AnalysisGridCell.objects.create(
soil_location=self.location,
block_subdivision=self.subdivision,
block_code="",
cell_code="cell-other-farm",
chunk_size_sqm=900,
geometry=self.boundary,
centroid_lat="35.689510",
centroid_lon="51.389510",
)
AnalysisGridObservation.objects.create(
cell=other_cell,
run=other_run,
temporal_start=self.temporal_start,
temporal_end=self.temporal_end,
ndvi=0.66,
ndwi=0.31,
soil_vv=0.15,
soil_vv_db=-8.1,
metadata={"backend_name": "openeo"},
)
response = self.client.post(
"/remote-sensing/",
data={"farm_uuid": str(self.farm.farm_uuid), "force_refresh": False},
format="json",
)
self.assertEqual(response.status_code, 202)
payload = response.json()["data"]
self.assertEqual(payload["status"], "processing")
self.assertEqual(RemoteSensingRun.objects.count(), 2)
self.assertNotEqual(payload["run"]["id"], other_run.id)
mock_delay.assert_called_once()
def test_run_status_endpoint_returns_normalized_status(self): def test_run_status_endpoint_returns_normalized_status(self):
run = RemoteSensingRun.objects.create( run = RemoteSensingRun.objects.create(
soil_location=self.location, soil_location=self.location,
+436 -100
View File
@@ -1,6 +1,7 @@
from datetime import timedelta from datetime import timedelta
from types import SimpleNamespace from types import SimpleNamespace
from typing import Any from typing import Any
from uuid import uuid4
from django.apps import apps from django.apps import apps
from django.core.paginator import EmptyPage, Paginator from django.core.paginator import EmptyPage, Paginator
@@ -416,9 +417,16 @@ class RemoteSensingAnalysisView(APIView):
@extend_schema( @extend_schema(
tags=["Location Data"], tags=["Location Data"],
summary="اجرای async تحلیل سنجش‌ازدور و subdivision داده‌محور", summary="اجرای async تحلیل سنجش‌ازدور و subdivision داده‌محور",
description="برای location موجود، pipeline کامل grid + openEO + observation persistence + KMeans clustering در Celery صف می‌شود و sync اجرا نمی‌شود.", description=(
"اگر خروجی cache شده برای مزرعه موجود باشد، همان داده مستقیم برگردانده می‌شود. "
"در غیر این صورت pipeline کامل grid + openEO + observation persistence + KMeans clustering در Celery صف می‌شود."
),
request=RemoteSensingFarmRequestSerializer, request=RemoteSensingFarmRequestSerializer,
responses={ responses={
200: build_response(
RemoteSensingEnvelopeSerializer,
"خروجی cache شده remote sensing بدون enqueue کردن Celery بازگردانده شد.",
),
202: build_response( 202: build_response(
RemoteSensingQueuedEnvelopeSerializer, RemoteSensingQueuedEnvelopeSerializer,
"درخواست تحلیل سنجش‌ازدور در صف قرار گرفت.", "درخواست تحلیل سنجش‌ازدور در صف قرار گرفت.",
@@ -462,6 +470,28 @@ class RemoteSensingAnalysisView(APIView):
temporal_end = timezone.localdate() - timedelta(days=1) temporal_end = timezone.localdate() - timedelta(days=1)
temporal_start = temporal_end - timedelta(days=30) temporal_start = temporal_end - timedelta(days=30)
if not payload.get("force_refresh", False):
cached_response = _build_cached_remote_sensing_response(
location=location,
farm_uuid=str(payload["farm_uuid"]),
block_code="",
start_date=temporal_start,
end_date=temporal_end,
page=payload.get("page", 1),
page_size=payload.get("page_size", 100),
)
if cached_response is not None:
processing = cached_response.get("status") == "processing"
status_code = status.HTTP_202_ACCEPTED if processing else status.HTTP_200_OK
response_payload = cached_response
return Response(
{
"code": 202 if status_code == status.HTTP_202_ACCEPTED else 200,
"msg": "success" if processing else "داده cache شده بازگردانده شد.",
"data": response_payload,
},
status=status_code,
)
run = RemoteSensingRun.objects.create( run = RemoteSensingRun.objects.create(
soil_location=location, soil_location=location,
block_code="", block_code="",
@@ -471,6 +501,7 @@ class RemoteSensingAnalysisView(APIView):
status=RemoteSensingRun.STATUS_PENDING, status=RemoteSensingRun.STATUS_PENDING,
metadata={ metadata={
"requested_via": "api", "requested_via": "api",
"stage": "queued",
"status_label": "pending", "status_label": "pending",
"requested_cluster_count": None, "requested_cluster_count": None,
"selected_features": list(DEFAULT_CLUSTER_FEATURES), "selected_features": list(DEFAULT_CLUSTER_FEATURES),
@@ -585,92 +616,15 @@ class RemoteSensingAnalysisView(APIView):
temporal_end = timezone.localdate() - timedelta(days=1) temporal_end = timezone.localdate() - timedelta(days=1)
temporal_start = temporal_end - timedelta(days=30) temporal_start = temporal_end - timedelta(days=30)
block_code = "" response_payload = _build_cached_remote_sensing_response(
observations = _get_remote_sensing_observations(
location=location, location=location,
block_code=block_code, farm_uuid=str(payload["farm_uuid"]),
block_code="",
start_date=temporal_start, start_date=temporal_start,
end_date=temporal_end, end_date=temporal_end,
)
run = _get_latest_remote_sensing_run(
location=location,
block_code=block_code,
start_date=temporal_start,
end_date=temporal_end,
)
subdivision_result = _get_remote_sensing_subdivision_result(
location=location,
block_code=block_code,
start_date=temporal_start,
end_date=temporal_end,
)
if not observations.exists():
processing = run is not None and run.status in {
RemoteSensingRun.STATUS_PENDING,
RemoteSensingRun.STATUS_RUNNING,
}
response_payload = {
"status": "processing" if processing else "not_found",
"source": "processing" if processing else "database",
"location": SoilLocationResponseSerializer(location).data,
"block_code": "",
"chunk_size_sqm": getattr(run, "chunk_size_sqm", None),
"temporal_extent": {
"start_date": temporal_start.isoformat(),
"end_date": temporal_end.isoformat(),
},
"summary": _empty_remote_sensing_summary(),
"cells": [],
"run": RemoteSensingRunSerializer(run).data if run else None,
"subdivision_result": None,
}
return Response(
{"code": 200, "msg": "success", "data": response_payload},
status=status.HTTP_200_OK,
)
paginated_observations = _paginate_observations(
observations,
page=payload["page"], page=payload["page"],
page_size=payload["page_size"], page_size=payload["page_size"],
) )
paginated_assignments = []
pagination = {"cells": paginated_observations["pagination"]}
if subdivision_result is not None:
paginated = _paginate_assignments(
subdivision_result,
page=payload["page"],
page_size=payload["page_size"],
)
paginated_assignments = paginated["items"]
pagination["assignments"] = paginated["pagination"]
cells_data = RemoteSensingCellObservationSerializer(paginated_observations["items"], many=True).data
subdivision_data = None
if subdivision_result is not None:
subdivision_data = RemoteSensingSubdivisionResultSerializer(
subdivision_result,
context={"paginated_assignments": paginated_assignments},
).data
response_payload = {
"status": "success",
"source": "database",
"location": SoilLocationResponseSerializer(location).data,
"block_code": "",
"chunk_size_sqm": observations.first().cell.chunk_size_sqm,
"temporal_extent": {
"start_date": temporal_start.isoformat(),
"end_date": temporal_end.isoformat(),
},
"summary": _build_remote_sensing_summary(observations),
"cells": cells_data,
"run": RemoteSensingRunSerializer(run).data if run else None,
"subdivision_result": subdivision_data,
}
if pagination is not None:
response_payload["pagination"] = pagination
return Response( return Response(
{"code": 200, "msg": "success", "data": response_payload}, {"code": 200, "msg": "success", "data": response_payload},
status=status.HTTP_200_OK, status=status.HTTP_200_OK,
@@ -805,6 +759,16 @@ class RemoteSensingClusterBlockLiveView(APIView):
) )
temporal_start, temporal_end = _resolve_live_remote_sensing_window(serializer.validated_data) temporal_start, temporal_end = _resolve_live_remote_sensing_window(serializer.validated_data)
cached_cluster_payload = _build_cached_cluster_block_live_payload(
cluster_block=cluster_block,
temporal_start=temporal_start,
temporal_end=temporal_end,
)
if cached_cluster_payload is not None:
return Response(
{"code": 200, "msg": "success", "data": cached_cluster_payload},
status=status.HTTP_200_OK,
)
virtual_cell = _build_virtual_cluster_block_cell(cluster_block=cluster_block, geometry=geometry) virtual_cell = _build_virtual_cluster_block_cell(cluster_block=cluster_block, geometry=geometry)
try: try:
remote_payload = compute_remote_sensing_metrics( remote_payload = compute_remote_sensing_metrics(
@@ -1055,23 +1019,25 @@ def _build_remote_sensing_run_status_payload(run: RemoteSensingRun, *, page: int
if run.status == RemoteSensingRun.STATUS_FAILURE: if run.status == RemoteSensingRun.STATUS_FAILURE:
return status_payload return status_payload
source_run = _resolve_status_source_run(run)
location = _get_location_by_lat_lon(run.soil_location.latitude, run.soil_location.longitude, prefetch=True) location = _get_location_by_lat_lon(run.soil_location.latitude, run.soil_location.longitude, prefetch=True)
observations = _get_remote_sensing_observations( observations = _get_remote_sensing_observations(
location=run.soil_location, location=source_run.soil_location,
block_code=run.block_code, block_code=source_run.block_code,
start_date=run.temporal_start, start_date=source_run.temporal_start,
end_date=run.temporal_end, end_date=source_run.temporal_end,
run=source_run,
) )
subdivision_result = getattr(run, "subdivision_result", None) subdivision_result = _resolve_status_subdivision_result(run, source_run=source_run)
response_payload = { response_payload = {
**status_payload, **status_payload,
"location": SoilLocationResponseSerializer(location).data, "location": SoilLocationResponseSerializer(location).data,
"block_code": run.block_code, "block_code": source_run.block_code,
"chunk_size_sqm": run.chunk_size_sqm, "chunk_size_sqm": source_run.chunk_size_sqm,
"temporal_extent": { "temporal_extent": {
"start_date": run.temporal_start.isoformat() if run.temporal_start else None, "start_date": source_run.temporal_start.isoformat() if source_run.temporal_start else None,
"end_date": run.temporal_end.isoformat() if run.temporal_end else None, "end_date": source_run.temporal_end.isoformat() if source_run.temporal_end else None,
}, },
"summary": _empty_remote_sensing_summary(), "summary": _empty_remote_sensing_summary(),
"cells": [], "cells": [],
@@ -1287,6 +1253,73 @@ def _build_remote_sensing_celery_payload(task_id: str) -> dict | None:
return payload return payload
def _create_cached_status_run(
*,
location: SoilLocation,
farm_uuid: str,
block_code: str,
temporal_start,
temporal_end,
cached_response: dict[str, Any],
) -> RemoteSensingRun:
source_run_id = ((cached_response.get("run") or {}).get("id"))
source_result_id = ((cached_response.get("subdivision_result") or {}).get("id"))
task_id = str(uuid4())
return RemoteSensingRun.objects.create(
soil_location=location,
block_subdivision=None,
block_code=block_code or "",
chunk_size_sqm=int(cached_response.get("chunk_size_sqm") or _resolve_chunk_size_for_location(location, block_code)),
temporal_start=temporal_start,
temporal_end=temporal_end,
status=RemoteSensingRun.STATUS_SUCCESS,
started_at=timezone.now(),
finished_at=timezone.now(),
metadata={
"requested_via": "api",
"farm_uuid": farm_uuid,
"task_id": task_id,
"stage": "completed",
"status_label": "completed",
"selected_features": list(
((cached_response.get("subdivision_result") or {}).get("selected_features"))
or ((cached_response.get("run") or {}).get("selected_features"))
or DEFAULT_CLUSTER_FEATURES
),
"scope": "all_blocks",
"cache_hit": True,
"source_run_id": source_run_id,
"source_result_id": source_result_id,
"timestamps": {
"queued_at": timezone.now().isoformat(),
"completed_at": timezone.now().isoformat(),
},
},
)
def _resolve_status_source_run(run: RemoteSensingRun) -> RemoteSensingRun:
source_run_id = dict(run.metadata or {}).get("source_run_id")
if not source_run_id:
return run
return RemoteSensingRun.objects.filter(pk=source_run_id).select_related("soil_location").first() or run
def _resolve_status_subdivision_result(
run: RemoteSensingRun,
*,
source_run: RemoteSensingRun,
) -> RemoteSensingSubdivisionResult | None:
source_result_id = dict(run.metadata or {}).get("source_result_id")
if source_result_id:
return (
RemoteSensingSubdivisionResult.objects.filter(pk=source_result_id)
.prefetch_related("assignments__cell", "cluster_blocks")
.first()
)
return getattr(source_run, "subdivision_result", None)
def _get_location_by_lat_lon(lat, lon, *, prefetch: bool = False): def _get_location_by_lat_lon(lat, lon, *, prefetch: bool = False):
lat_rounded = round(lat, 6) lat_rounded = round(lat, 6)
lon_rounded = round(lon, 6) lon_rounded = round(lon, 6)
@@ -1428,6 +1461,210 @@ def _resolve_chunk_size_for_location(location: SoilLocation, block_code: str) ->
return 900 return 900
def _build_cached_remote_sensing_response(
*,
location: SoilLocation,
farm_uuid: str,
block_code: str,
start_date,
end_date,
page: int,
page_size: int,
) -> dict[str, Any] | None:
run = _get_latest_remote_sensing_run(
location=location,
farm_uuid=farm_uuid,
block_code=block_code,
start_date=start_date,
end_date=end_date,
)
subdivision_result = _get_remote_sensing_subdivision_result(
location=location,
farm_uuid=farm_uuid,
block_code=block_code,
start_date=start_date,
end_date=end_date,
)
observations = _get_remote_sensing_observations(
location=location,
block_code=block_code,
start_date=start_date,
end_date=end_date,
run=run if run is not None else getattr(subdivision_result, "run", None),
)
if run is None and subdivision_result is None:
observations = observations.none()
if not observations.exists():
fallback_cached_response = _build_fallback_cached_remote_sensing_response(
location=location,
farm_uuid=farm_uuid,
block_code=block_code,
page=page,
page_size=page_size,
)
if fallback_cached_response is not None:
return fallback_cached_response
if run is None:
return None
processing = run.status in {
RemoteSensingRun.STATUS_PENDING,
RemoteSensingRun.STATUS_RUNNING,
}
source = "processing" if processing else "database"
status_label = "processing" if processing else "not_found"
payload = {
"status": status_label,
"source": source,
"location": SoilLocationResponseSerializer(location).data,
"block_code": block_code or "",
"chunk_size_sqm": getattr(run, "chunk_size_sqm", None),
"temporal_extent": {
"start_date": start_date.isoformat(),
"end_date": end_date.isoformat(),
},
"summary": _empty_remote_sensing_summary(),
"cells": [],
"run": RemoteSensingRunSerializer(run).data,
"subdivision_result": None,
"metadata": {
"farm_uuid": farm_uuid,
"cache_hit": True,
},
}
return payload
paginated_observations = _paginate_observations(
observations,
page=page,
page_size=page_size,
)
paginated_assignments = []
pagination = {"cells": paginated_observations["pagination"]}
if subdivision_result is not None:
paginated = _paginate_assignments(
subdivision_result,
page=page,
page_size=page_size,
)
paginated_assignments = paginated["items"]
pagination["assignments"] = paginated["pagination"]
subdivision_data = None
if subdivision_result is not None:
subdivision_data = RemoteSensingSubdivisionResultSerializer(
subdivision_result,
context={"paginated_assignments": paginated_assignments},
).data
payload = {
"status": "success",
"source": "database",
"location": SoilLocationResponseSerializer(location).data,
"block_code": block_code or "",
"chunk_size_sqm": observations.first().cell.chunk_size_sqm,
"temporal_extent": {
"start_date": start_date.isoformat(),
"end_date": end_date.isoformat(),
},
"summary": _build_remote_sensing_summary(observations),
"cells": RemoteSensingCellObservationSerializer(
paginated_observations["items"],
many=True,
).data,
"run": RemoteSensingRunSerializer(run).data if run else None,
"subdivision_result": subdivision_data,
"pagination": pagination,
"metadata": {
"farm_uuid": farm_uuid,
"cache_hit": True,
},
}
return payload
def _build_fallback_cached_remote_sensing_response(
    *,
    location: SoilLocation,
    farm_uuid: str,
    block_code: str,
    page: int,
    page_size: int,
) -> dict[str, Any] | None:
    """Serve the latest successfully completed run for this farm/block from the DB.

    Returns ``None`` when no completed run exists, or when the run has no stored
    observations, so the caller can fall through to another strategy. The
    payload is marked ``cache_match: latest_completed_for_farm`` so consumers
    can tell it may cover a different temporal window than requested.
    """
    run = _get_latest_completed_remote_sensing_run(
        location=location,
        farm_uuid=farm_uuid,
        block_code=block_code,
    )
    if run is None:
        return None

    observations = _get_remote_sensing_observations(
        location=location,
        block_code=block_code,
        start_date=run.temporal_start,
        end_date=run.temporal_end,
        run=run,
    )
    if not observations.exists():
        return None

    subdivision = _get_remote_sensing_subdivision_result(
        location=location,
        farm_uuid=farm_uuid,
        block_code=block_code,
        start_date=run.temporal_start,
        end_date=run.temporal_end,
    )

    cell_page = _paginate_observations(
        observations,
        page=page,
        page_size=page_size,
    )
    pagination = {"cells": cell_page["pagination"]}

    subdivision_data = None
    if subdivision is not None:
        assignment_page = _paginate_assignments(
            subdivision,
            page=page,
            page_size=page_size,
        )
        pagination["assignments"] = assignment_page["pagination"]
        subdivision_data = RemoteSensingSubdivisionResultSerializer(
            subdivision,
            context={"paginated_assignments": assignment_page["items"]},
        ).data

    start = run.temporal_start
    end = run.temporal_end
    return {
        "status": "success",
        "source": "database",
        "location": SoilLocationResponseSerializer(location).data,
        "block_code": block_code or "",
        "chunk_size_sqm": run.chunk_size_sqm,
        "temporal_extent": {
            "start_date": start.isoformat() if start else None,
            "end_date": end.isoformat() if end else None,
        },
        "summary": _build_remote_sensing_summary(observations),
        "cells": RemoteSensingCellObservationSerializer(
            cell_page["items"],
            many=True,
        ).data,
        "run": RemoteSensingRunSerializer(run).data,
        "subdivision_result": subdivision_data,
        "pagination": pagination,
        "metadata": {
            "farm_uuid": farm_uuid,
            "cache_hit": True,
            "cache_match": "latest_completed_for_farm",
        },
    }
def _resolve_live_remote_sensing_window(payload: dict[str, Any]): def _resolve_live_remote_sensing_window(payload: dict[str, Any]):
temporal_start = payload.get("temporal_start") temporal_start = payload.get("temporal_start")
temporal_end = payload.get("temporal_end") temporal_end = payload.get("temporal_end")
@@ -1484,7 +1721,66 @@ def _build_virtual_cluster_block_cell(
) )
def _build_cached_cluster_block_live_payload(
    *,
    cluster_block: RemoteSensingClusterBlock,
    temporal_start,
    temporal_end,
) -> dict[str, Any] | None:
    """Build a cached "live" payload for a cluster block from stored observations.

    Defect fixed: the first line of this definition was fused with the old
    ``_get_remote_sensing_observations`` signature by a side-by-side diff
    render, leaving the file syntactically invalid; the clean definition is
    restored here.

    Returns ``None`` when the block's source result covers a different temporal
    window, or when no grid observations exist for the window, so the caller
    can fall back to live computation.
    """
    result = cluster_block.result
    # Only serve from cache when the stored result matches the requested window.
    if result.temporal_start != temporal_start or result.temporal_end != temporal_end:
        return None
    observations = (
        AnalysisGridObservation.objects.select_related("cell")
        .filter(
            cell__soil_location=cluster_block.soil_location,
            cell__cell_code__in=list(cluster_block.cell_codes or []),
            temporal_start=temporal_start,
            temporal_end=temporal_end,
        )
        .order_by("cell__cell_code")
    )
    if not observations.exists():
        return None
    # Aggregate once at the DB level; each value may be None when the column
    # is entirely NULL for the window, which _round_or_none passes through.
    metrics = observations.aggregate(
        ndvi=Avg("ndvi"),
        ndwi=Avg("ndwi"),
        soil_vv=Avg("soil_vv"),
        soil_vv_db=Avg("soil_vv_db"),
    )
    return {
        "status": "success",
        "source": "database",
        "cluster_block": RemoteSensingClusterBlockSerializer(cluster_block).data,
        "temporal_extent": {
            "start_date": temporal_start.isoformat(),
            "end_date": temporal_end.isoformat(),
        },
        "selected_features": list(DEFAULT_CLUSTER_FEATURES),
        "summary": {
            # Prefer the persisted count; fall back to counting observations.
            "cell_count": int(cluster_block.cell_count or observations.count()),
            "ndvi_mean": _round_or_none(metrics.get("ndvi")),
            "ndwi_mean": _round_or_none(metrics.get("ndwi")),
            "soil_vv_db_mean": _round_or_none(metrics.get("soil_vv_db")),
        },
        "metrics": {
            "ndvi": _round_or_none(metrics.get("ndvi")),
            "ndwi": _round_or_none(metrics.get("ndwi")),
            "soil_vv": _round_or_none(metrics.get("soil_vv")),
            "soil_vv_db": _round_or_none(metrics.get("soil_vv_db")),
        },
        "metadata": {
            "requested_cluster_uuid": str(cluster_block.uuid),
            "cache_hit": True,
            "source_run_id": result.run_id,
            "source_result_id": result.id,
        },
    }
def _get_remote_sensing_observations(*, location, block_code: str, start_date, end_date, run=None):
queryset = ( queryset = (
AnalysisGridObservation.objects.select_related("cell", "run") AnalysisGridObservation.objects.select_related("cell", "run")
.filter( .filter(
@@ -1494,24 +1790,56 @@ def _get_remote_sensing_observations(*, location, block_code: str, start_date, e
) )
.order_by("cell__cell_code") .order_by("cell__cell_code")
) )
return queryset.filter(cell__block_code=block_code or "") queryset = queryset.filter(cell__block_code=block_code or "")
if run is not None:
queryset = queryset.filter(run=run)
return queryset
def _get_latest_remote_sensing_run(*, location, block_code: str, start_date, end_date): def _select_farm_scoped_run(runs, farm_uuid: str):
return ( legacy_candidate = None
for run in runs:
metadata = dict(run.metadata or {})
scoped_farm_uuid = metadata.get("farm_uuid")
if scoped_farm_uuid == farm_uuid:
return run
if scoped_farm_uuid in (None, "") and legacy_candidate is None:
legacy_candidate = run
return legacy_candidate
def _get_latest_remote_sensing_run(*, location, farm_uuid: str, block_code: str, start_date, end_date):
    """Return the newest run for this location/block/window scoped to the farm.

    Defect fixed: the body was interleaved with leftover old-version lines
    (``.first()`` tail) by a side-by-side diff render; the clean definition is
    restored here.

    All matching runs are materialized newest-first and then filtered in
    Python by ``_select_farm_scoped_run`` (exact farm match, else the first
    legacy run without a farm_uuid). Returns ``None`` when nothing matches.
    """
    runs = list(
        RemoteSensingRun.objects.filter(
            soil_location=location,
            block_code=block_code or "",
            temporal_start=start_date,
            temporal_end=end_date,
        ).order_by("-created_at", "-id")
    )
    return _select_farm_scoped_run(runs, farm_uuid)
def _get_latest_completed_remote_sensing_run(*, location, farm_uuid: str, block_code: str):
    """Return the newest SUCCESS-status run for this location/block and farm.

    Defect fixed: the first line of this definition was fused with the old
    ``_get_remote_sensing_subdivision_result`` signature by a side-by-side
    diff render; the clean definition is restored here.

    Unlike ``_get_latest_remote_sensing_run`` this ignores the temporal window
    and only requires a completed run, which makes it suitable as a cache
    fallback. Farm scoping is delegated to ``_select_farm_scoped_run``.
    """
    runs = list(
        RemoteSensingRun.objects.filter(
            soil_location=location,
            block_code=block_code or "",
            status=RemoteSensingRun.STATUS_SUCCESS,
        ).order_by("-created_at", "-id")
    )
    return _select_farm_scoped_run(runs, farm_uuid)
def _get_remote_sensing_subdivision_result(
*,
location,
farm_uuid: str,
block_code: str,
start_date,
end_date,
):
results = list(
RemoteSensingSubdivisionResult.objects.filter( RemoteSensingSubdivisionResult.objects.filter(
soil_location=location, soil_location=location,
block_code=block_code or "", block_code=block_code or "",
@@ -1521,8 +1849,16 @@ def _get_remote_sensing_subdivision_result(*, location, block_code: str, start_d
.select_related("run") .select_related("run")
.prefetch_related("assignments__cell", "cluster_blocks") .prefetch_related("assignments__cell", "cluster_blocks")
.order_by("-created_at", "-id") .order_by("-created_at", "-id")
.first()
) )
legacy_candidate = None
for result in results:
run = getattr(result, "run", None)
scoped_farm_uuid = dict(getattr(run, "metadata", {}) or {}).get("farm_uuid")
if scoped_farm_uuid == farm_uuid:
return result
if scoped_farm_uuid in (None, "") and legacy_candidate is None:
legacy_candidate = result
return legacy_candidate
def _build_remote_sensing_summary(observations): def _build_remote_sensing_summary(observations):
-74
View File
@@ -1,74 +0,0 @@
# Plant Names API
این API فقط لیست نام گیاه‌ها را به همراه آیکون و مراحل رشد برمی‌گرداند.
## Endpoint
- `GET /api/plants/names/`
## کاربرد
- گرفتن لیست سبک برای dropdown یا selector فرانت
- نمایش نام گیاه
- نمایش `icon`
- نمایش مراحل رشد هر گیاه
## رفتار API
- فقط فیلدهای `name`، `icon` و `growth_stages` را برمی‌گرداند
- اگر `growth_stage` برای یک گیاه خالی باشد، API به صورت خودکار این مراحل پیش‌فرض را اضافه و در دیتابیس ذخیره می‌کند:
- `initial`
- `vegetative`
- `flowering`
- `fruiting`
- `maturity`
- اگر `icon` خالی باشد، مقدار پیش‌فرض `leaf` ذخیره و برگردانده می‌شود
- اگر در `growth_profile.stage_thresholds` مرحله‌ای وجود داشته باشد، آن مرحله هم در خروجی `growth_stages` لحاظ می‌شود
## نمونه درخواست
```bash
curl -X GET http://localhost:8000/api/plants/names/
```
## نمونه پاسخ
```json
{
"code": 200,
"msg": "success",
"data": [
{
"name": "Tomato",
"icon": "leaf",
"growth_stages": [
"vegetative",
"flowering",
"fruiting"
]
},
{
"name": "Pepper",
"icon": "leaf",
"growth_stages": [
"initial",
"vegetative",
"flowering",
"fruiting",
"maturity"
]
}
]
}
```
## فیلدهای خروجی
- `name`: نام گیاه
- `icon`: آیکون گیاه برای فرانت
- `growth_stages`: آرایه‌ای از مراحل رشد گیاه
## نکته برای فرانت
- این endpoint برای لیست سبک طراحی شده و مناسب صفحه‌های انتخاب گیاه است
- اگر جزئیات کامل گیاه لازم دارید، از `GET /api/plants/` یا `GET /api/plants/{id}/` استفاده کنید
+7 -4
View File
@@ -85,24 +85,27 @@ class PlantConfig(AppConfig):
return self.growth_stage_aliases.get(normalized, value) return self.growth_stage_aliases.get(normalized, value)
def resolve_plant_name(self, plant_name: str | None) -> str | None: def resolve_plant_name(self, plant_name: str | None) -> str | None:
from .models import Plant from farm_data.models import PlantCatalogSnapshot
value = (plant_name or "").strip() value = (plant_name or "").strip()
if not value: if not value:
return value return value
plant = Plant.objects.filter(name=value).first() or Plant.objects.filter(name__iexact=value).first() plant = (
PlantCatalogSnapshot.objects.filter(name=value).first()
or PlantCatalogSnapshot.objects.filter(name__iexact=value).first()
)
if plant is not None: if plant is not None:
return plant.name return plant.name
normalized = self._normalize_lookup_value(value) normalized = self._normalize_lookup_value(value)
alias_target = self.plant_aliases.get(normalized) alias_target = self.plant_aliases.get(normalized)
if alias_target: if alias_target:
aliased_plant = Plant.objects.filter(name=alias_target).first() aliased_plant = PlantCatalogSnapshot.objects.filter(name=alias_target).first()
if aliased_plant is not None: if aliased_plant is not None:
return aliased_plant.name return aliased_plant.name
for plant in Plant.objects.only("name").iterator(): for plant in PlantCatalogSnapshot.objects.only("name").iterator():
if self._normalize_lookup_value(plant.name) == normalized: if self._normalize_lookup_value(plant.name) == normalized:
return plant.name return plant.name
-109
View File
@@ -1,109 +0,0 @@
"""
Management command to seed initial plant data.
Run: python manage.py seed_plants
"""
from django.core.management.base import BaseCommand
from plant.models import Plant
INITIAL_PLANTS = [
{
"name": "گوجه‌فرنگی",
"light": "آفتاب کامل (۶-۸ ساعت)",
"watering": "منظم، هفته‌ای ۲-۳ بار",
"soil": "لومی، غنی از مواد آلی، pH بین ۶-۶.۸",
"temperature": "۲۰-۳۰ درجه سانتی‌گراد",
"planting_season": "بهار",
"harvest_time": "۷۰-۹۰ روز پس از کاشت",
"spacing": "۴۵-۶۰ سانتی‌متر",
"fertilizer": "کود NPK متعادل، کمپوست",
},
{
"name": "خیار",
"light": "آفتاب کامل",
"watering": "روزانه در فصل گرم",
"soil": "لومی شنی، غنی از هوموس",
"temperature": "۱۸-۳۰ درجه سانتی‌گراد",
"planting_season": "بهار تا اوایل تابستان",
"harvest_time": "۵۰-۷۰ روز پس از کاشت",
"spacing": "۳۰-۴۵ سانتی‌متر",
"fertilizer": "کود ازته، کمپوست",
},
{
"name": "فلفل دلمه‌ای",
"light": "آفتاب کامل (۶-۸ ساعت)",
"watering": "منظم، هفته‌ای ۲-۳ بار",
"soil": "لومی، زهکشی مناسب",
"temperature": "۲۰-۳۰ درجه سانتی‌گراد",
"planting_season": "بهار",
"harvest_time": "۶۰-۹۰ روز پس از کاشت",
"spacing": "۴۰-۵۰ سانتی‌متر",
"fertilizer": "کود فسفره و پتاسه",
},
{
"name": "هویج",
"light": "آفتاب کامل تا نیمه‌سایه",
"watering": "منظم، خاک مرطوب",
"soil": "شنی لومی، عمیق، بدون سنگ",
"temperature": "۱۵-۲۵ درجه سانتی‌گراد",
"planting_season": "اوایل بهار یا پاییز",
"harvest_time": "۷۰-۸۰ روز پس از کاشت",
"spacing": "۵-۸ سانتی‌متر",
"fertilizer": "کود پتاسه، کمپوست پوسیده",
},
{
"name": "کاهو",
"light": "نیمه‌سایه تا آفتاب کامل",
"watering": "منظم، خاک مرطوب",
"soil": "لومی، غنی از مواد آلی",
"temperature": "۱۰-۲۰ درجه سانتی‌گراد",
"planting_season": "بهار و پاییز",
"harvest_time": "۴۵-۶۰ روز پس از کاشت",
"spacing": "۲۰-۳۰ سانتی‌متر",
"fertilizer": "کود ازته، کمپوست",
},
{
"name": "سیب‌زمینی",
"light": "آفتاب کامل",
"watering": "منظم، هفته‌ای ۲ بار",
"soil": "لومی شنی، اسیدی ملایم، pH بین ۵-۶",
"temperature": "۱۵-۲۲ درجه سانتی‌گراد",
"planting_season": "اواخر زمستان تا اوایل بهار",
"harvest_time": "۹۰-۱۲۰ روز پس از کاشت",
"spacing": "۳۰-۴۰ سانتی‌متر",
"fertilizer": "کود NPK، کمپوست",
},
{
"name": "پیاز",
"light": "آفتاب کامل",
"watering": "منظم، خاک مرطوب ولی نه غرقابی",
"soil": "لومی، زهکشی خوب",
"temperature": "۱۲-۲۴ درجه سانتی‌گراد",
"planting_season": "پاییز یا اوایل بهار",
"harvest_time": "۹۰-۱۵۰ روز پس از کاشت",
"spacing": "۱۰-۱۵ سانتی‌متر",
"fertilizer": "کود فسفره، سولفات پتاسیم",
},
]
class Command(BaseCommand):
    """Seed the ``Plant`` table from the module-level ``INITIAL_PLANTS`` list.

    Safe to re-run: ``get_or_create`` only inserts a plant when no row with
    that name already exists.
    """

    help = "Seed initial plant data (7 common vegetables)"

    def handle(self, *args, **options):
        """Create any missing plants and report how many were inserted."""
        created_count = 0
        for plant_data in INITIAL_PLANTS:
            # Look up by name only; the remaining fields are applied solely
            # when the row is first created, so existing rows are untouched.
            _, created = Plant.objects.get_or_create(
                name=plant_data["name"],
                defaults=plant_data,
            )
            if created:
                created_count += 1
                self.stdout.write(
                    self.style.SUCCESS(f" Created: {plant_data['name']}")
                )
        self.stdout.write(
            self.style.SUCCESS(f"\nDone. Created {created_count} new plants.")
        )
-64
View File
@@ -1,64 +0,0 @@
from rest_framework import serializers
from .models import Plant
DEFAULT_PLANT_GROWTH_STAGES = [
    "initial",
    "vegetative",
    "flowering",
    "fruiting",
    "maturity",
]


def normalize_growth_stage_values(plant: Plant) -> list[str]:
    """Collect the plant's growth stages as an ordered, de-duplicated list.

    Stages are gathered from the comma-separated ``growth_stage`` field
    (Persian commas are accepted) and from the keys of
    ``growth_profile["stage_thresholds"]``. When both sources yield nothing,
    a copy of ``DEFAULT_PLANT_GROWTH_STAGES`` is returned.
    """
    collected: list[str] = []

    def _append_unique(raw: str) -> None:
        candidate = raw.strip()
        if candidate and candidate not in collected:
            collected.append(candidate)

    # Normalize the Persian comma (U+060C) to a plain comma before splitting.
    for chunk in (plant.growth_stage or "").replace("،", ",").split(","):
        _append_unique(chunk)

    thresholds = plant.growth_profile.get("stage_thresholds", {})
    if isinstance(thresholds, dict):
        for stage_name in thresholds:
            _append_unique(str(stage_name))

    return collected if collected else list(DEFAULT_PLANT_GROWTH_STAGES)
class PlantSerializer(serializers.ModelSerializer):
    """Input/output serializer for the ``Plant`` model.

    Exposes all descriptive fields; ``id`` and the timestamps are read-only
    because they are managed by the database.
    """

    class Meta:
        model = Plant
        fields = [
            "id",
            "name",
            "icon",
            "light",
            "watering",
            "soil",
            "temperature",
            "growth_stage",
            "planting_season",
            "harvest_time",
            "spacing",
            "fertilizer",
            "created_at",
            "updated_at",
        ]
        read_only_fields = ["id", "created_at", "updated_at"]
class PlantNameStageSerializer(serializers.Serializer):
    """Lightweight representation: plant name, icon, and growth-stage list."""

    name = serializers.CharField()
    icon = serializers.CharField()
    growth_stages = serializers.ListField(child=serializers.CharField())
-34
View File
@@ -1,34 +0,0 @@
"""
سرویسهای گیاه دریافت مشخصات گیاه از API خارجی بر اساس نام.
"""
import logging
logger = logging.getLogger(__name__)
def fetch_plant_info_from_api(plant_name: str) -> dict | None:
"""
اتصال به API خارجی و دریافت مشخصات گیاه بر اساس نام.
TODO: پیادهسازی اتصال واقعی به API.
در حال حاضر این تابع خالی است و None برمیگرداند.
پارامترها:
plant_name: نام گیاه
خروجی مورد انتظار (وقتی پیادهسازی شود):
{
"name": "گوجه‌فرنگی",
"light": "آفتاب کامل",
"watering": "منظم، هفته‌ای ۲-۳ بار",
"soil": "لومی، غنی از مواد آلی",
"temperature": "۲۰-۳۰ درجه سانتی‌گراد",
"planting_season": "بهار",
"harvest_time": "۷۰-۹۰ روز پس از کاشت",
"spacing": "۴۵-۶۰ سانتی‌متر",
"fertilizer": "کود NPK متعادل",
}
"""
# TODO: اتصال واقعی به API
return None
-15
View File
@@ -1,15 +0,0 @@
from django.urls import path
from .views import (
PlantDetailView,
PlantFetchInfoView,
PlantListCreateView,
PlantNameStageListView,
)
# Plant API routes: list/create, lightweight name+stage list, detail CRUD,
# and a proxy endpoint that fetches plant info from an external service.
urlpatterns = [
    path("", PlantListCreateView.as_view(), name="plant-list-create"),
    path("names/", PlantNameStageListView.as_view(), name="plant-name-stage-list"),
    path("<int:pk>/", PlantDetailView.as_view(), name="plant-detail"),
    path("fetch-info/", PlantFetchInfoView.as_view(), name="plant-fetch-info"),
]
-364
View File
@@ -1,364 +0,0 @@
from drf_spectacular.utils import (
OpenApiExample,
OpenApiResponse,
extend_schema,
inline_serializer,
)
from rest_framework import serializers as drf_serializers
from rest_framework import status
from rest_framework.response import Response
from rest_framework.views import APIView
from config.openapi import build_envelope_serializer, build_response
from .models import Plant
from .serializers import (
PlantNameStageSerializer,
PlantSerializer,
normalize_growth_stage_values,
)
from .services import fetch_plant_info_from_api
# Envelope serializers ({code, msg, data}) used in the @extend_schema
# responses of the views below so the OpenAPI schema matches the actual
# response wrapper.
PlantListResponseSerializer = build_envelope_serializer(
    "PlantListResponseSerializer",
    PlantSerializer,
    many=True,
)
PlantDetailResponseSerializer = build_envelope_serializer(
    "PlantDetailResponseSerializer",
    PlantSerializer,
)
# Error envelope: `data` may be absent or null (used for 4xx/5xx responses).
PlantValidationErrorSerializer = build_envelope_serializer(
    "PlantValidationErrorSerializer",
    data_required=False,
    allow_null=True,
)
PlantFetchInfoResponseSerializer = build_envelope_serializer(
    "PlantFetchInfoResponseSerializer",
    PlantSerializer,
)
PlantNameStageListResponseSerializer = build_envelope_serializer(
    "PlantNameStageListResponseSerializer",
    PlantNameStageSerializer,
    many=True,
)
class PlantListCreateView(APIView):
    """List all plants and create a new plant."""

    @extend_schema(
        tags=["Plant"],
        summary="لیست گیاهان",
        description="لیست تمام گیاهان ذخیره‌شده را برمی‌گرداند.",
        responses={
            200: build_response(
                PlantListResponseSerializer,
                "لیست گیاهان ذخیره‌شده.",
            ),
        },
    )
    def get(self, request):
        """Return every stored plant wrapped in the {code, msg, data} envelope."""
        plants = Plant.objects.all()
        serializer = PlantSerializer(plants, many=True)
        return Response(
            {"code": 200, "msg": "success", "data": serializer.data},
            status=status.HTTP_200_OK,
        )

    @extend_schema(
        tags=["Plant"],
        summary="ایجاد گیاه جدید",
        description="یک گیاه جدید با مشخصات داده‌شده ایجاد می‌کند.",
        request=PlantSerializer,
        responses={
            201: build_response(
                PlantDetailResponseSerializer,
                "گیاه جدید با موفقیت ایجاد شد.",
            ),
            400: build_response(
                PlantValidationErrorSerializer,
                "داده ورودی نامعتبر است.",
            ),
        },
        examples=[
            OpenApiExample(
                "نمونه درخواست",
                value={
                    "name": "گوجه‌فرنگی",
                    "light": "آفتاب کامل",
                    "watering": "منظم، هفته‌ای ۲-۳ بار",
                    "soil": "لومی، غنی از مواد آلی",
                    "temperature": "۲۰-۳۰ درجه سانتی‌گراد",
                    "growth_stage": "رشد رویشی",
                    "planting_season": "بهار",
                    "harvest_time": "۷۰-۹۰ روز پس از کاشت",
                    "spacing": "۴۵-۶۰ سانتی‌متر",
                    "fertilizer": "کود NPK متعادل",
                },
                request_only=True,
            ),
        ],
    )
    def post(self, request):
        """Validate the body with PlantSerializer and create the plant.

        Returns a 400 envelope with serializer errors on invalid input.
        """
        serializer = PlantSerializer(data=request.data)
        if not serializer.is_valid():
            return Response(
                {"code": 400, "msg": "داده نامعتبر.", "data": serializer.errors},
                status=status.HTTP_400_BAD_REQUEST,
            )
        serializer.save()
        return Response(
            {"code": 201, "msg": "success", "data": serializer.data},
            status=status.HTTP_201_CREATED,
        )
class PlantNameStageListView(APIView):
    """Lightweight list of plant name, icon and growth stages."""

    @extend_schema(
        tags=["Plant"],
        summary="لیست نام گیاهان با مراحل رشد",
        description=(
            "فقط نام گیاه، آیکون و مراحل رشد را برمی‌گرداند. "
            "اگر برای گیاهی مرحله رشد ثبت نشده باشد، مراحل پیش‌فرض به آن اضافه و ذخیره می‌شود."
        ),
        responses={
            200: build_response(
                PlantNameStageListResponseSerializer,
                "لیست نام گیاهان به همراه مراحل رشد و آیکون.",
            ),
        },
    )
    def get(self, request):
        """Return {name, icon, growth_stages} for every plant.

        NOTE: this GET has a write side effect — normalized stages and a
        default icon ("leaf") are persisted back to the database for plants
        that were missing them.
        """
        payload = []
        for plant in Plant.objects.all():
            growth_stages = normalize_growth_stage_values(plant)
            serialized_stages = ", ".join(growth_stages)
            update_fields: list[str] = []
            # Persist the normalized comma-separated form when it differs.
            if plant.growth_stage != serialized_stages:
                plant.growth_stage = serialized_stages
                update_fields.append("growth_stage")
            if not plant.icon:
                plant.icon = "leaf"
                update_fields.append("icon")
            if update_fields:
                # Include updated_at so the auto-now timestamp is written too.
                update_fields.append("updated_at")
                plant.save(update_fields=update_fields)
            payload.append(
                {
                    "name": plant.name,
                    "icon": plant.icon,
                    "growth_stages": growth_stages,
                }
            )
        serializer = PlantNameStageSerializer(payload, many=True)
        return Response(
            {"code": 200, "msg": "success", "data": serializer.data},
            status=status.HTTP_200_OK,
        )
class PlantDetailView(APIView):
    """Retrieve, update (full/partial) and delete a single plant."""

    def _get_plant(self, pk):
        # Returns None instead of raising so handlers can emit the 404 envelope.
        return Plant.objects.filter(pk=pk).first()

    @extend_schema(
        tags=["Plant"],
        summary="جزئیات گیاه",
        description="مشخصات یک گیاه را بر اساس شناسه برمی‌گرداند.",
        responses={
            200: build_response(
                PlantDetailResponseSerializer,
                "جزئیات گیاه.",
            ),
            404: build_response(
                PlantValidationErrorSerializer,
                "گیاه یافت نشد.",
            ),
        },
    )
    def get(self, request, pk):
        """Return the plant identified by ``pk``; 404 envelope when missing."""
        plant = self._get_plant(pk)
        if not plant:
            return Response(
                {"code": 404, "msg": "گیاه یافت نشد.", "data": None},
                status=status.HTTP_404_NOT_FOUND,
            )
        serializer = PlantSerializer(plant)
        return Response(
            {"code": 200, "msg": "success", "data": serializer.data},
            status=status.HTTP_200_OK,
        )

    @extend_schema(
        tags=["Plant"],
        summary="ویرایش کامل گیاه",
        description="تمام فیلدهای یک گیاه را آپدیت می‌کند.",
        request=PlantSerializer,
        responses={
            200: build_response(
                PlantDetailResponseSerializer,
                "گیاه با موفقیت به‌روزرسانی شد.",
            ),
            400: build_response(
                PlantValidationErrorSerializer,
                "داده ورودی نامعتبر است.",
            ),
            404: build_response(
                PlantValidationErrorSerializer,
                "گیاه یافت نشد.",
            ),
        },
    )
    def put(self, request, pk):
        """Full update: replaces all writable fields of the plant."""
        plant = self._get_plant(pk)
        if not plant:
            return Response(
                {"code": 404, "msg": "گیاه یافت نشد.", "data": None},
                status=status.HTTP_404_NOT_FOUND,
            )
        serializer = PlantSerializer(plant, data=request.data)
        if not serializer.is_valid():
            return Response(
                {"code": 400, "msg": "داده نامعتبر.", "data": serializer.errors},
                status=status.HTTP_400_BAD_REQUEST,
            )
        serializer.save()
        return Response(
            {"code": 200, "msg": "success", "data": serializer.data},
            status=status.HTTP_200_OK,
        )

    @extend_schema(
        tags=["Plant"],
        summary="ویرایش جزئی گیاه",
        description="فقط فیلدهای ارسال‌شده آپدیت می‌شوند.",
        request=PlantSerializer,
        responses={
            200: build_response(
                PlantDetailResponseSerializer,
                "گیاه با موفقیت به‌روزرسانی شد.",
            ),
            400: build_response(
                PlantValidationErrorSerializer,
                "داده ورودی نامعتبر است.",
            ),
            404: build_response(
                PlantValidationErrorSerializer,
                "گیاه یافت نشد.",
            ),
        },
    )
    def patch(self, request, pk):
        """Partial update: only the submitted fields are changed."""
        plant = self._get_plant(pk)
        if not plant:
            return Response(
                {"code": 404, "msg": "گیاه یافت نشد.", "data": None},
                status=status.HTTP_404_NOT_FOUND,
            )
        serializer = PlantSerializer(plant, data=request.data, partial=True)
        if not serializer.is_valid():
            return Response(
                {"code": 400, "msg": "داده نامعتبر.", "data": serializer.errors},
                status=status.HTTP_400_BAD_REQUEST,
            )
        serializer.save()
        return Response(
            {"code": 200, "msg": "success", "data": serializer.data},
            status=status.HTTP_200_OK,
        )

    @extend_schema(
        tags=["Plant"],
        summary="حذف گیاه",
        description="یک گیاه را حذف می‌کند.",
        responses={
            200: build_response(
                PlantValidationErrorSerializer,
                "گیاه با موفقیت حذف شد.",
            ),
            404: build_response(
                PlantValidationErrorSerializer,
                "گیاه یافت نشد.",
            ),
        },
    )
    def delete(self, request, pk):
        """Delete the plant; 200 envelope on success, 404 when missing."""
        plant = self._get_plant(pk)
        if not plant:
            return Response(
                {"code": 404, "msg": "گیاه یافت نشد.", "data": None},
                status=status.HTTP_404_NOT_FOUND,
            )
        plant.delete()
        return Response(
            {"code": 200, "msg": "گیاه با موفقیت حذف شد.", "data": None},
            status=status.HTTP_200_OK,
        )
class PlantFetchInfoView(APIView):
    """Fetch plant details from the external API by name."""

    @extend_schema(
        tags=["Plant"],
        summary="دریافت مشخصات گیاه از API خارجی",
        description="بر اساس نام گیاه، مشخصات آن را از API خارجی دریافت می‌کند. (فعلاً خالی)",
        request=inline_serializer(
            name="PlantFetchInfoRequest",
            fields={
                "name": drf_serializers.CharField(help_text="نام گیاه"),
            },
        ),
        responses={
            200: build_response(
                PlantFetchInfoResponseSerializer,
                "اطلاعات گیاه از سرویس خارجی دریافت شد.",
            ),
            400: build_response(
                PlantValidationErrorSerializer,
                "نام گیاه ارسال نشده است.",
            ),
            503: build_response(
                PlantValidationErrorSerializer,
                "سرویس خارجی در دسترس نیست.",
            ),
        },
        examples=[
            OpenApiExample(
                "نمونه درخواست",
                value={"name": "گوجه‌فرنگی"},
                request_only=True,
            ),
        ],
    )
    def post(self, request):
        """Proxy a plant-info lookup to the external service.

        Returns 400 when no name is supplied and 503 while the external
        integration is unimplemented (fetch_plant_info_from_api returns None).
        """
        plant_name = request.data.get("name")
        if not plant_name:
            return Response(
                {"code": 400, "msg": "نام گیاه الزامی است.", "data": None},
                status=status.HTTP_400_BAD_REQUEST,
            )
        result = fetch_plant_info_from_api(plant_name)
        if result is None:
            # The external service call is currently a stub returning None.
            return Response(
                {
                    "code": 503,
                    "msg": "سرویس API هنوز پیاده‌سازی نشده است.",
                    "data": None,
                },
                status=status.HTTP_503_SERVICE_UNAVAILABLE,
            )
        return Response(
            {"code": 200, "msg": "success", "data": result},
            status=status.HTTP_200_OK,
        )