This commit is contained in:
2026-05-10 02:02:48 +03:30
parent cead7dafe2
commit 2d1f7da89e
30 changed files with 1195 additions and 320 deletions
@@ -0,0 +1,47 @@
from django.core.management.base import BaseCommand, CommandError
from config.proxy import build_proxy_url_from_proxychains_env, resolve_requests_proxy_url
from location_data.openeo_service import (
OpenEOAuthenticationError,
OpenEOConnectionSettings,
OpenEOServiceError,
connect_openeo,
is_openeo_auth_configured,
)
class Command(BaseCommand):
    """Management command that verifies openEO connectivity and authentication."""

    help = "Verify openEO connectivity and authentication using the current environment."

    def add_arguments(self, parser):
        # Escape hatch so environments without credentials can still run the check.
        parser.add_argument(
            "--skip-if-unconfigured",
            action="store_true",
            help="Exit successfully when the required auth environment variables are missing.",
        )

    def handle(self, *args, **options):
        conn_settings = OpenEOConnectionSettings.from_env()

        if not is_openeo_auth_configured(conn_settings):
            message = "openEO auth check skipped because the required credentials are not configured."
            if not options["skip_if_unconfigured"]:
                raise CommandError(message)
            self.stdout.write(self.style.WARNING(message))
            return

        self.stdout.write(f"Verifying openEO auth against {conn_settings.backend_url}...")

        # Report which proxy mechanism (if any) will carry the auth traffic.
        direct_proxy = resolve_requests_proxy_url(conn_settings.proxy_url)
        chained_proxy = build_proxy_url_from_proxychains_env()
        if direct_proxy:
            self.stdout.write(f"Using requests proxy for openEO auth: {direct_proxy}")
        elif chained_proxy:
            self.stdout.write(f"Using proxychains for openEO auth: {chained_proxy}")

        try:
            connect_openeo(conn_settings)
        except (OpenEOAuthenticationError, OpenEOServiceError) as exc:
            # Both failure types surface identically: non-zero exit with the message.
            raise CommandError(str(exc)) from exc

        self.stdout.write(self.style.SUCCESS("openEO authentication succeeded."))
@@ -60,6 +60,6 @@ class Migration(migrations.Migration):
),
migrations.AddIndex(
model_name="remotesensingclusterassignment",
index=models.Index(fields=["result", "cluster_label"], name="rs_cluster_assign_result_label_idx"),
index=models.Index(fields=["result", "cluster_label"], name="rs_ca_result_label_idx"),
),
]
@@ -0,0 +1,9 @@
from django.db import migrations
class Migration(migrations.Migration):
    """No-op migration: records its dependency in history without schema changes."""

    dependencies = [("location_data", "0012_remote_sensing_subdivision_models")]

    operations = []
@@ -0,0 +1,10 @@
from django.db import migrations
class Migration(migrations.Migration):
    """No-op merge migration: joins the 0013 and 0014 branches with no operations."""

    dependencies = [
        ("location_data", "0013_rename_cluster_assignment_index"),
        ("location_data", "0014_blocksubdivision_chunk_size_30m"),
    ]

    operations = []
+1 -1
View File
@@ -480,7 +480,7 @@ class RemoteSensingClusterAssignment(models.Model):
indexes = [
models.Index(
fields=["result", "cluster_label"],
name="rs_cluster_assign_result_label_idx",
name="rs_ca_result_label_idx",
)
]
verbose_name = "remote sensing cluster assignment"
+130 -15
View File
@@ -7,11 +7,16 @@ from datetime import date
from decimal import Decimal
from typing import Any
import requests
from config.proxy import apply_requests_proxy, build_proxy_url_from_proxychains_env
from .models import AnalysisGridCell
DEFAULT_OPENEO_BACKEND_URL = "https://openeofed.dataspace.copernicus.eu"
DEFAULT_OPENEO_PROVIDER = "openeo"
DEFAULT_OPENEO_PROXY_URL = "socks5h://host.docker.internal:10808"
SENTINEL2_COLLECTION = "SENTINEL2_L2A"
SENTINEL3_LST_COLLECTION = "SENTINEL3_SLSTR_L2_LST"
@@ -42,22 +47,39 @@ class OpenEOExecutionError(OpenEOServiceError):
"""Raised when a metric process graph can not be executed successfully."""
class TimeoutOverrideSession(requests.Session):
    """Requests session that enforces a minimum timeout for all outbound calls.

    Any call made without a timeout, or with a numeric timeout smaller than
    ``timeout_seconds``, is bumped up to ``timeout_seconds``.
    """

    def __init__(self, timeout_seconds: float):
        super().__init__()
        # Floor applied to every request issued through this session.
        self.timeout_seconds = timeout_seconds

    def request(self, method, url, **kwargs):
        timeout = kwargs.get("timeout")
        if timeout is None:
            kwargs["timeout"] = self.timeout_seconds
        elif isinstance(timeout, (int, float)) and timeout < self.timeout_seconds:
            # Bug fix: the previous unconditional `timeout < self.timeout_seconds`
            # raised TypeError for (connect, read) tuple timeouts, which requests
            # supports. Tuple timeouts are now passed through untouched.
            kwargs["timeout"] = self.timeout_seconds
        return super().request(method, url, **kwargs)
@dataclass(frozen=True)
class OpenEOConnectionSettings:
backend_url: str = DEFAULT_OPENEO_BACKEND_URL
auth_method: str = "client_credentials"
timeout_seconds: float = 60.0
client_id: str = ""
client_secret: str = ""
provider_id: str = ""
username: str = ""
password: str = ""
allow_interactive_oidc: bool = False
proxy_url: str = ""
@classmethod
def from_env(cls) -> "OpenEOConnectionSettings":
return cls(
backend_url=os.environ.get("OPENEO_BACKEND_URL", DEFAULT_OPENEO_BACKEND_URL).strip(),
auth_method=os.environ.get("OPENEO_AUTH_METHOD", "client_credentials").strip().lower(),
timeout_seconds=float(os.environ.get("OPENEO_TIMEOUT_SECONDS", "60").strip() or "60"),
client_id=os.environ.get("OPENEO_AUTH_CLIENT_ID", "").strip(),
client_secret=os.environ.get("OPENEO_AUTH_CLIENT_SECRET", "").strip(),
provider_id=os.environ.get("OPENEO_AUTH_PROVIDER_ID", "").strip(),
@@ -65,9 +87,40 @@ class OpenEOConnectionSettings:
password=os.environ.get("OPENEO_PASSWORD", "").strip(),
allow_interactive_oidc=os.environ.get("OPENEO_ALLOW_INTERACTIVE_OIDC", "0").strip().lower()
in {"1", "true", "yes", "on"},
proxy_url=_resolve_openeo_proxy_url_from_env(),
)
def _resolve_openeo_proxy_url_from_env() -> str:
    """Resolve the proxy URL that openEO traffic should use.

    An explicitly overridden OPENEO_PROXY_URL wins; otherwise a URL derived
    from the proxychains environment is preferred over the built-in default.
    """
    explicit_url = os.environ.get("OPENEO_PROXY_URL", DEFAULT_OPENEO_PROXY_URL).strip()
    if explicit_url and explicit_url != DEFAULT_OPENEO_PROXY_URL:
        return explicit_url
    # Keep openEO traffic proxied even when process-wide proxychains is disabled.
    derived_url = build_proxy_url_from_proxychains_env(require_enabled=False)
    return derived_url if derived_url else explicit_url
def is_openeo_auth_configured(settings: OpenEOConnectionSettings | None = None) -> bool:
    """Return True when the selected auth method has all required credentials.

    Falls back to environment-derived settings when none are supplied.
    """
    settings = settings or OpenEOConnectionSettings.from_env()
    method = settings.auth_method
    if method == "client_credentials":
        # Machine-to-machine flow needs both halves of the credential pair.
        return bool(settings.client_id) and bool(settings.client_secret)
    if method == "password":
        return bool(settings.username) and bool(settings.password)
    if method == "oidc":
        # Interactive flow is only considered configured when explicitly allowed.
        return settings.allow_interactive_oidc
    # Unknown auth methods are treated as unconfigured.
    return False
def build_openeo_requests_session(settings: OpenEOConnectionSettings) -> requests.Session:
    """Build a requests session with the timeout floor and proxy settings applied."""
    timeout_session = TimeoutOverrideSession(settings.timeout_seconds)
    return apply_requests_proxy(timeout_session, settings.proxy_url)
def connect_openeo(settings: OpenEOConnectionSettings | None = None):
"""
Build an authenticated openEO connection using environment-driven configuration.
@@ -77,36 +130,98 @@ def connect_openeo(settings: OpenEOConnectionSettings | None = None):
settings = settings or OpenEOConnectionSettings.from_env()
try:
import openeo
from openeo.rest.auth.oidc import (
OidcClientCredentialsAuthenticator,
OidcClientInfo,
OidcProviderInfo,
OidcResourceOwnerPasswordAuthenticator,
)
except ImportError as exc: # pragma: no cover - runtime dependency guard
raise OpenEOServiceError("The `openeo` Python client is required for remote sensing jobs.") from exc
connection = openeo.connect(settings.backend_url)
session = build_openeo_requests_session(settings)
connection = openeo.connect(
settings.backend_url,
session=session,
default_timeout=settings.timeout_seconds,
)
def resolve_oidc_context(
provider_id: str | None,
client_id: str | None,
client_secret: str | None,
) -> tuple[str, OidcClientInfo]:
resolved_provider_id, _ = connection._get_oidc_provider(provider_id, parse_info=False)
providers_payload = connection.get("/credentials/oidc", expected_status=200).json()
provider_map = {provider["id"]: provider for provider in providers_payload["providers"]}
provider_data = provider_map.get(resolved_provider_id)
if not provider_data:
raise OpenEOAuthenticationError(
f"OIDC provider metadata for {resolved_provider_id!r} was not returned by the backend."
)
provider_info = OidcProviderInfo(
provider_id=provider_data["id"],
title=provider_data["title"],
issuer=provider_data["issuer"],
scopes=provider_data.get("scopes"),
default_clients=provider_data.get("default_clients"),
requests_session=session,
)
if not client_id:
raise OpenEOAuthenticationError(
"OPENEO_AUTH_CLIENT_ID must be configured for this openEO auth flow."
)
return resolved_provider_id, OidcClientInfo(
client_id=client_id,
client_secret=client_secret,
provider=provider_info,
)
try:
if settings.auth_method == "client_credentials":
if not settings.client_id or not settings.client_secret:
raise OpenEOAuthenticationError(
"OPENEO_AUTH_CLIENT_ID and OPENEO_AUTH_CLIENT_SECRET must be configured."
)
auth_kwargs = {
"client_id": settings.client_id,
"client_secret": settings.client_secret,
}
if settings.provider_id:
auth_kwargs["provider_id"] = settings.provider_id
return connection.authenticate_oidc_client_credentials(**auth_kwargs)
provider_id, client_info = resolve_oidc_context(
settings.provider_id or None,
settings.client_id,
settings.client_secret,
)
authenticator = OidcClientCredentialsAuthenticator(
client_info=client_info,
requests_session=session,
)
return connection._authenticate_oidc(
authenticator,
provider_id=provider_id,
store_refresh_token=False,
oidc_auth_renewer=authenticator,
)
if settings.auth_method == "password":
if not settings.username or not settings.password:
raise OpenEOAuthenticationError(
"OPENEO_USERNAME and OPENEO_PASSWORD must be configured for password auth."
)
auth_kwargs = {
"username": settings.username,
"password": settings.password,
}
if settings.provider_id:
auth_kwargs["provider_id"] = settings.provider_id
return connection.authenticate_oidc_resource_owner_password_credentials(**auth_kwargs)
provider_id, client_info = resolve_oidc_context(
settings.provider_id or None,
settings.client_id or None,
settings.client_secret or None,
)
authenticator = OidcResourceOwnerPasswordAuthenticator(
client_info=client_info,
username=settings.username,
password=settings.password,
requests_session=session,
)
return connection._authenticate_oidc(
authenticator,
provider_id=provider_id,
store_refresh_token=False,
)
if settings.auth_method == "oidc":
if not settings.allow_interactive_oidc:
+12 -38
View File
@@ -1,6 +1,5 @@
from rest_framework import serializers
from .data_driven_subdivision import SUPPORTED_CLUSTER_FEATURES
from .models import (
AnalysisGridObservation,
BlockSubdivision,
@@ -108,7 +107,7 @@ class SoilDataTaskResponseSerializer(serializers.Serializer):
"""سریالایزر خروجی وقتی تسک در صف قرار گرفته (۲۰۲)."""
source = serializers.CharField(default="task")
task_id = serializers.CharField()
task_id = serializers.UUIDField()
lon = serializers.FloatField(source="longitude")
lat = serializers.FloatField(source="latitude")
status_url = serializers.CharField(required=False)
@@ -135,42 +134,9 @@ class NdviHealthResponseSerializer(serializers.Serializer):
healthData = NdviHealthDataItemSerializer(many=True)
class RemoteSensingTriggerSerializer(serializers.Serializer):
lon = serializers.DecimalField(max_digits=9, decimal_places=6, required=True)
lat = serializers.DecimalField(max_digits=9, decimal_places=6, required=True)
block_code = serializers.CharField(required=False, allow_blank=True, default="", max_length=64)
start_date = serializers.DateField(required=True)
end_date = serializers.DateField(required=True)
class RemoteSensingFarmRequestSerializer(serializers.Serializer):
farm_uuid = serializers.UUIDField(required=True, help_text="شناسه یکتای مزرعه")
force_refresh = serializers.BooleanField(required=False, default=False)
cluster_count = serializers.IntegerField(required=False, min_value=1, allow_null=True, default=None)
selected_features = serializers.ListField(
child=serializers.CharField(max_length=64),
required=False,
allow_empty=False,
)
def validate(self, attrs):
if attrs["start_date"] > attrs["end_date"]:
raise serializers.ValidationError("start_date نمی‌تواند بعد از end_date باشد.")
selected_features = attrs.get("selected_features") or []
invalid_features = sorted(
feature_name
for feature_name in selected_features
if feature_name not in SUPPORTED_CLUSTER_FEATURES
)
if invalid_features:
raise serializers.ValidationError(
{
"selected_features": [
"ویژگی‌های نامعتبر برای خوشه‌بندی: "
+ ", ".join(invalid_features)
]
}
)
return attrs
class RemoteSensingResultQuerySerializer(RemoteSensingTriggerSerializer):
page = serializers.IntegerField(required=False, min_value=1, default=1)
page_size = serializers.IntegerField(required=False, min_value=1, max_value=200, default=100)
@@ -323,7 +289,15 @@ class RemoteSensingRunStatusResponseSerializer(serializers.Serializer):
status = serializers.CharField()
source = serializers.CharField()
run = RemoteSensingRunSerializer()
task_id = serializers.CharField(allow_blank=True, allow_null=True, required=False)
task_id = serializers.UUIDField(allow_null=True, required=False)
location = SoilLocationResponseSerializer(required=False)
block_code = serializers.CharField(allow_blank=True, required=False)
chunk_size_sqm = serializers.IntegerField(allow_null=True, required=False)
temporal_extent = serializers.JSONField(required=False)
summary = RemoteSensingSummarySerializer(required=False)
cells = RemoteSensingCellObservationSerializer(many=True, required=False)
subdivision_result = RemoteSensingSubdivisionResultSerializer(allow_null=True, required=False)
pagination = serializers.JSONField(required=False)
class RemoteSensingRunResultResponseSerializer(serializers.Serializer):
+95
View File
@@ -1,9 +1,18 @@
from decimal import Decimal
from io import StringIO
import os
from unittest.mock import Mock, patch
from django.core.management import call_command
from django.test import SimpleTestCase
from config.proxy import resolve_requests_proxy_url
from location_data.openeo_service import (
OpenEOConnectionSettings,
_resolve_openeo_proxy_url_from_env,
build_empty_metric_payload,
connect_openeo,
is_openeo_auth_configured,
linear_to_db,
merge_metric_results,
parse_aggregate_spatial_response,
@@ -64,3 +73,89 @@ class OpenEOServiceParsingTests(SimpleTestCase):
self.assertEqual(target["cell-1"]["ndvi"], 0.5)
self.assertEqual(target["cell-2"]["ndwi"], 0.2)
self.assertIn("soil_vv_db", target["cell-2"])
class OpenEOConnectionTests(SimpleTestCase):
    """Tests for proxy URL resolution, auth-configuration checks, and the
    connect_openeo session wiring (no real network access: env vars and
    sys.modules are patched)."""

    def test_default_openeo_proxy_url_uses_proxychains_endpoint_without_wrapping_process(self):
        # With proxychains disabled (ENABLE_PROXYCHAINS=0) and OPENEO_PROXY_URL left at
        # its default, the resolver should still derive a URL from the PROXYCHAINS_* vars
        # (note the socks4 -> socks4a scheme in the expected value).
        with patch.dict(
            os.environ,
            {
                "ENABLE_PROXYCHAINS": "0",
                "PROXYCHAINS_PROXY_TYPE": "socks4",
                "PROXYCHAINS_PROXY_HOST": "host.docker.internal",
                "PROXYCHAINS_PROXY_PORT": "10808",
                "OPENEO_PROXY_URL": "socks5h://host.docker.internal:10808",
            },
            clear=False,
        ):
            self.assertEqual(
                _resolve_openeo_proxy_url_from_env(),
                "socks4a://host.docker.internal:10808",
            )

    def test_requests_proxy_is_disabled_when_proxychains_targets_same_endpoint(self):
        # When proxychains is enabled and points at the same host:port as the requested
        # proxy URL, resolve_requests_proxy_url returns "" (avoids double-proxying).
        with patch.dict(
            os.environ,
            {
                "ENABLE_PROXYCHAINS": "1",
                "PROXYCHAINS_PROXY_TYPE": "socks4",
                "PROXYCHAINS_PROXY_HOST": "host.docker.internal",
                "PROXYCHAINS_PROXY_PORT": "10808",
            },
            clear=False,
        ):
            self.assertEqual(
                resolve_requests_proxy_url("socks5h://host.docker.internal:10808"),
                "",
            )

    def test_is_openeo_auth_configured_for_client_credentials(self):
        # client_credentials is configured once both client_id and client_secret are set.
        self.assertTrue(
            is_openeo_auth_configured(
                OpenEOConnectionSettings(
                    auth_method="client_credentials",
                    client_id="client-id",
                    client_secret="client-secret",
                )
            )
        )

    def test_is_openeo_auth_configured_for_password(self):
        # password auth is configured once both username and password are set.
        self.assertTrue(
            is_openeo_auth_configured(
                OpenEOConnectionSettings(
                    auth_method="password",
                    username="user@example.com",
                    password="secret",
                )
            )
        )

    def test_verify_openeo_auth_command_skips_when_unconfigured(self):
        # --skip-if-unconfigured must exit cleanly with a warning instead of erroring.
        # NOTE(review): assumes the surrounding test environment has no openEO
        # credentials configured — confirm this holds in CI.
        stdout = StringIO()
        call_command("verify_openeo_auth", "--skip-if-unconfigured", stdout=stdout)
        self.assertIn("openEO auth check skipped", stdout.getvalue())

    def test_connect_openeo_applies_proxy_to_session(self):
        # Stub the `openeo` module so connect_openeo runs without the dependency,
        # then verify the session passed to openeo.connect carries the proxy config,
        # the timeout floor, and trust_env disabled (env proxies must not override).
        connection = Mock()
        connection.authenticate_oidc_resource_owner_password_credentials.return_value = connection
        openeo_module = Mock()
        openeo_module.connect.return_value = connection
        settings = OpenEOConnectionSettings(
            backend_url="https://openeofed.dataspace.copernicus.eu",
            auth_method="password",
            timeout_seconds=123,
            username="user@example.com",
            password="secret",
            proxy_url="socks5h://127.0.0.1:10808",
        )
        with patch.dict("sys.modules", {"openeo": openeo_module}):
            connect_openeo(settings)
        self.assertEqual(openeo_module.connect.call_args.kwargs["default_timeout"], 123)
        session = openeo_module.connect.call_args.kwargs["session"]
        self.assertEqual(session.proxies["https"], "socks5h://127.0.0.1:10808")
        self.assertFalse(session.trust_env)
+54 -49
View File
@@ -1,10 +1,13 @@
from datetime import date
from datetime import date, timedelta
from types import SimpleNamespace
from unittest.mock import patch
from django.test import TestCase, override_settings
from django.utils import timezone
from rest_framework.test import APIClient
from farm_data.models import SensorData
from location_data.data_driven_subdivision import DEFAULT_CLUSTER_FEATURES
from location_data.models import (
AnalysisGridCell,
AnalysisGridObservation,
@@ -39,6 +42,13 @@ class RemoteSensingApiTests(TestCase):
)
self.location.set_input_block_count(1)
self.location.save(update_fields=["input_block_count", "block_layout", "updated_at"])
self.farm = SensorData.objects.create(
farm_uuid="11111111-1111-1111-1111-111111111111",
center_location=self.location,
payload={},
)
self.temporal_end = timezone.localdate() - timedelta(days=1)
self.temporal_start = self.temporal_end - timedelta(days=30)
self.subdivision = BlockSubdivision.objects.create(
soil_location=self.location,
block_code="block-1",
@@ -51,10 +61,7 @@ class RemoteSensingApiTests(TestCase):
response = self.client.post(
"/remote-sensing/",
data={
"lat": 35.7000,
"lon": 51.4000,
"start_date": "2025-01-01",
"end_date": "2025-01-31",
"farm_uuid": "22222222-2222-2222-2222-222222222222",
},
format="json",
)
@@ -64,16 +71,12 @@ class RemoteSensingApiTests(TestCase):
@patch("location_data.views.run_remote_sensing_analysis_task.delay")
def test_post_remote_sensing_enqueues_task_and_returns_processing(self, mock_delay):
mock_delay.return_value = SimpleNamespace(id="task-123")
mock_delay.return_value = SimpleNamespace(id="e723ba3e-c53c-401b-b3a0-5f7013c7b401")
response = self.client.post(
"/remote-sensing/",
data={
"lat": 35.6892,
"lon": 51.3890,
"block_code": "block-1",
"start_date": "2025-01-01",
"end_date": "2025-01-31",
"farm_uuid": str(self.farm.farm_uuid),
"force_refresh": False,
},
format="json",
@@ -83,36 +86,34 @@ class RemoteSensingApiTests(TestCase):
payload = response.json()["data"]
self.assertEqual(payload["status"], "processing")
self.assertEqual(payload["source"], "processing")
self.assertEqual(payload["task_id"], "task-123")
self.assertEqual(payload["block_code"], "block-1")
self.assertEqual(payload["task_id"], "e723ba3e-c53c-401b-b3a0-5f7013c7b401")
self.assertEqual(payload["block_code"], "")
self.assertEqual(payload["summary"]["cell_count"], 0)
run = RemoteSensingRun.objects.get(id=payload["run"]["id"])
self.assertEqual(run.block_code, "block-1")
self.assertEqual(run.block_code, "")
self.assertEqual(run.temporal_start, self.temporal_start)
self.assertEqual(run.temporal_end, self.temporal_end)
self.assertEqual(run.status, RemoteSensingRun.STATUS_PENDING)
self.assertEqual(run.metadata["stage"], "queued")
self.assertEqual(run.metadata["selected_features"], [])
self.assertEqual(run.metadata["selected_features"], DEFAULT_CLUSTER_FEATURES)
mock_delay.assert_called_once()
def test_get_remote_sensing_returns_processing_when_run_exists_without_results(self):
RemoteSensingRun.objects.create(
soil_location=self.location,
block_subdivision=self.subdivision,
block_code="block-1",
block_code="",
chunk_size_sqm=900,
temporal_start=date(2025, 1, 1),
temporal_end=date(2025, 1, 31),
temporal_start=self.temporal_start,
temporal_end=self.temporal_end,
status=RemoteSensingRun.STATUS_RUNNING,
metadata={"task_id": "task-123"},
metadata={"task_id": "e723ba3e-c53c-401b-b3a0-5f7013c7b401"},
)
response = self.client.get(
"/remote-sensing/",
data={
"lat": 35.6892,
"lon": 51.3890,
"block_code": "block-1",
"start_date": "2025-01-01",
"end_date": "2025-01-31",
"farm_uuid": str(self.farm.farm_uuid),
},
)
@@ -127,16 +128,16 @@ class RemoteSensingApiTests(TestCase):
run = RemoteSensingRun.objects.create(
soil_location=self.location,
block_subdivision=self.subdivision,
block_code="block-1",
block_code="",
chunk_size_sqm=900,
temporal_start=date(2025, 1, 1),
temporal_end=date(2025, 1, 31),
temporal_start=self.temporal_start,
temporal_end=self.temporal_end,
status=RemoteSensingRun.STATUS_SUCCESS,
)
cell = AnalysisGridCell.objects.create(
soil_location=self.location,
block_subdivision=self.subdivision,
block_code="block-1",
block_code="",
cell_code="cell-1",
chunk_size_sqm=900,
geometry=self.boundary,
@@ -146,8 +147,8 @@ class RemoteSensingApiTests(TestCase):
AnalysisGridObservation.objects.create(
cell=cell,
run=run,
temporal_start=date(2025, 1, 1),
temporal_end=date(2025, 1, 31),
temporal_start=self.temporal_start,
temporal_end=self.temporal_end,
ndvi=0.61,
ndwi=0.22,
lst_c=24.5,
@@ -161,11 +162,7 @@ class RemoteSensingApiTests(TestCase):
response = self.client.get(
"/remote-sensing/",
data={
"lat": 35.6892,
"lon": 51.3890,
"block_code": "block-1",
"start_date": "2025-01-01",
"end_date": "2025-01-31",
"farm_uuid": str(self.farm.farm_uuid),
},
)
@@ -183,15 +180,19 @@ class RemoteSensingApiTests(TestCase):
run = RemoteSensingRun.objects.create(
soil_location=self.location,
block_subdivision=self.subdivision,
block_code="block-1",
block_code="",
chunk_size_sqm=900,
temporal_start=date(2025, 1, 1),
temporal_end=date(2025, 1, 31),
temporal_start=self.temporal_start,
temporal_end=self.temporal_end,
status=RemoteSensingRun.STATUS_SUCCESS,
metadata={"stage": "completed", "selected_features": ["ndvi"]},
)
response = self.client.get(f"/remote-sensing/runs/{run.id}/status/")
task_id = "e723ba3e-c53c-401b-b3a0-5f7013c7b401"
run.metadata = {**run.metadata, "task_id": task_id}
run.save(update_fields=["metadata", "updated_at"])
response = self.client.get(f"/remote-sensing/runs/{task_id}/status/")
self.assertEqual(response.status_code, 200)
payload = response.json()["data"]
@@ -204,17 +205,17 @@ class RemoteSensingApiTests(TestCase):
run = RemoteSensingRun.objects.create(
soil_location=self.location,
block_subdivision=self.subdivision,
block_code="block-1",
block_code="",
chunk_size_sqm=900,
temporal_start=date(2025, 1, 1),
temporal_end=date(2025, 1, 31),
temporal_start=self.temporal_start,
temporal_end=self.temporal_end,
status=RemoteSensingRun.STATUS_SUCCESS,
metadata={"stage": "completed"},
)
cell = AnalysisGridCell.objects.create(
soil_location=self.location,
block_subdivision=self.subdivision,
block_code="block-1",
block_code="",
cell_code="cell-1",
chunk_size_sqm=900,
geometry=self.boundary,
@@ -224,8 +225,8 @@ class RemoteSensingApiTests(TestCase):
AnalysisGridObservation.objects.create(
cell=cell,
run=run,
temporal_start=date(2025, 1, 1),
temporal_end=date(2025, 1, 31),
temporal_start=self.temporal_start,
temporal_end=self.temporal_end,
ndvi=0.61,
ndwi=0.22,
lst_c=24.5,
@@ -239,10 +240,10 @@ class RemoteSensingApiTests(TestCase):
soil_location=self.location,
run=run,
block_subdivision=self.subdivision,
block_code="block-1",
block_code="",
chunk_size_sqm=900,
temporal_start=date(2025, 1, 1),
temporal_end=date(2025, 1, 31),
temporal_start=self.temporal_start,
temporal_end=self.temporal_end,
cluster_count=1,
selected_features=["ndvi"],
metadata={"used_cell_count": 1, "skipped_cell_count": 0},
@@ -255,7 +256,11 @@ class RemoteSensingApiTests(TestCase):
scaled_feature_values={"ndvi": 0.0},
)
response = self.client.get(f"/remote-sensing/runs/{run.id}/result/", data={"page": 1, "page_size": 10})
task_id = "e723ba3e-c53c-401b-b3a0-5f7013c7b401"
run.metadata = {**run.metadata, "task_id": task_id}
run.save(update_fields=["metadata", "updated_at"])
response = self.client.get(f"/remote-sensing/runs/{task_id}/status/", data={"page": 1, "page_size": 10})
self.assertEqual(response.status_code, 200)
payload = response.json()["data"]
+1 -3
View File
@@ -3,7 +3,6 @@ from django.urls import path
from .views import (
NdviHealthView,
RemoteSensingAnalysisView,
RemoteSensingRunResultView,
RemoteSensingRunStatusView,
SoilDataView,
)
@@ -11,7 +10,6 @@ from .views import (
urlpatterns = [
path("", SoilDataView.as_view(), name="soil-data"),
path("remote-sensing/", RemoteSensingAnalysisView.as_view(), name="remote-sensing"),
path("remote-sensing/runs/<int:run_id>/status/", RemoteSensingRunStatusView.as_view(), name="remote-sensing-run-status"),
path("remote-sensing/runs/<int:run_id>/result/", RemoteSensingRunResultView.as_view(), name="remote-sensing-run-result"),
path("remote-sensing/runs/<uuid:run_id>/status/", RemoteSensingRunStatusView.as_view(), name="remote-sensing-run-status"),
path("ndvi-health/", NdviHealthView.as_view(), name="ndvi-health"),
]
+147 -184
View File
@@ -1,10 +1,14 @@
from datetime import timedelta
from django.apps import apps
from django.core.paginator import EmptyPage, Paginator
from django.db.models import Avg
from django.db import transaction
from django.utils import timezone
from rest_framework import status
from drf_spectacular.utils import (
OpenApiExample,
OpenApiParameter,
OpenApiResponse,
extend_schema,
inline_serializer,
@@ -25,19 +29,20 @@ from .models import (
RemoteSensingSubdivisionResult,
SoilLocation,
)
from farm_data.models import SensorData
from .data_driven_subdivision import DEFAULT_CLUSTER_FEATURES
from .serializers import (
BlockSubdivisionSerializer,
NdviHealthRequestSerializer,
NdviHealthResponseSerializer,
RemoteSensingCellObservationSerializer,
RemoteSensingResponseSerializer,
RemoteSensingResultQuerySerializer,
RemoteSensingRunResultResponseSerializer,
RemoteSensingFarmRequestSerializer,
RemoteSensingRunSerializer,
RemoteSensingRunStatusResponseSerializer,
RemoteSensingSummarySerializer,
RemoteSensingSubdivisionResultSerializer,
RemoteSensingTriggerSerializer,
SoilDataRequestSerializer,
SoilLocationResponseSerializer,
)
@@ -90,7 +95,7 @@ RemoteSensingQueuedEnvelopeSerializer = build_envelope_serializer(
"summary": RemoteSensingSummarySerializer(),
"cells": drf_serializers.JSONField(),
"run": drf_serializers.JSONField(allow_null=True),
"task_id": drf_serializers.CharField(),
"task_id": drf_serializers.UUIDField(),
},
),
)
@@ -98,19 +103,13 @@ RemoteSensingRunStatusEnvelopeSerializer = build_envelope_serializer(
"RemoteSensingRunStatusEnvelopeSerializer",
RemoteSensingRunStatusResponseSerializer,
)
RemoteSensingRunResultEnvelopeSerializer = build_envelope_serializer(
"RemoteSensingRunResultEnvelopeSerializer",
RemoteSensingRunResultResponseSerializer,
)
class SoilDataView(APIView):
"""
ثبت مختصات گوشه‌های مزرعه و بلوک‌های تعریف‌شده توسط کشاورز.
"""
@extend_schema(
tags=["Soil Data"],
tags=["Location Data"],
summary="خواندن ساختار مزرعه و بلوک‌ها (GET)",
description="با ارسال lat و lon، ساختار ذخیره‌شده مزرعه، بلوک‌ها و آخرین خلاصه سنجش‌ازدور هر بلوک بازگردانده می‌شود.",
parameters=[
@@ -175,7 +174,7 @@ class SoilDataView(APIView):
)
@extend_schema(
tags=["Soil Data"],
tags=["Location Data"],
summary="ثبت مزرعه و بلوک‌های کشاورز (POST)",
description="مختصات گوشه‌های مزرعه و boundary هر بلوک کشاورز ذخیره می‌شود. هیچ subdivision سنکرونی اجرا نمی‌شود.",
request=SoilDataRequestSerializer,
@@ -306,7 +305,7 @@ class SoilDataView(APIView):
class NdviHealthView(APIView):
@extend_schema(
tags=["Soil Data"],
tags=["Location Data"],
summary="دریافت NDVI سلامت مزرعه",
description="با دریافت farm_uuid، داده NDVI سلامت پوشش گیاهی مزرعه را به صورت مستقل از dashboard برمی گرداند.",
request=NdviHealthRequestSerializer,
@@ -359,10 +358,10 @@ class NdviHealthView(APIView):
class RemoteSensingAnalysisView(APIView):
@extend_schema(
tags=["Soil Data"],
tags=["Location Data"],
summary="اجرای async تحلیل سنجش‌ازدور و subdivision داده‌محور",
description="برای location موجود، pipeline کامل grid + openEO + observation persistence + KMeans clustering در Celery صف می‌شود و sync اجرا نمی‌شود.",
request=RemoteSensingTriggerSerializer,
request=RemoteSensingFarmRequestSerializer,
responses={
202: build_response(
RemoteSensingQueuedEnvelopeSerializer,
@@ -381,21 +380,15 @@ class RemoteSensingAnalysisView(APIView):
OpenApiExample(
"نمونه درخواست remote sensing",
value={
"lat": 35.6892,
"lon": 51.3890,
"block_code": "block-1",
"start_date": "2025-01-01",
"end_date": "2025-01-31",
"farm_uuid": "11111111-1111-1111-1111-111111111111",
"force_refresh": False,
"cluster_count": 3,
"selected_features": ["ndvi", "ndwi", "soil_vv_db"],
},
request_only=True,
),
],
)
def post(self, request):
serializer = RemoteSensingTriggerSerializer(data=request.data)
serializer = RemoteSensingFarmRequestSerializer(data=request.data)
if not serializer.is_valid():
return Response(
{"code": 400, "msg": "داده نامعتبر.", "data": serializer.errors},
@@ -403,37 +396,41 @@ class RemoteSensingAnalysisView(APIView):
)
payload = serializer.validated_data
location = _get_location_by_lat_lon(payload["lat"], payload["lon"], prefetch=True)
farm = SensorData.objects.select_related("center_location").filter(farm_uuid=payload["farm_uuid"]).first()
location = getattr(farm, "center_location", None)
if location is None:
return Response(
{"code": 404, "msg": "location پیدا نشد.", "data": None},
status=status.HTTP_404_NOT_FOUND,
)
block_code = str(payload.get("block_code", "") or "").strip()
temporal_end = timezone.localdate() - timedelta(days=1)
temporal_start = temporal_end - timedelta(days=30)
run = RemoteSensingRun.objects.create(
soil_location=location,
block_code=block_code,
chunk_size_sqm=_resolve_chunk_size_for_location(location, block_code),
temporal_start=payload["start_date"],
temporal_end=payload["end_date"],
block_code="",
chunk_size_sqm=_resolve_chunk_size_for_location(location, ""),
temporal_start=temporal_start,
temporal_end=temporal_end,
status=RemoteSensingRun.STATUS_PENDING,
metadata={
"requested_via": "api",
"status_label": "pending",
"cluster_count": payload.get("cluster_count"),
"selected_features": payload.get("selected_features") or [],
"requested_cluster_count": None,
"selected_features": list(DEFAULT_CLUSTER_FEATURES),
"farm_uuid": str(payload["farm_uuid"]),
"scope": "all_blocks",
},
)
task_result = run_remote_sensing_analysis_task.delay(
soil_location_id=location.id,
block_code=block_code,
temporal_start=payload["start_date"].isoformat(),
temporal_end=payload["end_date"].isoformat(),
block_code="",
temporal_start=temporal_start.isoformat(),
temporal_end=temporal_end.isoformat(),
force_refresh=payload.get("force_refresh", False),
run_id=run.id,
cluster_count=payload.get("cluster_count"),
selected_features=payload.get("selected_features"),
cluster_count=None,
selected_features=list(DEFAULT_CLUSTER_FEATURES),
)
run.metadata = {**(run.metadata or {}), "task_id": task_result.id}
run.save(update_fields=["metadata", "updated_at"])
@@ -443,11 +440,11 @@ class RemoteSensingAnalysisView(APIView):
"status": "processing",
"source": "processing",
"location": location_data,
"block_code": block_code,
"block_code": "",
"chunk_size_sqm": run.chunk_size_sqm,
"temporal_extent": {
"start_date": payload["start_date"].isoformat(),
"end_date": payload["end_date"].isoformat(),
"start_date": temporal_start.isoformat(),
"end_date": temporal_end.isoformat(),
},
"summary": _empty_remote_sensing_summary(),
"cells": [],
@@ -460,15 +457,11 @@ class RemoteSensingAnalysisView(APIView):
)
@extend_schema(
tags=["Soil Data"],
tags=["Location Data"],
summary="خواندن نتایج cache شده سنجش‌ازدور و subdivision",
description="فقط نتایج ذخیره‌شده remote sensing و clustering را برمی‌گرداند و هیچ پردازش sync اجرا نمی‌کند.",
parameters=[
{"name": "lat", "in": "query", "required": True, "schema": {"type": "number"}},
{"name": "lon", "in": "query", "required": True, "schema": {"type": "number"}},
{"name": "block_code", "in": "query", "required": False, "schema": {"type": "string"}},
{"name": "start_date", "in": "query", "required": True, "schema": {"type": "string", "format": "date"}},
{"name": "end_date", "in": "query", "required": True, "schema": {"type": "string", "format": "date"}},
{"name": "farm_uuid", "in": "query", "required": True, "schema": {"type": "string", "format": "uuid"}},
{"name": "page", "in": "query", "required": False, "schema": {"type": "integer", "default": 1}},
{"name": "page_size", "in": "query", "required": False, "schema": {"type": "integer", "default": 100}},
],
@@ -488,7 +481,7 @@ class RemoteSensingAnalysisView(APIView):
},
)
def get(self, request):
serializer = RemoteSensingResultQuerySerializer(data=request.query_params)
serializer = RemoteSensingFarmRequestSerializer(data=request.query_params)
if not serializer.is_valid():
return Response(
{"code": 400, "msg": "داده نامعتبر.", "data": serializer.errors},
@@ -496,31 +489,34 @@ class RemoteSensingAnalysisView(APIView):
)
payload = serializer.validated_data
location = _get_location_by_lat_lon(payload["lat"], payload["lon"], prefetch=True)
farm = SensorData.objects.select_related("center_location").filter(farm_uuid=payload["farm_uuid"]).first()
location = getattr(farm, "center_location", None)
if location is None:
return Response(
{"code": 404, "msg": "location پیدا نشد.", "data": None},
status=status.HTTP_404_NOT_FOUND,
)
block_code = str(payload.get("block_code", "") or "").strip()
temporal_end = timezone.localdate() - timedelta(days=1)
temporal_start = temporal_end - timedelta(days=30)
block_code = ""
observations = _get_remote_sensing_observations(
location=location,
block_code=block_code,
start_date=payload["start_date"],
end_date=payload["end_date"],
start_date=temporal_start,
end_date=temporal_end,
)
run = _get_latest_remote_sensing_run(
location=location,
block_code=block_code,
start_date=payload["start_date"],
end_date=payload["end_date"],
start_date=temporal_start,
end_date=temporal_end,
)
subdivision_result = _get_remote_sensing_subdivision_result(
location=location,
block_code=block_code,
start_date=payload["start_date"],
end_date=payload["end_date"],
start_date=temporal_start,
end_date=temporal_end,
)
if not observations.exists():
@@ -532,11 +528,11 @@ class RemoteSensingAnalysisView(APIView):
"status": "processing" if processing else "not_found",
"source": "processing" if processing else "database",
"location": SoilLocationResponseSerializer(location).data,
"block_code": block_code,
"block_code": "",
"chunk_size_sqm": getattr(run, "chunk_size_sqm", None),
"temporal_extent": {
"start_date": payload["start_date"].isoformat(),
"end_date": payload["end_date"].isoformat(),
"start_date": temporal_start.isoformat(),
"end_date": temporal_end.isoformat(),
},
"summary": _empty_remote_sensing_summary(),
"cells": [],
@@ -576,11 +572,11 @@ class RemoteSensingAnalysisView(APIView):
"status": "success",
"source": "database",
"location": SoilLocationResponseSerializer(location).data,
"block_code": block_code,
"block_code": "",
"chunk_size_sqm": observations.first().cell.chunk_size_sqm,
"temporal_extent": {
"start_date": payload["start_date"].isoformat(),
"end_date": payload["end_date"].isoformat(),
"start_date": temporal_start.isoformat(),
"end_date": temporal_end.isoformat(),
},
"summary": _build_remote_sensing_summary(observations),
"cells": cells_data,
@@ -597,54 +593,22 @@ class RemoteSensingAnalysisView(APIView):
class RemoteSensingRunStatusView(APIView):
@extend_schema(
tags=["Soil Data"],
tags=["Location Data"],
summary="وضعیت run تحلیل سنجش‌ازدور",
description="وضعیت async pipeline را با شناسه run برمی‌گرداند.",
responses={
200: build_response(
RemoteSensingRunStatusEnvelopeSerializer,
"وضعیت run بازگردانده شد.",
),
404: build_response(
SoilErrorResponseSerializer,
"run موردنظر پیدا نشد.",
),
},
)
def get(self, request, run_id):
run = RemoteSensingRun.objects.filter(pk=run_id).select_related("soil_location").first()
if run is None:
return Response(
{"code": 404, "msg": "run پیدا نشد.", "data": None},
status=status.HTTP_404_NOT_FOUND,
)
task_id = (run.metadata or {}).get("task_id")
response_payload = {
"status": RemoteSensingRunSerializer(run).data["status_label"],
"source": "database",
"run": RemoteSensingRunSerializer(run).data,
"task_id": task_id,
}
return Response(
{"code": 200, "msg": "success", "data": response_payload},
status=status.HTTP_200_OK,
)
class RemoteSensingRunResultView(APIView):
@extend_schema(
tags=["Soil Data"],
summary="نتیجه نهایی run تحلیل سنجش‌ازدور",
description="نتایج observation و subdivision داده‌محور را با شناسه run برمی‌گرداند.",
description="وضعیت async pipeline را با task_id از نوع UUID برمی‌گرداند. این task_id همان شناسه تسک Celery ذخیره‌شده در metadata.run است.",
parameters=[
{"name": "page", "in": "query", "required": False, "schema": {"type": "integer", "default": 1}},
{"name": "page_size", "in": "query", "required": False, "schema": {"type": "integer", "default": 100}},
OpenApiParameter(
name="run_id",
type={"type": "string", "format": "uuid"},
location=OpenApiParameter.PATH,
required=True,
description="شناسه UUID تسک async (task_id).",
),
],
responses={
200: build_response(
RemoteSensingRunResultEnvelopeSerializer,
"نتیجه run بازگردانده شد.",
RemoteSensingRunStatusEnvelopeSerializer,
"وضعیت run بازگردانده شد و بعد از اتمام، نتیجه نهایی نیز از همین route برگردانده می‌شود.",
),
404: build_response(
SoilErrorResponseSerializer,
@@ -655,93 +619,92 @@ class RemoteSensingRunResultView(APIView):
def get(self, request, run_id):
page = _safe_positive_int(request.query_params.get("page"), default=1)
page_size = min(_safe_positive_int(request.query_params.get("page_size"), default=100), MAX_REMOTE_SENSING_PAGE_SIZE)
run = (
RemoteSensingRun.objects.filter(pk=run_id)
.select_related("soil_location")
.first()
)
run = RemoteSensingRun.objects.filter(metadata__task_id=str(run_id)).select_related("soil_location").first()
if run is None:
return Response(
{"code": 404, "msg": "run پیدا نشد.", "data": None},
{"code": 404, "msg": "run با این task_id پیدا نشد.", "data": None},
status=status.HTTP_404_NOT_FOUND,
)
location = _get_location_by_lat_lon(run.soil_location.latitude, run.soil_location.longitude, prefetch=True)
observations = _get_remote_sensing_observations(
location=run.soil_location,
block_code=run.block_code,
start_date=run.temporal_start,
end_date=run.temporal_end,
)
subdivision_result = getattr(run, "subdivision_result", None)
if not observations.exists():
response_payload = {
"status": RemoteSensingRunSerializer(run).data["status_label"],
"source": "processing" if run.status in {RemoteSensingRun.STATUS_PENDING, RemoteSensingRun.STATUS_RUNNING} else "database",
"location": SoilLocationResponseSerializer(location).data,
"block_code": run.block_code,
"chunk_size_sqm": run.chunk_size_sqm,
"temporal_extent": {
"start_date": run.temporal_start.isoformat() if run.temporal_start else None,
"end_date": run.temporal_end.isoformat() if run.temporal_end else None,
},
"summary": _empty_remote_sensing_summary(),
"cells": [],
"run": RemoteSensingRunSerializer(run).data,
"subdivision_result": None,
}
return Response(
{"code": 200, "msg": "success", "data": response_payload},
status=status.HTTP_200_OK,
)
paginated_observations = _paginate_observations(
observations,
page=page,
page_size=page_size,
)
paginated_assignments = []
pagination = {"cells": paginated_observations["pagination"]}
if subdivision_result is not None:
paginated = _paginate_assignments(
subdivision_result,
page=page,
page_size=page_size,
)
paginated_assignments = paginated["items"]
pagination["assignments"] = paginated["pagination"]
subdivision_data = None
if subdivision_result is not None:
subdivision_data = RemoteSensingSubdivisionResultSerializer(
subdivision_result,
context={"paginated_assignments": paginated_assignments},
).data
response_payload = {
"status": RemoteSensingRunSerializer(run).data["status_label"],
"source": "database",
"location": SoilLocationResponseSerializer(location).data,
"block_code": run.block_code,
"chunk_size_sqm": run.chunk_size_sqm,
"temporal_extent": {
"start_date": run.temporal_start.isoformat() if run.temporal_start else None,
"end_date": run.temporal_end.isoformat() if run.temporal_end else None,
},
"summary": _build_remote_sensing_summary(observations),
"cells": RemoteSensingCellObservationSerializer(paginated_observations["items"], many=True).data,
"run": RemoteSensingRunSerializer(run).data,
"subdivision_result": subdivision_data,
}
if pagination is not None:
response_payload["pagination"] = pagination
response_payload = _build_remote_sensing_run_status_payload(run, page=page, page_size=page_size)
return Response(
{"code": 200, "msg": "success", "data": response_payload},
status=status.HTTP_200_OK,
)
def _build_remote_sensing_run_status_payload(run: RemoteSensingRun, *, page: int, page_size: int) -> dict:
run_data = RemoteSensingRunSerializer(run).data
task_id = (run.metadata or {}).get("task_id")
if run.status in {RemoteSensingRun.STATUS_PENDING, RemoteSensingRun.STATUS_RUNNING}:
return {
"status": run_data["status_label"],
"source": "database",
"run": run_data,
"task_id": task_id,
}
location = _get_location_by_lat_lon(run.soil_location.latitude, run.soil_location.longitude, prefetch=True)
observations = _get_remote_sensing_observations(
location=run.soil_location,
block_code=run.block_code,
start_date=run.temporal_start,
end_date=run.temporal_end,
)
subdivision_result = getattr(run, "subdivision_result", None)
response_payload = {
"status": run_data["status_label"],
"source": "database",
"run": run_data,
"task_id": task_id,
"location": SoilLocationResponseSerializer(location).data,
"block_code": run.block_code,
"chunk_size_sqm": run.chunk_size_sqm,
"temporal_extent": {
"start_date": run.temporal_start.isoformat() if run.temporal_start else None,
"end_date": run.temporal_end.isoformat() if run.temporal_end else None,
},
"summary": _empty_remote_sensing_summary(),
"cells": [],
"subdivision_result": None,
}
if not observations.exists():
return response_payload
paginated_observations = _paginate_observations(
observations,
page=page,
page_size=page_size,
)
paginated_assignments = []
pagination = {"cells": paginated_observations["pagination"]}
if subdivision_result is not None:
paginated = _paginate_assignments(
subdivision_result,
page=page,
page_size=page_size,
)
paginated_assignments = paginated["items"]
pagination["assignments"] = paginated["pagination"]
response_payload["summary"] = _build_remote_sensing_summary(observations)
response_payload["cells"] = RemoteSensingCellObservationSerializer(
paginated_observations["items"],
many=True,
).data
response_payload["pagination"] = pagination
if subdivision_result is not None:
response_payload["subdivision_result"] = RemoteSensingSubdivisionResultSerializer(
subdivision_result,
context={"paginated_assignments": paginated_assignments},
).data
return response_payload
def _get_location_by_lat_lon(lat, lon, *, prefetch: bool = False):
lat_rounded = round(lat, 6)
lon_rounded = round(lon, 6)
@@ -869,18 +832,18 @@ def _clear_block_analysis_state(
subdivision.elbow_plot = None
def _resolve_chunk_size_for_location(location: SoilLocation, block_code: str) -> int | None:
def _resolve_chunk_size_for_location(location: SoilLocation, block_code: str) -> int:
if block_code:
subdivision = location.block_subdivisions.filter(block_code=block_code).first()
if subdivision is not None:
return subdivision.chunk_size_sqm
return int(subdivision.chunk_size_sqm or 900)
block_layout = location.block_layout or {}
if not block_code:
return block_layout.get("analysis_grid_summary", {}).get("chunk_size_sqm")
return int(block_layout.get("analysis_grid_summary", {}).get("chunk_size_sqm") or 900)
for block in block_layout.get("blocks", []):
if block.get("block_code") == block_code:
return block.get("analysis_grid_summary", {}).get("chunk_size_sqm")
return None
return int(block.get("analysis_grid_summary", {}).get("chunk_size_sqm") or 900)
return 900
def _get_remote_sensing_observations(*, location, block_code: str, start_date, end_date):