This commit is contained in:
2026-05-10 02:02:48 +03:30
parent cead7dafe2
commit 2d1f7da89e
30 changed files with 1195 additions and 320 deletions
+24 -3
View File
@@ -19,9 +19,10 @@ AVALAI_BASE_URL=https://api.avalai.ir/v1
GAPGPT_API_KEY=  # SECURITY(review): a live-looking API key was committed here (present in both old and new revisions) — rotate that key immediately and supply it via an untracked secrets file, never a committed .env
GAPGPT_BASE_URL=https://api.gapgpt.app/v1 GAPGPT_BASE_URL=https://api.gapgpt.app/v1
# Weather API (Open-Meteo) # Weather API (OpenWeather One Call 3.0)
WEATHER_API_BASE_URL=https://api.open-meteo.com/v1/forecast WEATHER_API_BASE_URL=https://api.openweathermap.org/data/3.0/onecall
WEATHER_API_KEY= WEATHER_API_KEY=
WEATHER_PROXY_URL=socks5h://host.docker.internal:10808
# Soil data provider: soilgrids | mock # Soil data provider: soilgrids | mock
@@ -29,6 +30,26 @@ SOIL_DATA_PROVIDER=soilgrids
SOIL_MOCK_DELAY_SECONDS=0.8 SOIL_MOCK_DELAY_SECONDS=0.8
SOILGRIDS_TIMEOUT_SECONDS=60 SOILGRIDS_TIMEOUT_SECONDS=60
WEATHER_DATA_PROVIDER=open-meteo WEATHER_DATA_PROVIDER=openweather
WEATHER_MOCK_DELAY_SECONDS=0.8 WEATHER_MOCK_DELAY_SECONDS=0.8
WEATHER_TIMEOUT_SECONDS=60 WEATHER_TIMEOUT_SECONDS=60
# Optional: route all outbound app traffic through proxychains inside containers
ENABLE_PROXYCHAINS=0
PROXYCHAINS_PROXY_TYPE=socks4
PROXYCHAINS_PROXY_HOST=host.docker.internal
PROXYCHAINS_PROXY_PORT=10808
PROXYCHAINS_CHAIN_MODE=strict_chain
# openEO remote sensing auth
OPENEO_BACKEND_URL=https://openeofed.dataspace.copernicus.eu
OPENEO_TIMEOUT_SECONDS=60
# Use `password` when filling OPENEO_USERNAME/OPENEO_PASSWORD instead of client credentials.
OPENEO_AUTH_METHOD=client_credentials
OPENEO_AUTH_CLIENT_ID=
OPENEO_AUTH_CLIENT_SECRET=
OPENEO_AUTH_PROVIDER_ID=
OPENEO_USERNAME=
OPENEO_PASSWORD=
OPENEO_ALLOW_INTERACTIVE_OIDC=0
OPENEO_PROXY_URL=socks5h://host.docker.internal:10808
+1
View File
@@ -19,6 +19,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
default-libmysqlclient-dev \ default-libmysqlclient-dev \
build-essential \ build-essential \
pkg-config \ pkg-config \
proxychains4 \
&& rm -rf /var/lib/apt/lists/* && rm -rf /var/lib/apt/lists/*
COPY requirements.txt constraints.txt ./ COPY requirements.txt constraints.txt ./
+1
View File
@@ -19,6 +19,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
default-libmysqlclient-dev \ default-libmysqlclient-dev \
build-essential \ build-essential \
pkg-config \ pkg-config \
proxychains4 \
&& rm -rf /var/lib/apt/lists/* && rm -rf /var/lib/apt/lists/*
COPY requirements.txt constraints.txt /app/ COPY requirements.txt constraints.txt /app/
+2 -2
View File
@@ -67,7 +67,7 @@ def build_envelope_serializer(
def build_task_queue_data_serializer(name, extra_fields=None): def build_task_queue_data_serializer(name, extra_fields=None):
fields = { fields = {
"__module__": __name__, "__module__": __name__,
"task_id": serializers.CharField(), "task_id": serializers.UUIDField(),
"status_url": serializers.CharField(), "status_url": serializers.CharField(),
} }
if extra_fields: if extra_fields:
@@ -86,7 +86,7 @@ def build_task_status_data_serializer(name, result_schema=None):
(serializers.Serializer,), (serializers.Serializer,),
{ {
"__module__": __name__, "__module__": __name__,
"task_id": serializers.CharField(), "task_id": serializers.UUIDField(),
"status": serializers.CharField(), "status": serializers.CharField(),
"message": serializers.CharField(required=False), "message": serializers.CharField(required=False),
"progress": serializers.DictField( "progress": serializers.DictField(
+68
View File
@@ -0,0 +1,68 @@
from __future__ import annotations
import os
from typing import Any
from urllib.parse import urlsplit
def _is_truthy(value: str | None) -> bool:
return (value or "").strip().lower() in {"1", "true", "yes", "on"}
def _normalize_proxy_scheme(scheme: str) -> str:
normalized = (scheme or "").strip().lower()
if normalized == "socks4":
return "socks4a"
if normalized == "socks5":
return "socks5h"
return normalized
def build_proxy_url_from_proxychains_env(*, require_enabled: bool = True) -> str | None:
if require_enabled and not _is_truthy(os.environ.get("ENABLE_PROXYCHAINS")):
return None
scheme = _normalize_proxy_scheme(os.environ.get("PROXYCHAINS_PROXY_TYPE", "socks4"))
host = (os.environ.get("PROXYCHAINS_PROXY_HOST") or "").strip()
port = (os.environ.get("PROXYCHAINS_PROXY_PORT") or "").strip()
if not host or not port:
return None
return f"{scheme}://{host}:{port}"
def _same_proxy_endpoint(left: str, right: str) -> bool:
left_parts = urlsplit(left)
right_parts = urlsplit(right)
return (
left_parts.hostname == right_parts.hostname
and left_parts.port == right_parts.port
)
def resolve_requests_proxy_url(proxy_url: str | None) -> str:
value = (proxy_url or "").strip()
if not value:
return ""
proxychains_url = build_proxy_url_from_proxychains_env()
if proxychains_url and _same_proxy_endpoint(value, proxychains_url):
# Avoid double-proxying requests when the whole process is already wrapped by proxychains.
return ""
return value
def build_requests_proxies(proxy_url: str | None) -> dict[str, str] | None:
value = resolve_requests_proxy_url(proxy_url)
if not value:
return None
return {"http": value, "https": value}
def apply_requests_proxy(session: Any, proxy_url: str | None) -> Any:
proxies = build_requests_proxies(proxy_url)
if not proxies:
return session
session.proxies.update(proxies)
session.trust_env = False
return session
+1 -1
View File
@@ -10,7 +10,7 @@ urlpatterns = [
# --- App APIs --- # --- App APIs ---
path("api/rag/", include("rag.urls")), path("api/rag/", include("rag.urls")),
path("api/farm-alerts/", include("farm_alerts.urls")), path("api/farm-alerts/", include("farm_alerts.urls")),
path("api/soil-data/", include("location_data.urls")), path("api/location-data/", include("location_data.urls")),
path("api/soile/", include("soile.urls")), path("api/soile/", include("soile.urls")),
path("api/farm-data/", include("farm_data.urls")), path("api/farm-data/", include("farm_data.urls")),
path("api/weather/", include("weather.urls")), path("api/weather/", include("weather.urls")),
+2 -1
View File
@@ -7,6 +7,7 @@ from math import exp
from typing import Any from typing import Any
import logging import logging
from django.apps import apps
from django.core.paginator import EmptyPage, Paginator from django.core.paginator import EmptyPage, Paginator
from farm_data.models import SensorData from farm_data.models import SensorData
@@ -275,7 +276,7 @@ def _resolve_plant_simulation_defaults(plant: Any) -> tuple[dict[str, Any] | Non
def build_growth_context(payload: dict[str, Any]) -> GrowthSimulationContext: def build_growth_context(payload: dict[str, Any]) -> GrowthSimulationContext:
plant_name = payload["plant_name"] plant_name = apps.get_app_config("plant").resolve_plant_name(payload["plant_name"]) or payload["plant_name"]
from plant.models import Plant from plant.models import Plant
plant = Plant.objects.filter(name=plant_name).first() plant = Plant.objects.filter(name=plant_name).first()
+57 -6
View File
@@ -2,6 +2,8 @@ from __future__ import annotations
import json import json
from django.apps import apps
from rest_framework import serializers from rest_framework import serializers
@@ -18,8 +20,38 @@ class QueryJSONField(serializers.JSONField):
return super().to_internal_value(data) return super().to_internal_value(data)
class GrowthSimulationRequestSerializer(serializers.Serializer): class PlantNameAliasMixin:
plant_name = serializers.CharField(help_text="نام گیاه") plant_name_field = "plant_name"
plant_alias_fields = ("crop", "crop_name")
def _get_raw_plant_name(self, attrs):
    """Return the raw plant name from validated attrs, falling back to alias fields.

    Aliases ("crop", "crop_name") are read from the raw request payload
    (initial_data) because they are not declared serializer fields.
    """
    value = attrs.get(self.plant_name_field)
    if value not in (None, ""):
        return value
    for alias in self.plant_alias_fields:
        # hasattr guard: initial_data only exists when the serializer was bound to input.
        alias_value = self.initial_data.get(alias) if hasattr(self, "initial_data") else None
        if alias_value not in (None, ""):
            return alias_value
    return value
def _resolve_plant_name(self, attrs, *, required: bool) -> dict:
    """Canonicalize the plant name in *attrs*, honoring the alias fields.

    When *required* is True and neither the primary field nor any alias was
    supplied, raises ValidationError; when not required, stores "" instead.
    Returns the (mutated) attrs dict.
    """
    raw_value = self._get_raw_plant_name(attrs)
    if raw_value in (None, ""):
        if required:
            raise serializers.ValidationError(
                {self.plant_name_field: "یکی از plant_name، crop یا crop_name باید ارسال شود."}
            )
        attrs[self.plant_name_field] = ""
        return attrs
    # Resolve via the plant app's registry; fall back to the stripped raw
    # value when no canonical match is found.
    resolved_value = apps.get_app_config("plant").resolve_plant_name(str(raw_value))
    attrs[self.plant_name_field] = resolved_value or str(raw_value).strip()
    return attrs
class GrowthSimulationRequestSerializer(PlantNameAliasMixin, serializers.Serializer):
plant_name = serializers.CharField(required=False, allow_blank=True, help_text="نام گیاه")
dynamic_parameters = serializers.ListField( dynamic_parameters = serializers.ListField(
child=serializers.CharField(), child=serializers.CharField(),
allow_empty=False, allow_empty=False,
@@ -36,6 +68,7 @@ class GrowthSimulationRequestSerializer(serializers.Serializer):
page_size = serializers.IntegerField(required=False, min_value=1, max_value=50) page_size = serializers.IntegerField(required=False, min_value=1, max_value=50)
def validate(self, attrs): def validate(self, attrs):
attrs = self._resolve_plant_name(attrs, required=True)
if not attrs.get("farm_uuid") and not attrs.get("weather"): if not attrs.get("farm_uuid") and not attrs.get("weather"):
raise serializers.ValidationError( raise serializers.ValidationError(
"یکی از farm_uuid یا weather باید ارسال شود." "یکی از farm_uuid یا weather باید ارسال شود."
@@ -44,7 +77,7 @@ class GrowthSimulationRequestSerializer(serializers.Serializer):
class GrowthSimulationQueuedSerializer(serializers.Serializer): class GrowthSimulationQueuedSerializer(serializers.Serializer):
task_id = serializers.CharField() task_id = serializers.UUIDField()
status_url = serializers.CharField() status_url = serializers.CharField()
plant_name = serializers.CharField() plant_name = serializers.CharField()
@@ -92,12 +125,15 @@ class GrowthSimulationResultSerializer(serializers.Serializer):
class CurrentFarmChartRequestSerializer(serializers.Serializer): class CurrentFarmChartRequestSerializer(PlantNameAliasMixin, serializers.Serializer):
farm_uuid = serializers.UUIDField(help_text="شناسه یکتای مزرعه") farm_uuid = serializers.UUIDField(help_text="شناسه یکتای مزرعه")
plant_name = serializers.CharField(required=False, allow_blank=True, help_text="نام گیاه") plant_name = serializers.CharField(required=False, allow_blank=True, help_text="نام گیاه")
irrigation_recommendation = serializers.JSONField(required=False) irrigation_recommendation = serializers.JSONField(required=False)
fertilization_recommendation = serializers.JSONField(required=False) fertilization_recommendation = serializers.JSONField(required=False)
def validate(self, attrs):
return self._resolve_plant_name(attrs, required=False)
class CurrentFarmChartResponseSerializer(serializers.Serializer): class CurrentFarmChartResponseSerializer(serializers.Serializer):
farm_uuid = serializers.CharField(allow_null=True) farm_uuid = serializers.CharField(allow_null=True)
@@ -114,12 +150,15 @@ class CurrentFarmChartResponseSerializer(serializers.Serializer):
daily_output = serializers.JSONField() daily_output = serializers.JSONField()
class HarvestPredictionRequestSerializer(serializers.Serializer): class HarvestPredictionRequestSerializer(PlantNameAliasMixin, serializers.Serializer):
farm_uuid = serializers.UUIDField(help_text="شناسه یکتای مزرعه") farm_uuid = serializers.UUIDField(help_text="شناسه یکتای مزرعه")
plant_name = serializers.CharField(required=False, allow_blank=True, help_text="نام گیاه") plant_name = serializers.CharField(required=False, allow_blank=True, help_text="نام گیاه")
irrigation_recommendation = serializers.JSONField(required=False) irrigation_recommendation = serializers.JSONField(required=False)
fertilization_recommendation = serializers.JSONField(required=False) fertilization_recommendation = serializers.JSONField(required=False)
def validate(self, attrs):
return self._resolve_plant_name(attrs, required=False)
class HarvestPredictionResponseSerializer(serializers.Serializer): class HarvestPredictionResponseSerializer(serializers.Serializer):
date = serializers.CharField() date = serializers.CharField()
@@ -131,12 +170,15 @@ class HarvestPredictionResponseSerializer(serializers.Serializer):
gddDetails = serializers.JSONField() gddDetails = serializers.JSONField()
class YieldPredictionRequestSerializer(serializers.Serializer): class YieldPredictionRequestSerializer(PlantNameAliasMixin, serializers.Serializer):
farm_uuid = serializers.UUIDField(help_text="شناسه یکتای مزرعه") farm_uuid = serializers.UUIDField(help_text="شناسه یکتای مزرعه")
plant_name = serializers.CharField(required=False, allow_blank=True, help_text="نام گیاه") plant_name = serializers.CharField(required=False, allow_blank=True, help_text="نام گیاه")
irrigation_recommendation = serializers.JSONField(required=False) irrigation_recommendation = serializers.JSONField(required=False)
fertilization_recommendation = serializers.JSONField(required=False) fertilization_recommendation = serializers.JSONField(required=False)
def validate(self, attrs):
return self._resolve_plant_name(attrs, required=False)
class YieldPredictionResponseSerializer(serializers.Serializer): class YieldPredictionResponseSerializer(serializers.Serializer):
farm_uuid = serializers.CharField() farm_uuid = serializers.CharField()
@@ -172,6 +214,15 @@ class YieldHarvestSummaryQuerySerializer(serializers.Serializer):
help_text="برنامه کودهی به صورت JSON برای تزریق به PCSE.", help_text="برنامه کودهی به صورت JSON برای تزریق به PCSE.",
) )
def validate(self, attrs):
    """Backfill `crop_name` from the `plant_name`/`crop` request aliases and canonicalize it."""
    raw_crop_name = attrs.get("crop_name")
    if raw_crop_name in (None, "") and hasattr(self, "initial_data"):
        # Aliases are read from the raw payload; they are not declared fields.
        raw_crop_name = self.initial_data.get("plant_name") or self.initial_data.get("crop")
    if raw_crop_name not in (None, ""):
        resolved_crop_name = apps.get_app_config("plant").resolve_plant_name(str(raw_crop_name))
        attrs["crop_name"] = resolved_crop_name or str(raw_crop_name).strip()
    return attrs
class YieldHarvestSummaryResponseSerializer(serializers.Serializer): class YieldHarvestSummaryResponseSerializer(serializers.Serializer):
farm_uuid = serializers.CharField() farm_uuid = serializers.CharField()
@@ -111,6 +111,24 @@ class PlantGrowthSimulationApiTests(TestCase):
self.assertEqual(response.json()["data"]["task_id"], "growth-task-1") self.assertEqual(response.json()["data"]["task_id"], "growth-task-1")
self.assertEqual(mock_delay.call_args.args[0]["irrigation_recommendation"]["events"][0]["amount"], 2.5) self.assertEqual(mock_delay.call_args.args[0]["irrigation_recommendation"]["events"][0]["amount"], 2.5)
@patch("crop_simulation.views.run_growth_simulation_task.delay")
def test_queue_api_accepts_crop_alias(self, mock_delay):
    """The growth queue endpoint accepts `crop` as an alias and resolves it to the canonical plant name."""
    mock_delay.return_value = SimpleNamespace(id="growth-task-2")
    response = self.client.post(
        "/growth/",
        data={
            "crop": "tomato",
            "dynamic_parameters": ["DVS", "LAI"],
            "weather": self.weather,
        },
        format="json",
    )
    self.assertEqual(response.status_code, 202)
    # The queued task payload must carry the resolved plant name, not the alias.
    self.assertEqual(mock_delay.call_args.args[0]["plant_name"], self.plant.name)
    self.assertEqual(response.json()["data"]["plant_name"], self.plant.name)
def test_queue_api_returns_400_for_missing_weather_and_farm_uuid(self): def test_queue_api_returns_400_for_missing_weather_and_farm_uuid(self):
response = self.client.post( response = self.client.post(
"/growth/", "/growth/",
@@ -275,6 +293,44 @@ class PlantGrowthSimulationApiTests(TestCase):
self.assertEqual(response.status_code, 400) self.assertEqual(response.status_code, 400)
self.assertEqual(response.json()["code"], 400) self.assertEqual(response.json()["code"], 400)
@patch("crop_simulation.views.apps.get_app_config")
def test_current_farm_chart_api_accepts_crop_name_alias(self, mock_get_app_config):
    """The chart endpoint accepts `crop_name` as an alias for `plant_name`."""
    captured = {}

    def simulate(**kwargs):
        # Record the kwargs the view forwards so the resolved name can be asserted.
        captured.update(kwargs)
        return {
            "farm_uuid": kwargs["farm_uuid"],
            "plant_name": kwargs["plant_name"],
            "engine": "growth_projection",
            "model_name": "growth_projection_v1",
            "scenario_id": 12,
            "simulation_warning": None,
            "categories": [],
            "series": [],
            "summary": [],
            "current_state": {},
            "metrics": {},
            "daily_output": [],
        }

    mock_get_app_config.return_value = SimpleNamespace(
        get_current_farm_chart_simulator=lambda: SimpleNamespace(simulate=simulate)
    )
    response = self.client.post(
        "/current-farm-chart/",
        data={
            "farm_uuid": "550e8400-e29b-41d4-a716-446655440000",
            "crop_name": "tomato",
        },
        format="json",
    )
    self.assertEqual(response.status_code, 200)
    self.assertEqual(captured["plant_name"], self.plant.name)
    self.assertEqual(response.json()["data"]["plant_name"], self.plant.name)
@patch("crop_simulation.views.apps.get_app_config") @patch("crop_simulation.views.apps.get_app_config")
def test_current_farm_chart_api_returns_500_when_simulator_fails(self, mock_get_app_config): def test_current_farm_chart_api_returns_500_when_simulator_fails(self, mock_get_app_config):
mock_simulator = SimpleNamespace( mock_simulator = SimpleNamespace(
@@ -342,6 +398,40 @@ class PlantGrowthSimulationApiTests(TestCase):
self.assertEqual(response.status_code, 400) self.assertEqual(response.status_code, 400)
self.assertEqual(response.json()["code"], 400) self.assertEqual(response.json()["code"], 400)
@patch("crop_simulation.views.apps.get_app_config")
def test_harvest_prediction_api_accepts_crop_alias(self, mock_get_app_config):
    """The harvest-prediction endpoint accepts `crop` as an alias for `plant_name`."""
    captured = {}

    def get_harvest_prediction(**kwargs):
        # Capture forwarded kwargs to assert the alias was resolved by the serializer.
        captured.update(kwargs)
        return {
            "date": "2026-05-14",
            "dateFormatted": "14 May 2026",
            "daysUntil": 43,
            "description": "ok",
            "optimalWindowStart": "2026-05-11",
            "optimalWindowEnd": "2026-05-17",
            "gddDetails": {},
        }

    mock_get_app_config.return_value = SimpleNamespace(
        get_harvest_prediction_service=lambda: SimpleNamespace(
            get_harvest_prediction=get_harvest_prediction
        )
    )
    response = self.client.post(
        "/harvest-prediction/",
        data={
            "farm_uuid": "550e8400-e29b-41d4-a716-446655440000",
            "crop": "tomato",
        },
        format="json",
    )
    self.assertEqual(response.status_code, 200)
    self.assertEqual(captured["plant_name"], self.plant.name)
@patch("crop_simulation.views.apps.get_app_config") @patch("crop_simulation.views.apps.get_app_config")
def test_harvest_prediction_api_returns_500_when_service_fails(self, mock_get_app_config): def test_harvest_prediction_api_returns_500_when_service_fails(self, mock_get_app_config):
class BrokenService: class BrokenService:
@@ -409,6 +499,44 @@ class PlantGrowthSimulationApiTests(TestCase):
self.assertEqual(response.status_code, 400) self.assertEqual(response.status_code, 400)
self.assertEqual(response.json()["code"], 400) self.assertEqual(response.json()["code"], 400)
@patch("crop_simulation.views.apps.get_app_config")
def test_yield_prediction_api_accepts_crop_alias(self, mock_get_app_config):
    """The yield-prediction endpoint accepts `crop` as an alias for `plant_name`."""
    captured = {}

    def get_yield_prediction(**kwargs):
        # Capture forwarded kwargs to assert the alias was resolved by the serializer.
        captured.update(kwargs)
        return {
            "farm_uuid": kwargs["farm_uuid"],
            "plant_name": kwargs["plant_name"],
            "predictedYieldTons": 5.4,
            "predictedYieldRaw": 5400.0,
            "unit": "تن",
            "sourceUnit": "kg/ha",
            "simulationEngine": "growth_projection",
            "simulationModel": "growth_projection_v1",
            "scenarioId": 12,
            "simulationWarning": None,
            "supportingMetrics": {},
        }

    mock_get_app_config.return_value = SimpleNamespace(
        get_yield_prediction_service=lambda: SimpleNamespace(
            get_yield_prediction=get_yield_prediction
        )
    )
    response = self.client.post(
        "/yield-prediction/",
        data={
            "farm_uuid": "550e8400-e29b-41d4-a716-446655440000",
            "crop": "tomato",
        },
        format="json",
    )
    self.assertEqual(response.status_code, 200)
    self.assertEqual(captured["plant_name"], self.plant.name)
@patch("crop_simulation.views.apps.get_app_config") @patch("crop_simulation.views.apps.get_app_config")
def test_yield_prediction_api_returns_500_when_service_fails(self, mock_get_app_config): def test_yield_prediction_api_returns_500_when_service_fails(self, mock_get_app_config):
class BrokenService: class BrokenService:
@@ -493,3 +621,26 @@ class PlantGrowthSimulationApiTests(TestCase):
self.assertEqual(response.status_code, 400) self.assertEqual(response.status_code, 400)
self.assertEqual(response.json()["code"], 400) self.assertEqual(response.json()["code"], 400)
@patch("crop_simulation.views.YieldHarvestSummaryService")
def test_yield_harvest_summary_api_accepts_plant_name_alias(self, mock_service_cls):
    """The summary endpoint maps the `plant_name` query param onto the service's `crop_name` kwarg."""
    mock_service_cls.return_value.get_summary.return_value = {
        "farm_uuid": "550e8400-e29b-41d4-a716-446655440000",
        "season_highlights_card": {},
        "yield_prediction": {},
        "harvest_prediction_card": {},
        "harvest_readiness_zones": {},
        "yield_quality_bands": {},
        "harvest_operations_card": {},
        "yield_prediction_chart": {},
    }
    response = self.client.get(
        "/yield-harvest-summary/?farm_uuid=550e8400-e29b-41d4-a716-446655440000&plant_name=tomato"
    )
    self.assertEqual(response.status_code, 200)
    # The query serializer should have resolved the alias into crop_name.
    self.assertEqual(
        mock_service_cls.return_value.get_summary.call_args.kwargs["crop_name"],
        self.plant.name,
    )
+14
View File
@@ -56,6 +56,11 @@ services:
QDRANT_HOST: ai-qdrant QDRANT_HOST: ai-qdrant
QDRANT_PORT: 6333 QDRANT_PORT: 6333
DEBUG: "False" DEBUG: "False"
ENABLE_PROXYCHAINS: ${ENABLE_PROXYCHAINS:-1}
PROXYCHAINS_PROXY_TYPE: ${PROXYCHAINS_PROXY_TYPE:-socks4}
PROXYCHAINS_PROXY_HOST: ${PROXYCHAINS_PROXY_HOST:-host.docker.internal}
PROXYCHAINS_PROXY_PORT: ${PROXYCHAINS_PROXY_PORT:-10808}
PROXYCHAINS_CHAIN_MODE: ${PROXYCHAINS_CHAIN_MODE:-strict_chain}
depends_on: depends_on:
db: db:
condition: service_healthy condition: service_healthy
@@ -64,6 +69,8 @@ services:
networks: networks:
- crop_network - crop_network
extra_hosts:
- "host.docker.internal:host-gateway"
celery: celery:
build: build:
@@ -89,6 +96,11 @@ services:
CELERY_RESULT_BACKEND: redis://ai-redis:6379/0 CELERY_RESULT_BACKEND: redis://ai-redis:6379/0
SKIP_MIGRATE: "1" SKIP_MIGRATE: "1"
DEBUG: "False" DEBUG: "False"
ENABLE_PROXYCHAINS: ${ENABLE_PROXYCHAINS:-1}
PROXYCHAINS_PROXY_TYPE: ${PROXYCHAINS_PROXY_TYPE:-socks4}
PROXYCHAINS_PROXY_HOST: ${PROXYCHAINS_PROXY_HOST:-host.docker.internal}
PROXYCHAINS_PROXY_PORT: ${PROXYCHAINS_PROXY_PORT:-10808}
PROXYCHAINS_CHAIN_MODE: ${PROXYCHAINS_CHAIN_MODE:-strict_chain}
depends_on: depends_on:
db: db:
condition: service_healthy condition: service_healthy
@@ -96,6 +108,8 @@ services:
condition: service_started condition: service_started
networks: networks:
- crop_network - crop_network
extra_hosts:
- "host.docker.internal:host-gateway"
volumes: volumes:
+14
View File
@@ -76,6 +76,11 @@ services:
CELERY_RESULT_BACKEND: redis://ai-redis:6379/0 CELERY_RESULT_BACKEND: redis://ai-redis:6379/0
QDRANT_HOST: ai-qdrant QDRANT_HOST: ai-qdrant
QDRANT_PORT: 6333 QDRANT_PORT: 6333
ENABLE_PROXYCHAINS: ${ENABLE_PROXYCHAINS:-0}
PROXYCHAINS_PROXY_TYPE: ${PROXYCHAINS_PROXY_TYPE:-socks4}
PROXYCHAINS_PROXY_HOST: ${PROXYCHAINS_PROXY_HOST:-host.docker.internal}
PROXYCHAINS_PROXY_PORT: ${PROXYCHAINS_PROXY_PORT:-10808}
PROXYCHAINS_CHAIN_MODE: ${PROXYCHAINS_CHAIN_MODE:-strict_chain}
depends_on: depends_on:
db: db:
condition: service_healthy condition: service_healthy
@@ -85,6 +90,8 @@ services:
condition: service_started condition: service_started
networks: networks:
- crop_network - crop_network
extra_hosts:
- "host.docker.internal:host-gateway"
celery: celery:
build: build:
@@ -106,6 +113,11 @@ services:
CELERY_BROKER_URL: redis://ai-redis:6379/0 CELERY_BROKER_URL: redis://ai-redis:6379/0
CELERY_RESULT_BACKEND: redis://ai-redis:6379/0 CELERY_RESULT_BACKEND: redis://ai-redis:6379/0
SKIP_MIGRATE: "1" SKIP_MIGRATE: "1"
ENABLE_PROXYCHAINS: ${ENABLE_PROXYCHAINS:-0}
PROXYCHAINS_PROXY_TYPE: ${PROXYCHAINS_PROXY_TYPE:-socks4}
PROXYCHAINS_PROXY_HOST: ${PROXYCHAINS_PROXY_HOST:-host.docker.internal}
PROXYCHAINS_PROXY_PORT: ${PROXYCHAINS_PROXY_PORT:-10808}
PROXYCHAINS_CHAIN_MODE: ${PROXYCHAINS_CHAIN_MODE:-strict_chain}
depends_on: depends_on:
db: db:
condition: service_healthy condition: service_healthy
@@ -113,6 +125,8 @@ services:
condition: service_started condition: service_started
networks: networks:
- crop_network - crop_network
extra_hosts:
- "host.docker.internal:host-gateway"
volumes: volumes:
ai_mysql_data: ai_mysql_data:
+67 -9
View File
@@ -1,8 +1,59 @@
#!/bin/sh #!/bin/sh
set -e set -e
PROXYCHAINS_CONFIG_FILE="${PROXYCHAINS_CONFIG_FILE:-/etc/proxychains.conf}"
# Generate a proxychains4 configuration from the PROXYCHAINS_* environment
# variables. No-op when ENABLE_PROXYCHAINS != "1"; exits non-zero when the
# tool is missing or the proxy host cannot be resolved, so a broken proxy
# setup fails fast instead of silently sending traffic direct.
setup_proxychains() {
    if [ "${ENABLE_PROXYCHAINS}" != "1" ]; then
        echo "proxychains disabled (ENABLE_PROXYCHAINS=${ENABLE_PROXYCHAINS:-0})"
        return 0
    fi
    if ! command -v proxychains4 >/dev/null 2>&1; then
        echo "proxychains4 is not installed but ENABLE_PROXYCHAINS=1 was set." >&2
        exit 1
    fi
    # Defaults mirror the docker-compose environment defaults.
    proxy_type="${PROXYCHAINS_PROXY_TYPE:-socks4}"
    proxy_host="${PROXYCHAINS_PROXY_HOST:-host.docker.internal}"
    proxy_port="${PROXYCHAINS_PROXY_PORT:-10808}"
    chain_mode="${PROXYCHAINS_CHAIN_MODE:-strict_chain}"
    # proxychains requires a literal IP in its ProxyList; resolve the hostname once here.
    proxy_ip="$(getent hosts "${proxy_host}" | awk 'NR==1 {print $1}')"
    if [ -z "${proxy_ip}" ]; then
        echo "Could not resolve proxy host: ${proxy_host}" >&2
        exit 1
    fi
    # The localnet entries keep loopback, RFC1918, and link-local traffic
    # (DB, redis, metadata endpoints) direct instead of routing via the proxy.
    cat > "${PROXYCHAINS_CONFIG_FILE}" <<EOF
# Auto-generated by entrypoint.sh
${chain_mode}
proxy_dns
remote_dns_subnet 224
tcp_read_time_out 15000
tcp_connect_time_out 8000
localnet 127.0.0.0/255.0.0.0
localnet 10.0.0.0/255.0.0.0
localnet 172.16.0.0/255.240.0.0
localnet 192.168.0.0/255.255.0.0
localnet 169.254.0.0/255.255.0.0
[ProxyList]
${proxy_type} ${proxy_ip} ${proxy_port}
EOF
    echo "proxychains enabled via ${proxy_type} ${proxy_host} (${proxy_ip}):${proxy_port}"
}
# Run a command, wrapped by proxychains4 when ENABLE_PROXYCHAINS=1 so its
# outbound TCP goes through the generated configuration; otherwise run it
# directly and unchanged.
run_cmd() {
    if [ "${ENABLE_PROXYCHAINS}" = "1" ]; then
        proxychains4 -f "${PROXYCHAINS_CONFIG_FILE}" "$@"
    else
        "$@"
    fi
}
wait_for_db() { wait_for_db() {
python - <<'PY' run_cmd python - <<'PY'
import os import os
import socket import socket
import sys import sys
@@ -26,26 +77,33 @@ sys.exit(1)
PY PY
} }
setup_proxychains
if [ "${SKIP_MIGRATE}" != "1" ]; then if [ "${SKIP_MIGRATE}" != "1" ]; then
wait_for_db wait_for_db
echo "Running migrations..." echo "Running migrations..."
python manage.py repair_location_tables run_cmd python manage.py repair_location_tables
python manage.py migrate --noinput run_cmd python manage.py migrate --noinput
echo "Migrations done." echo "Migrations done."
fi fi
if [ -n "${DEVELOP}" ] && [ "${SKIP_MIGRATE}" != "1" ]; then if [ -n "${DEVELOP}" ] && [ "${SKIP_MIGRATE}" != "1" ]; then
echo "DEVELOP is set. Seeding demo plant, location_data, weather_data, and farm_data..." echo "DEVELOP is set. Seeding demo plant, weather_data, and farm_data..."
python manage.py seed_plants run_cmd python manage.py seed_plants
python manage.py seed_location_data run_cmd python manage.py seed_weather_data
python manage.py seed_weather_data run_cmd python manage.py seed_farm_data
python manage.py seed_farm_data
echo "Demo seeders done." echo "Demo seeders done."
fi fi
echo "Checking openEO authentication..."
run_cmd python manage.py verify_openeo_auth --skip-if-unconfigured
echo "Collecting static files..." echo "Collecting static files..."
python manage.py collectstatic --noinput run_cmd python manage.py collectstatic --noinput
echo "Static files ready." echo "Static files ready."
echo "Starting command: $*" echo "Starting command: $*"
if [ "${ENABLE_PROXYCHAINS}" = "1" ]; then
exec proxychains4 -f "${PROXYCHAINS_CONFIG_FILE}" "$@"
fi
exec "$@" exec "$@"
@@ -15,6 +15,16 @@ from weather.models import WeatherForecast
DEMO_FARM_UUID = UUID("11111111-1111-1111-1111-111111111111") DEMO_FARM_UUID = UUID("11111111-1111-1111-1111-111111111111")
DEMO_LATITUDE = "50.000000" DEMO_LATITUDE = "50.000000"
DEMO_LONGITUDE = "50.000000" DEMO_LONGITUDE = "50.000000"
# GeoJSON-style Polygon for the demo farm: a small closed ring spanning
# 49.9995–50.0005 on both axes around the demo point; the first vertex is
# repeated as the last to close the ring, as GeoJSON requires.
# NOTE(review): axis order (lon/lat vs lat/lon) cannot be confirmed here since
# both demo coordinates are 50.0 — verify against SoilLocation's convention.
DEMO_FARM_BOUNDARY = {
    "type": "Polygon",
    "coordinates": [[
        [49.9995, 49.9995],
        [50.0005, 49.9995],
        [50.0005, 50.0005],
        [49.9995, 50.0005],
        [49.9995, 49.9995],
    ]],
}
DEMO_SENSOR_PAYLOAD = { DEMO_SENSOR_PAYLOAD = {
"sensor-7-1": { "sensor-7-1": {
"soil_moisture": 42.3, "soil_moisture": 42.3,
@@ -39,7 +49,20 @@ class Command(BaseCommand):
location, _ = SoilLocation.objects.get_or_create( location, _ = SoilLocation.objects.get_or_create(
latitude=DEMO_LATITUDE, latitude=DEMO_LATITUDE,
longitude=DEMO_LONGITUDE, longitude=DEMO_LONGITUDE,
defaults={"farm_boundary": DEMO_FARM_BOUNDARY},
) )
changed_fields = []
if location.farm_boundary != DEMO_FARM_BOUNDARY:
location.farm_boundary = DEMO_FARM_BOUNDARY
changed_fields.append("farm_boundary")
if not location.input_block_count:
location.input_block_count = 1
changed_fields.append("input_block_count")
if not location.block_layout:
location.set_input_block_count(1)
changed_fields.extend(["input_block_count", "block_layout"])
if changed_fields:
location.save(update_fields=[*dict.fromkeys(changed_fields), "updated_at"])
weather_forecast = ( weather_forecast = (
WeatherForecast.objects.filter(location=location) WeatherForecast.objects.filter(location=location)
.order_by("-forecast_date", "-id") .order_by("-forecast_date", "-id")
+26 -2
View File
@@ -9,6 +9,7 @@ from django.conf import settings
from django.apps import apps from django.apps import apps
from django.db import transaction from django.db import transaction
from django.utils.dateparse import parse_datetime from django.utils.dateparse import parse_datetime
from django.utils import timezone
import requests import requests
@@ -576,10 +577,33 @@ def resolve_weather_for_location(location: SoilLocation) -> WeatherForecast | No
def ensure_location_and_weather_data(location: SoilLocation) -> tuple[SoilLocation, WeatherForecast | None]: def ensure_location_and_weather_data(location: SoilLocation) -> tuple[SoilLocation, WeatherForecast | None]:
""" """
در فاز فعلی برای location_data و بلوک‌ها هیچ ریکوئست خارجی زده نمی‌شود forecast آب‌وهوا را در صورت نبود/قدیمی بودن refresh می‌کند تا
و فقط دادههای محلی موجود برگردانده می‌شوند. سرویس‌های downstream بهجای دیتای seed/mock از داده provider فعال استفاده کنند.
""" """
weather_forecast = resolve_weather_for_location(location) weather_forecast = resolve_weather_for_location(location)
needs_refresh = weather_forecast is None
if weather_forecast is not None:
today = timezone.localdate()
has_upcoming_forecast = WeatherForecast.objects.filter(
location=location,
forecast_date__gte=today,
).exists()
fetched_at = getattr(weather_forecast, "fetched_at", None)
is_stale = fetched_at is None or (timezone.now() - fetched_at).total_seconds() >= 3 * 60 * 60
needs_refresh = (not has_upcoming_forecast) or is_stale
if needs_refresh:
try:
weather_result = apps.get_app_config("weather").update_weather_for_location(location)
except Exception as exc:
raise ExternalDataSyncError(f"Weather sync failed: {exc}") from exc
if weather_result.get("status") == "error":
raise ExternalDataSyncError(weather_result.get("error") or "Weather sync failed.")
weather_forecast = resolve_weather_for_location(location)
return location, weather_forecast return location, weather_forecast
@@ -83,7 +83,7 @@ class ReportingAndAiJourneyTests(IntegrationAPITestCase):
def test_reporting_endpoints_read_from_persisted_farm_context(self) -> None: def test_reporting_endpoints_read_from_persisted_farm_context(self) -> None:
soil_response = self.client.get( soil_response = self.client.get(
"/api/soil-data/", "/api/location-data/",
data={"lat": f"{self.primary_lat:.6f}", "lon": f"{self.primary_lon:.6f}"}, data={"lat": f"{self.primary_lat:.6f}", "lon": f"{self.primary_lon:.6f}"},
) )
self.assertEqual(soil_response.status_code, 200) self.assertEqual(soil_response.status_code, 200)
@@ -168,7 +168,7 @@ class ReportingAndAiJourneyTests(IntegrationAPITestCase):
self.assertEqual(anomaly_response.json()["data"]["knowledge_base"], "soil_anomaly") self.assertEqual(anomaly_response.json()["data"]["knowledge_base"], "soil_anomaly")
ndvi_response = self.client.post( ndvi_response = self.client.post(
"/api/soil-data/ndvi-health/", "/api/location-data/ndvi-health/",
data={"farm_uuid": str(self.farm_uuid)}, data={"farm_uuid": str(self.farm_uuid)},
format="json", format="json",
) )
@@ -0,0 +1,47 @@
from django.core.management.base import BaseCommand, CommandError
from config.proxy import build_proxy_url_from_proxychains_env, resolve_requests_proxy_url
from location_data.openeo_service import (
OpenEOAuthenticationError,
OpenEOConnectionSettings,
OpenEOServiceError,
connect_openeo,
is_openeo_auth_configured,
)
class Command(BaseCommand):
    """Management command that verifies openEO connectivity and authentication."""

    help = "Verify openEO connectivity and authentication using the current environment."

    def add_arguments(self, parser):
        # --skip-if-unconfigured lets the container entrypoint invoke this
        # unconditionally without failing deployments that do not use openEO.
        parser.add_argument(
            "--skip-if-unconfigured",
            action="store_true",
            help="Exit successfully when the required auth environment variables are missing.",
        )

    def handle(self, *args, **options):
        # Build connection settings purely from environment variables.
        settings = OpenEOConnectionSettings.from_env()
        if not is_openeo_auth_configured(settings):
            message = "openEO auth check skipped because the required credentials are not configured."
            if options["skip_if_unconfigured"]:
                self.stdout.write(self.style.WARNING(message))
                return
            raise CommandError(message)
        self.stdout.write(f"Verifying openEO auth against {settings.backend_url}...")
        # Report which proxy path the auth attempt takes: an explicit requests
        # proxy wins; otherwise note the process-level proxychains wrapper.
        requests_proxy_url = resolve_requests_proxy_url(settings.proxy_url)
        proxychains_url = build_proxy_url_from_proxychains_env()
        if requests_proxy_url:
            self.stdout.write(f"Using requests proxy for openEO auth: {requests_proxy_url}")
        elif proxychains_url:
            self.stdout.write(f"Using proxychains for openEO auth: {proxychains_url}")
        # Convert auth/service failures into CommandError so callers (the
        # entrypoint, CI) get a non-zero exit code with a readable message.
        try:
            connect_openeo(settings)
        except OpenEOAuthenticationError as exc:
            raise CommandError(str(exc)) from exc
        except OpenEOServiceError as exc:
            raise CommandError(str(exc)) from exc
        self.stdout.write(self.style.SUCCESS("openEO authentication succeeded."))
@@ -60,6 +60,6 @@ class Migration(migrations.Migration):
), ),
migrations.AddIndex( migrations.AddIndex(
model_name="remotesensingclusterassignment", model_name="remotesensingclusterassignment",
index=models.Index(fields=["result", "cluster_label"], name="rs_cluster_assign_result_label_idx"), index=models.Index(fields=["result", "cluster_label"], name="rs_ca_result_label_idx"),
), ),
] ]
@@ -0,0 +1,9 @@
from django.db import migrations
class Migration(migrations.Migration):
    """No-op migration after ``0012``; records no schema operations.

    Presumably a placeholder kept so the migration graph stays linear after
    a change was folded into an earlier migration — verify against history.
    """
    dependencies = [
        ("location_data", "0012_remote_sensing_subdivision_models"),
    ]
    # Intentionally empty: nothing to apply or reverse.
    operations = []
@@ -0,0 +1,10 @@
from django.db import migrations
class Migration(migrations.Migration):
    """Joins the ``0013`` and ``0014`` migration branches.

    Two parent dependencies with empty operations — the shape of a Django
    merge migration; it exists only to reconcile the divergent branches.
    """
    dependencies = [
        ("location_data", "0013_rename_cluster_assignment_index"),
        ("location_data", "0014_blocksubdivision_chunk_size_30m"),
    ]
    # Intentionally empty: no database changes are made here.
    operations = []
+1 -1
View File
@@ -480,7 +480,7 @@ class RemoteSensingClusterAssignment(models.Model):
indexes = [ indexes = [
models.Index( models.Index(
fields=["result", "cluster_label"], fields=["result", "cluster_label"],
name="rs_cluster_assign_result_label_idx", name="rs_ca_result_label_idx",
) )
] ]
verbose_name = "remote sensing cluster assignment" verbose_name = "remote sensing cluster assignment"
+130 -15
View File
@@ -7,11 +7,16 @@ from datetime import date
from decimal import Decimal from decimal import Decimal
from typing import Any from typing import Any
import requests
from config.proxy import apply_requests_proxy, build_proxy_url_from_proxychains_env
from .models import AnalysisGridCell from .models import AnalysisGridCell
DEFAULT_OPENEO_BACKEND_URL = "https://openeofed.dataspace.copernicus.eu" DEFAULT_OPENEO_BACKEND_URL = "https://openeofed.dataspace.copernicus.eu"
DEFAULT_OPENEO_PROVIDER = "openeo" DEFAULT_OPENEO_PROVIDER = "openeo"
DEFAULT_OPENEO_PROXY_URL = "socks5h://host.docker.internal:10808"
SENTINEL2_COLLECTION = "SENTINEL2_L2A" SENTINEL2_COLLECTION = "SENTINEL2_L2A"
SENTINEL3_LST_COLLECTION = "SENTINEL3_SLSTR_L2_LST" SENTINEL3_LST_COLLECTION = "SENTINEL3_SLSTR_L2_LST"
@@ -42,22 +47,39 @@ class OpenEOExecutionError(OpenEOServiceError):
"""Raised when a metric process graph can not be executed successfully.""" """Raised when a metric process graph can not be executed successfully."""
class TimeoutOverrideSession(requests.Session):
    """Requests session that enforces a minimum timeout for all outbound calls.

    Any request issued without a timeout, or with a numeric timeout smaller
    than ``timeout_seconds``, is upgraded to ``timeout_seconds``.
    """

    def __init__(self, timeout_seconds: float):
        super().__init__()
        # Floor applied to every request made through this session.
        self.timeout_seconds = timeout_seconds

    def request(self, method, url, **kwargs):
        timeout = kwargs.get("timeout")
        if timeout is None:
            kwargs["timeout"] = self.timeout_seconds
        elif isinstance(timeout, (int, float)) and timeout < self.timeout_seconds:
            # Only compare numeric timeouts: requests also accepts a
            # (connect, read) tuple, and comparing a tuple against a float
            # would raise TypeError — the original `timeout < self.timeout_seconds`
            # check crashed on tuple timeouts. Tuples are passed through as-is.
            kwargs["timeout"] = self.timeout_seconds
        return super().request(method, url, **kwargs)
@dataclass(frozen=True) @dataclass(frozen=True)
class OpenEOConnectionSettings: class OpenEOConnectionSettings:
backend_url: str = DEFAULT_OPENEO_BACKEND_URL backend_url: str = DEFAULT_OPENEO_BACKEND_URL
auth_method: str = "client_credentials" auth_method: str = "client_credentials"
timeout_seconds: float = 60.0
client_id: str = "" client_id: str = ""
client_secret: str = "" client_secret: str = ""
provider_id: str = "" provider_id: str = ""
username: str = "" username: str = ""
password: str = "" password: str = ""
allow_interactive_oidc: bool = False allow_interactive_oidc: bool = False
proxy_url: str = ""
@classmethod @classmethod
def from_env(cls) -> "OpenEOConnectionSettings": def from_env(cls) -> "OpenEOConnectionSettings":
return cls( return cls(
backend_url=os.environ.get("OPENEO_BACKEND_URL", DEFAULT_OPENEO_BACKEND_URL).strip(), backend_url=os.environ.get("OPENEO_BACKEND_URL", DEFAULT_OPENEO_BACKEND_URL).strip(),
auth_method=os.environ.get("OPENEO_AUTH_METHOD", "client_credentials").strip().lower(), auth_method=os.environ.get("OPENEO_AUTH_METHOD", "client_credentials").strip().lower(),
timeout_seconds=float(os.environ.get("OPENEO_TIMEOUT_SECONDS", "60").strip() or "60"),
client_id=os.environ.get("OPENEO_AUTH_CLIENT_ID", "").strip(), client_id=os.environ.get("OPENEO_AUTH_CLIENT_ID", "").strip(),
client_secret=os.environ.get("OPENEO_AUTH_CLIENT_SECRET", "").strip(), client_secret=os.environ.get("OPENEO_AUTH_CLIENT_SECRET", "").strip(),
provider_id=os.environ.get("OPENEO_AUTH_PROVIDER_ID", "").strip(), provider_id=os.environ.get("OPENEO_AUTH_PROVIDER_ID", "").strip(),
@@ -65,9 +87,40 @@ class OpenEOConnectionSettings:
password=os.environ.get("OPENEO_PASSWORD", "").strip(), password=os.environ.get("OPENEO_PASSWORD", "").strip(),
allow_interactive_oidc=os.environ.get("OPENEO_ALLOW_INTERACTIVE_OIDC", "0").strip().lower() allow_interactive_oidc=os.environ.get("OPENEO_ALLOW_INTERACTIVE_OIDC", "0").strip().lower()
in {"1", "true", "yes", "on"}, in {"1", "true", "yes", "on"},
proxy_url=_resolve_openeo_proxy_url_from_env(),
) )
def _resolve_openeo_proxy_url_from_env() -> str:
    """Pick the proxy URL for openEO traffic.

    An explicit, non-default ``OPENEO_PROXY_URL`` wins; otherwise the endpoint
    derived from the ``PROXYCHAINS_*`` variables is used, falling back to the
    configured (possibly default) value.
    """
    explicit = os.environ.get("OPENEO_PROXY_URL", DEFAULT_OPENEO_PROXY_URL).strip()
    if explicit and explicit != DEFAULT_OPENEO_PROXY_URL:
        return explicit
    # Keep openEO traffic proxied even when process-wide proxychains is disabled.
    derived = build_proxy_url_from_proxychains_env(require_enabled=False)
    return derived or explicit
def is_openeo_auth_configured(settings: OpenEOConnectionSettings | None = None) -> bool:
    """Return True when the selected auth method has all required credentials.

    Falls back to environment-derived settings when none are supplied.
    """
    cfg = settings or OpenEOConnectionSettings.from_env()
    method = cfg.auth_method
    if method == "client_credentials":
        return bool(cfg.client_id) and bool(cfg.client_secret)
    if method == "password":
        return bool(cfg.username) and bool(cfg.password)
    # Interactive OIDC counts as configured only when explicitly allowed.
    return method == "oidc" and cfg.allow_interactive_oidc
def build_openeo_requests_session(settings: OpenEOConnectionSettings) -> requests.Session:
    """Create a session with the configured minimum timeout and proxy applied."""
    timed_session = TimeoutOverrideSession(settings.timeout_seconds)
    return apply_requests_proxy(timed_session, settings.proxy_url)
def connect_openeo(settings: OpenEOConnectionSettings | None = None): def connect_openeo(settings: OpenEOConnectionSettings | None = None):
""" """
Build an authenticated openEO connection using environment-driven configuration. Build an authenticated openEO connection using environment-driven configuration.
@@ -77,36 +130,98 @@ def connect_openeo(settings: OpenEOConnectionSettings | None = None):
settings = settings or OpenEOConnectionSettings.from_env() settings = settings or OpenEOConnectionSettings.from_env()
try: try:
import openeo import openeo
from openeo.rest.auth.oidc import (
OidcClientCredentialsAuthenticator,
OidcClientInfo,
OidcProviderInfo,
OidcResourceOwnerPasswordAuthenticator,
)
except ImportError as exc: # pragma: no cover - runtime dependency guard except ImportError as exc: # pragma: no cover - runtime dependency guard
raise OpenEOServiceError("The `openeo` Python client is required for remote sensing jobs.") from exc raise OpenEOServiceError("The `openeo` Python client is required for remote sensing jobs.") from exc
connection = openeo.connect(settings.backend_url) session = build_openeo_requests_session(settings)
connection = openeo.connect(
settings.backend_url,
session=session,
default_timeout=settings.timeout_seconds,
)
def resolve_oidc_context(
provider_id: str | None,
client_id: str | None,
client_secret: str | None,
) -> tuple[str, OidcClientInfo]:
resolved_provider_id, _ = connection._get_oidc_provider(provider_id, parse_info=False)
providers_payload = connection.get("/credentials/oidc", expected_status=200).json()
provider_map = {provider["id"]: provider for provider in providers_payload["providers"]}
provider_data = provider_map.get(resolved_provider_id)
if not provider_data:
raise OpenEOAuthenticationError(
f"OIDC provider metadata for {resolved_provider_id!r} was not returned by the backend."
)
provider_info = OidcProviderInfo(
provider_id=provider_data["id"],
title=provider_data["title"],
issuer=provider_data["issuer"],
scopes=provider_data.get("scopes"),
default_clients=provider_data.get("default_clients"),
requests_session=session,
)
if not client_id:
raise OpenEOAuthenticationError(
"OPENEO_AUTH_CLIENT_ID must be configured for this openEO auth flow."
)
return resolved_provider_id, OidcClientInfo(
client_id=client_id,
client_secret=client_secret,
provider=provider_info,
)
try: try:
if settings.auth_method == "client_credentials": if settings.auth_method == "client_credentials":
if not settings.client_id or not settings.client_secret: if not settings.client_id or not settings.client_secret:
raise OpenEOAuthenticationError( raise OpenEOAuthenticationError(
"OPENEO_AUTH_CLIENT_ID and OPENEO_AUTH_CLIENT_SECRET must be configured." "OPENEO_AUTH_CLIENT_ID and OPENEO_AUTH_CLIENT_SECRET must be configured."
) )
auth_kwargs = { provider_id, client_info = resolve_oidc_context(
"client_id": settings.client_id, settings.provider_id or None,
"client_secret": settings.client_secret, settings.client_id,
} settings.client_secret,
if settings.provider_id: )
auth_kwargs["provider_id"] = settings.provider_id authenticator = OidcClientCredentialsAuthenticator(
return connection.authenticate_oidc_client_credentials(**auth_kwargs) client_info=client_info,
requests_session=session,
)
return connection._authenticate_oidc(
authenticator,
provider_id=provider_id,
store_refresh_token=False,
oidc_auth_renewer=authenticator,
)
if settings.auth_method == "password": if settings.auth_method == "password":
if not settings.username or not settings.password: if not settings.username or not settings.password:
raise OpenEOAuthenticationError( raise OpenEOAuthenticationError(
"OPENEO_USERNAME and OPENEO_PASSWORD must be configured for password auth." "OPENEO_USERNAME and OPENEO_PASSWORD must be configured for password auth."
) )
auth_kwargs = { provider_id, client_info = resolve_oidc_context(
"username": settings.username, settings.provider_id or None,
"password": settings.password, settings.client_id or None,
} settings.client_secret or None,
if settings.provider_id: )
auth_kwargs["provider_id"] = settings.provider_id authenticator = OidcResourceOwnerPasswordAuthenticator(
return connection.authenticate_oidc_resource_owner_password_credentials(**auth_kwargs) client_info=client_info,
username=settings.username,
password=settings.password,
requests_session=session,
)
return connection._authenticate_oidc(
authenticator,
provider_id=provider_id,
store_refresh_token=False,
)
if settings.auth_method == "oidc": if settings.auth_method == "oidc":
if not settings.allow_interactive_oidc: if not settings.allow_interactive_oidc:
+12 -38
View File
@@ -1,6 +1,5 @@
from rest_framework import serializers from rest_framework import serializers
from .data_driven_subdivision import SUPPORTED_CLUSTER_FEATURES
from .models import ( from .models import (
AnalysisGridObservation, AnalysisGridObservation,
BlockSubdivision, BlockSubdivision,
@@ -108,7 +107,7 @@ class SoilDataTaskResponseSerializer(serializers.Serializer):
"""سریالایزر خروجی وقتی تسک در صف قرار گرفته (۲۰۲).""" """سریالایزر خروجی وقتی تسک در صف قرار گرفته (۲۰۲)."""
source = serializers.CharField(default="task") source = serializers.CharField(default="task")
task_id = serializers.CharField() task_id = serializers.UUIDField()
lon = serializers.FloatField(source="longitude") lon = serializers.FloatField(source="longitude")
lat = serializers.FloatField(source="latitude") lat = serializers.FloatField(source="latitude")
status_url = serializers.CharField(required=False) status_url = serializers.CharField(required=False)
@@ -135,42 +134,9 @@ class NdviHealthResponseSerializer(serializers.Serializer):
healthData = NdviHealthDataItemSerializer(many=True) healthData = NdviHealthDataItemSerializer(many=True)
class RemoteSensingTriggerSerializer(serializers.Serializer): class RemoteSensingFarmRequestSerializer(serializers.Serializer):
lon = serializers.DecimalField(max_digits=9, decimal_places=6, required=True) farm_uuid = serializers.UUIDField(required=True, help_text="شناسه یکتای مزرعه")
lat = serializers.DecimalField(max_digits=9, decimal_places=6, required=True)
block_code = serializers.CharField(required=False, allow_blank=True, default="", max_length=64)
start_date = serializers.DateField(required=True)
end_date = serializers.DateField(required=True)
force_refresh = serializers.BooleanField(required=False, default=False) force_refresh = serializers.BooleanField(required=False, default=False)
cluster_count = serializers.IntegerField(required=False, min_value=1, allow_null=True, default=None)
selected_features = serializers.ListField(
child=serializers.CharField(max_length=64),
required=False,
allow_empty=False,
)
def validate(self, attrs):
if attrs["start_date"] > attrs["end_date"]:
raise serializers.ValidationError("start_date نمی‌تواند بعد از end_date باشد.")
selected_features = attrs.get("selected_features") or []
invalid_features = sorted(
feature_name
for feature_name in selected_features
if feature_name not in SUPPORTED_CLUSTER_FEATURES
)
if invalid_features:
raise serializers.ValidationError(
{
"selected_features": [
"ویژگی‌های نامعتبر برای خوشه‌بندی: "
+ ", ".join(invalid_features)
]
}
)
return attrs
class RemoteSensingResultQuerySerializer(RemoteSensingTriggerSerializer):
page = serializers.IntegerField(required=False, min_value=1, default=1) page = serializers.IntegerField(required=False, min_value=1, default=1)
page_size = serializers.IntegerField(required=False, min_value=1, max_value=200, default=100) page_size = serializers.IntegerField(required=False, min_value=1, max_value=200, default=100)
@@ -323,7 +289,15 @@ class RemoteSensingRunStatusResponseSerializer(serializers.Serializer):
status = serializers.CharField() status = serializers.CharField()
source = serializers.CharField() source = serializers.CharField()
run = RemoteSensingRunSerializer() run = RemoteSensingRunSerializer()
task_id = serializers.CharField(allow_blank=True, allow_null=True, required=False) task_id = serializers.UUIDField(allow_null=True, required=False)
location = SoilLocationResponseSerializer(required=False)
block_code = serializers.CharField(allow_blank=True, required=False)
chunk_size_sqm = serializers.IntegerField(allow_null=True, required=False)
temporal_extent = serializers.JSONField(required=False)
summary = RemoteSensingSummarySerializer(required=False)
cells = RemoteSensingCellObservationSerializer(many=True, required=False)
subdivision_result = RemoteSensingSubdivisionResultSerializer(allow_null=True, required=False)
pagination = serializers.JSONField(required=False)
class RemoteSensingRunResultResponseSerializer(serializers.Serializer): class RemoteSensingRunResultResponseSerializer(serializers.Serializer):
+95
View File
@@ -1,9 +1,18 @@
from decimal import Decimal from decimal import Decimal
from io import StringIO
import os
from unittest.mock import Mock, patch
from django.core.management import call_command
from django.test import SimpleTestCase from django.test import SimpleTestCase
from config.proxy import resolve_requests_proxy_url
from location_data.openeo_service import ( from location_data.openeo_service import (
OpenEOConnectionSettings,
_resolve_openeo_proxy_url_from_env,
build_empty_metric_payload, build_empty_metric_payload,
connect_openeo,
is_openeo_auth_configured,
linear_to_db, linear_to_db,
merge_metric_results, merge_metric_results,
parse_aggregate_spatial_response, parse_aggregate_spatial_response,
@@ -64,3 +73,89 @@ class OpenEOServiceParsingTests(SimpleTestCase):
self.assertEqual(target["cell-1"]["ndvi"], 0.5) self.assertEqual(target["cell-1"]["ndvi"], 0.5)
self.assertEqual(target["cell-2"]["ndwi"], 0.2) self.assertEqual(target["cell-2"]["ndwi"], 0.2)
self.assertIn("soil_vv_db", target["cell-2"]) self.assertIn("soil_vv_db", target["cell-2"])
class OpenEOConnectionTests(SimpleTestCase):
    """Tests for openEO proxy resolution, auth configuration checks, and the
    proxied/timed session wiring used by ``connect_openeo``."""
    def test_default_openeo_proxy_url_uses_proxychains_endpoint_without_wrapping_process(self):
        # With OPENEO_PROXY_URL left at its default, the resolver derives the
        # URL from the PROXYCHAINS_* variables even though ENABLE_PROXYCHAINS=0.
        with patch.dict(
            os.environ,
            {
                "ENABLE_PROXYCHAINS": "0",
                "PROXYCHAINS_PROXY_TYPE": "socks4",
                "PROXYCHAINS_PROXY_HOST": "host.docker.internal",
                "PROXYCHAINS_PROXY_PORT": "10808",
                "OPENEO_PROXY_URL": "socks5h://host.docker.internal:10808",
            },
            clear=False,
        ):
            self.assertEqual(
                _resolve_openeo_proxy_url_from_env(),
                "socks4a://host.docker.internal:10808",
            )
    def test_requests_proxy_is_disabled_when_proxychains_targets_same_endpoint(self):
        # When proxychains is enabled and already points at the same host:port,
        # the per-request proxy is dropped (empty string) to avoid double-proxying.
        with patch.dict(
            os.environ,
            {
                "ENABLE_PROXYCHAINS": "1",
                "PROXYCHAINS_PROXY_TYPE": "socks4",
                "PROXYCHAINS_PROXY_HOST": "host.docker.internal",
                "PROXYCHAINS_PROXY_PORT": "10808",
            },
            clear=False,
        ):
            self.assertEqual(
                resolve_requests_proxy_url("socks5h://host.docker.internal:10808"),
                "",
            )
    def test_is_openeo_auth_configured_for_client_credentials(self):
        # client_credentials is considered configured when both id and secret are set.
        self.assertTrue(
            is_openeo_auth_configured(
                OpenEOConnectionSettings(
                    auth_method="client_credentials",
                    client_id="client-id",
                    client_secret="client-secret",
                )
            )
        )
    def test_is_openeo_auth_configured_for_password(self):
        # password auth is considered configured when both username and password are set.
        self.assertTrue(
            is_openeo_auth_configured(
                OpenEOConnectionSettings(
                    auth_method="password",
                    username="user@example.com",
                    password="secret",
                )
            )
        )
    def test_verify_openeo_auth_command_skips_when_unconfigured(self):
        # --skip-if-unconfigured makes the management command exit cleanly with
        # a warning instead of raising CommandError.
        stdout = StringIO()
        call_command("verify_openeo_auth", "--skip-if-unconfigured", stdout=stdout)
        self.assertIn("openEO auth check skipped", stdout.getvalue())
    def test_connect_openeo_applies_proxy_to_session(self):
        # connect_openeo must hand openeo.connect a session carrying the
        # configured proxy and honor the configured timeout as default_timeout.
        connection = Mock()
        connection.authenticate_oidc_resource_owner_password_credentials.return_value = connection
        openeo_module = Mock()
        openeo_module.connect.return_value = connection
        settings = OpenEOConnectionSettings(
            backend_url="https://openeofed.dataspace.copernicus.eu",
            auth_method="password",
            timeout_seconds=123,
            username="user@example.com",
            password="secret",
            proxy_url="socks5h://127.0.0.1:10808",
        )
        with patch.dict("sys.modules", {"openeo": openeo_module}):
            connect_openeo(settings)
        self.assertEqual(openeo_module.connect.call_args.kwargs["default_timeout"], 123)
        session = openeo_module.connect.call_args.kwargs["session"]
        # The proxy must be set on the session and env proxies must be ignored.
        self.assertEqual(session.proxies["https"], "socks5h://127.0.0.1:10808")
        self.assertFalse(session.trust_env)
+54 -49
View File
@@ -1,10 +1,13 @@
from datetime import date from datetime import date, timedelta
from types import SimpleNamespace from types import SimpleNamespace
from unittest.mock import patch from unittest.mock import patch
from django.test import TestCase, override_settings from django.test import TestCase, override_settings
from django.utils import timezone
from rest_framework.test import APIClient from rest_framework.test import APIClient
from farm_data.models import SensorData
from location_data.data_driven_subdivision import DEFAULT_CLUSTER_FEATURES
from location_data.models import ( from location_data.models import (
AnalysisGridCell, AnalysisGridCell,
AnalysisGridObservation, AnalysisGridObservation,
@@ -39,6 +42,13 @@ class RemoteSensingApiTests(TestCase):
) )
self.location.set_input_block_count(1) self.location.set_input_block_count(1)
self.location.save(update_fields=["input_block_count", "block_layout", "updated_at"]) self.location.save(update_fields=["input_block_count", "block_layout", "updated_at"])
self.farm = SensorData.objects.create(
farm_uuid="11111111-1111-1111-1111-111111111111",
center_location=self.location,
payload={},
)
self.temporal_end = timezone.localdate() - timedelta(days=1)
self.temporal_start = self.temporal_end - timedelta(days=30)
self.subdivision = BlockSubdivision.objects.create( self.subdivision = BlockSubdivision.objects.create(
soil_location=self.location, soil_location=self.location,
block_code="block-1", block_code="block-1",
@@ -51,10 +61,7 @@ class RemoteSensingApiTests(TestCase):
response = self.client.post( response = self.client.post(
"/remote-sensing/", "/remote-sensing/",
data={ data={
"lat": 35.7000, "farm_uuid": "22222222-2222-2222-2222-222222222222",
"lon": 51.4000,
"start_date": "2025-01-01",
"end_date": "2025-01-31",
}, },
format="json", format="json",
) )
@@ -64,16 +71,12 @@ class RemoteSensingApiTests(TestCase):
@patch("location_data.views.run_remote_sensing_analysis_task.delay") @patch("location_data.views.run_remote_sensing_analysis_task.delay")
def test_post_remote_sensing_enqueues_task_and_returns_processing(self, mock_delay): def test_post_remote_sensing_enqueues_task_and_returns_processing(self, mock_delay):
mock_delay.return_value = SimpleNamespace(id="task-123") mock_delay.return_value = SimpleNamespace(id="e723ba3e-c53c-401b-b3a0-5f7013c7b401")
response = self.client.post( response = self.client.post(
"/remote-sensing/", "/remote-sensing/",
data={ data={
"lat": 35.6892, "farm_uuid": str(self.farm.farm_uuid),
"lon": 51.3890,
"block_code": "block-1",
"start_date": "2025-01-01",
"end_date": "2025-01-31",
"force_refresh": False, "force_refresh": False,
}, },
format="json", format="json",
@@ -83,36 +86,34 @@ class RemoteSensingApiTests(TestCase):
payload = response.json()["data"] payload = response.json()["data"]
self.assertEqual(payload["status"], "processing") self.assertEqual(payload["status"], "processing")
self.assertEqual(payload["source"], "processing") self.assertEqual(payload["source"], "processing")
self.assertEqual(payload["task_id"], "task-123") self.assertEqual(payload["task_id"], "e723ba3e-c53c-401b-b3a0-5f7013c7b401")
self.assertEqual(payload["block_code"], "block-1") self.assertEqual(payload["block_code"], "")
self.assertEqual(payload["summary"]["cell_count"], 0) self.assertEqual(payload["summary"]["cell_count"], 0)
run = RemoteSensingRun.objects.get(id=payload["run"]["id"]) run = RemoteSensingRun.objects.get(id=payload["run"]["id"])
self.assertEqual(run.block_code, "block-1") self.assertEqual(run.block_code, "")
self.assertEqual(run.temporal_start, self.temporal_start)
self.assertEqual(run.temporal_end, self.temporal_end)
self.assertEqual(run.status, RemoteSensingRun.STATUS_PENDING) self.assertEqual(run.status, RemoteSensingRun.STATUS_PENDING)
self.assertEqual(run.metadata["stage"], "queued") self.assertEqual(run.metadata["stage"], "queued")
self.assertEqual(run.metadata["selected_features"], []) self.assertEqual(run.metadata["selected_features"], DEFAULT_CLUSTER_FEATURES)
mock_delay.assert_called_once() mock_delay.assert_called_once()
def test_get_remote_sensing_returns_processing_when_run_exists_without_results(self): def test_get_remote_sensing_returns_processing_when_run_exists_without_results(self):
RemoteSensingRun.objects.create( RemoteSensingRun.objects.create(
soil_location=self.location, soil_location=self.location,
block_subdivision=self.subdivision, block_subdivision=self.subdivision,
block_code="block-1", block_code="",
chunk_size_sqm=900, chunk_size_sqm=900,
temporal_start=date(2025, 1, 1), temporal_start=self.temporal_start,
temporal_end=date(2025, 1, 31), temporal_end=self.temporal_end,
status=RemoteSensingRun.STATUS_RUNNING, status=RemoteSensingRun.STATUS_RUNNING,
metadata={"task_id": "task-123"}, metadata={"task_id": "e723ba3e-c53c-401b-b3a0-5f7013c7b401"},
) )
response = self.client.get( response = self.client.get(
"/remote-sensing/", "/remote-sensing/",
data={ data={
"lat": 35.6892, "farm_uuid": str(self.farm.farm_uuid),
"lon": 51.3890,
"block_code": "block-1",
"start_date": "2025-01-01",
"end_date": "2025-01-31",
}, },
) )
@@ -127,16 +128,16 @@ class RemoteSensingApiTests(TestCase):
run = RemoteSensingRun.objects.create( run = RemoteSensingRun.objects.create(
soil_location=self.location, soil_location=self.location,
block_subdivision=self.subdivision, block_subdivision=self.subdivision,
block_code="block-1", block_code="",
chunk_size_sqm=900, chunk_size_sqm=900,
temporal_start=date(2025, 1, 1), temporal_start=self.temporal_start,
temporal_end=date(2025, 1, 31), temporal_end=self.temporal_end,
status=RemoteSensingRun.STATUS_SUCCESS, status=RemoteSensingRun.STATUS_SUCCESS,
) )
cell = AnalysisGridCell.objects.create( cell = AnalysisGridCell.objects.create(
soil_location=self.location, soil_location=self.location,
block_subdivision=self.subdivision, block_subdivision=self.subdivision,
block_code="block-1", block_code="",
cell_code="cell-1", cell_code="cell-1",
chunk_size_sqm=900, chunk_size_sqm=900,
geometry=self.boundary, geometry=self.boundary,
@@ -146,8 +147,8 @@ class RemoteSensingApiTests(TestCase):
AnalysisGridObservation.objects.create( AnalysisGridObservation.objects.create(
cell=cell, cell=cell,
run=run, run=run,
temporal_start=date(2025, 1, 1), temporal_start=self.temporal_start,
temporal_end=date(2025, 1, 31), temporal_end=self.temporal_end,
ndvi=0.61, ndvi=0.61,
ndwi=0.22, ndwi=0.22,
lst_c=24.5, lst_c=24.5,
@@ -161,11 +162,7 @@ class RemoteSensingApiTests(TestCase):
response = self.client.get( response = self.client.get(
"/remote-sensing/", "/remote-sensing/",
data={ data={
"lat": 35.6892, "farm_uuid": str(self.farm.farm_uuid),
"lon": 51.3890,
"block_code": "block-1",
"start_date": "2025-01-01",
"end_date": "2025-01-31",
}, },
) )
@@ -183,15 +180,19 @@ class RemoteSensingApiTests(TestCase):
run = RemoteSensingRun.objects.create( run = RemoteSensingRun.objects.create(
soil_location=self.location, soil_location=self.location,
block_subdivision=self.subdivision, block_subdivision=self.subdivision,
block_code="block-1", block_code="",
chunk_size_sqm=900, chunk_size_sqm=900,
temporal_start=date(2025, 1, 1), temporal_start=self.temporal_start,
temporal_end=date(2025, 1, 31), temporal_end=self.temporal_end,
status=RemoteSensingRun.STATUS_SUCCESS, status=RemoteSensingRun.STATUS_SUCCESS,
metadata={"stage": "completed", "selected_features": ["ndvi"]}, metadata={"stage": "completed", "selected_features": ["ndvi"]},
) )
response = self.client.get(f"/remote-sensing/runs/{run.id}/status/") task_id = "e723ba3e-c53c-401b-b3a0-5f7013c7b401"
run.metadata = {**run.metadata, "task_id": task_id}
run.save(update_fields=["metadata", "updated_at"])
response = self.client.get(f"/remote-sensing/runs/{task_id}/status/")
self.assertEqual(response.status_code, 200) self.assertEqual(response.status_code, 200)
payload = response.json()["data"] payload = response.json()["data"]
@@ -204,17 +205,17 @@ class RemoteSensingApiTests(TestCase):
run = RemoteSensingRun.objects.create( run = RemoteSensingRun.objects.create(
soil_location=self.location, soil_location=self.location,
block_subdivision=self.subdivision, block_subdivision=self.subdivision,
block_code="block-1", block_code="",
chunk_size_sqm=900, chunk_size_sqm=900,
temporal_start=date(2025, 1, 1), temporal_start=self.temporal_start,
temporal_end=date(2025, 1, 31), temporal_end=self.temporal_end,
status=RemoteSensingRun.STATUS_SUCCESS, status=RemoteSensingRun.STATUS_SUCCESS,
metadata={"stage": "completed"}, metadata={"stage": "completed"},
) )
cell = AnalysisGridCell.objects.create( cell = AnalysisGridCell.objects.create(
soil_location=self.location, soil_location=self.location,
block_subdivision=self.subdivision, block_subdivision=self.subdivision,
block_code="block-1", block_code="",
cell_code="cell-1", cell_code="cell-1",
chunk_size_sqm=900, chunk_size_sqm=900,
geometry=self.boundary, geometry=self.boundary,
@@ -224,8 +225,8 @@ class RemoteSensingApiTests(TestCase):
AnalysisGridObservation.objects.create( AnalysisGridObservation.objects.create(
cell=cell, cell=cell,
run=run, run=run,
temporal_start=date(2025, 1, 1), temporal_start=self.temporal_start,
temporal_end=date(2025, 1, 31), temporal_end=self.temporal_end,
ndvi=0.61, ndvi=0.61,
ndwi=0.22, ndwi=0.22,
lst_c=24.5, lst_c=24.5,
@@ -239,10 +240,10 @@ class RemoteSensingApiTests(TestCase):
soil_location=self.location, soil_location=self.location,
run=run, run=run,
block_subdivision=self.subdivision, block_subdivision=self.subdivision,
block_code="block-1", block_code="",
chunk_size_sqm=900, chunk_size_sqm=900,
temporal_start=date(2025, 1, 1), temporal_start=self.temporal_start,
temporal_end=date(2025, 1, 31), temporal_end=self.temporal_end,
cluster_count=1, cluster_count=1,
selected_features=["ndvi"], selected_features=["ndvi"],
metadata={"used_cell_count": 1, "skipped_cell_count": 0}, metadata={"used_cell_count": 1, "skipped_cell_count": 0},
@@ -255,7 +256,11 @@ class RemoteSensingApiTests(TestCase):
scaled_feature_values={"ndvi": 0.0}, scaled_feature_values={"ndvi": 0.0},
) )
response = self.client.get(f"/remote-sensing/runs/{run.id}/result/", data={"page": 1, "page_size": 10}) task_id = "e723ba3e-c53c-401b-b3a0-5f7013c7b401"
run.metadata = {**run.metadata, "task_id": task_id}
run.save(update_fields=["metadata", "updated_at"])
response = self.client.get(f"/remote-sensing/runs/{task_id}/status/", data={"page": 1, "page_size": 10})
self.assertEqual(response.status_code, 200) self.assertEqual(response.status_code, 200)
payload = response.json()["data"] payload = response.json()["data"]
+1 -3
View File
@@ -3,7 +3,6 @@ from django.urls import path
from .views import ( from .views import (
NdviHealthView, NdviHealthView,
RemoteSensingAnalysisView, RemoteSensingAnalysisView,
RemoteSensingRunResultView,
RemoteSensingRunStatusView, RemoteSensingRunStatusView,
SoilDataView, SoilDataView,
) )
@@ -11,7 +10,6 @@ from .views import (
urlpatterns = [ urlpatterns = [
path("", SoilDataView.as_view(), name="soil-data"), path("", SoilDataView.as_view(), name="soil-data"),
path("remote-sensing/", RemoteSensingAnalysisView.as_view(), name="remote-sensing"), path("remote-sensing/", RemoteSensingAnalysisView.as_view(), name="remote-sensing"),
path("remote-sensing/runs/<int:run_id>/status/", RemoteSensingRunStatusView.as_view(), name="remote-sensing-run-status"), path("remote-sensing/runs/<uuid:run_id>/status/", RemoteSensingRunStatusView.as_view(), name="remote-sensing-run-status"),
path("remote-sensing/runs/<int:run_id>/result/", RemoteSensingRunResultView.as_view(), name="remote-sensing-run-result"),
path("ndvi-health/", NdviHealthView.as_view(), name="ndvi-health"), path("ndvi-health/", NdviHealthView.as_view(), name="ndvi-health"),
] ]
+147 -184
View File
@@ -1,10 +1,14 @@
from datetime import timedelta
from django.apps import apps from django.apps import apps
from django.core.paginator import EmptyPage, Paginator from django.core.paginator import EmptyPage, Paginator
from django.db.models import Avg from django.db.models import Avg
from django.db import transaction from django.db import transaction
from django.utils import timezone
from rest_framework import status from rest_framework import status
from drf_spectacular.utils import ( from drf_spectacular.utils import (
OpenApiExample, OpenApiExample,
OpenApiParameter,
OpenApiResponse, OpenApiResponse,
extend_schema, extend_schema,
inline_serializer, inline_serializer,
@@ -25,19 +29,20 @@ from .models import (
RemoteSensingSubdivisionResult, RemoteSensingSubdivisionResult,
SoilLocation, SoilLocation,
) )
from farm_data.models import SensorData
from .data_driven_subdivision import DEFAULT_CLUSTER_FEATURES
from .serializers import ( from .serializers import (
BlockSubdivisionSerializer, BlockSubdivisionSerializer,
NdviHealthRequestSerializer, NdviHealthRequestSerializer,
NdviHealthResponseSerializer, NdviHealthResponseSerializer,
RemoteSensingCellObservationSerializer, RemoteSensingCellObservationSerializer,
RemoteSensingResponseSerializer, RemoteSensingResponseSerializer,
RemoteSensingResultQuerySerializer, RemoteSensingFarmRequestSerializer,
RemoteSensingRunResultResponseSerializer,
RemoteSensingRunSerializer, RemoteSensingRunSerializer,
RemoteSensingRunStatusResponseSerializer, RemoteSensingRunStatusResponseSerializer,
RemoteSensingSummarySerializer, RemoteSensingSummarySerializer,
RemoteSensingSubdivisionResultSerializer, RemoteSensingSubdivisionResultSerializer,
RemoteSensingTriggerSerializer,
SoilDataRequestSerializer, SoilDataRequestSerializer,
SoilLocationResponseSerializer, SoilLocationResponseSerializer,
) )
@@ -90,7 +95,7 @@ RemoteSensingQueuedEnvelopeSerializer = build_envelope_serializer(
"summary": RemoteSensingSummarySerializer(), "summary": RemoteSensingSummarySerializer(),
"cells": drf_serializers.JSONField(), "cells": drf_serializers.JSONField(),
"run": drf_serializers.JSONField(allow_null=True), "run": drf_serializers.JSONField(allow_null=True),
"task_id": drf_serializers.CharField(), "task_id": drf_serializers.UUIDField(),
}, },
), ),
) )
@@ -98,19 +103,13 @@ RemoteSensingRunStatusEnvelopeSerializer = build_envelope_serializer(
"RemoteSensingRunStatusEnvelopeSerializer", "RemoteSensingRunStatusEnvelopeSerializer",
RemoteSensingRunStatusResponseSerializer, RemoteSensingRunStatusResponseSerializer,
) )
RemoteSensingRunResultEnvelopeSerializer = build_envelope_serializer(
"RemoteSensingRunResultEnvelopeSerializer",
RemoteSensingRunResultResponseSerializer,
)
class SoilDataView(APIView): class SoilDataView(APIView):
""" """
ثبت مختصات گوشههای مزرعه و بلوکهای تعریفشده توسط کشاورز. ثبت مختصات گوشههای مزرعه و بلوکهای تعریفشده توسط کشاورز.
""" """
@extend_schema( @extend_schema(
tags=["Soil Data"], tags=["Location Data"],
summary="خواندن ساختار مزرعه و بلوک‌ها (GET)", summary="خواندن ساختار مزرعه و بلوک‌ها (GET)",
description="با ارسال lat و lon، ساختار ذخیره‌شده مزرعه، بلوک‌ها و آخرین خلاصه سنجش‌ازدور هر بلوک بازگردانده می‌شود.", description="با ارسال lat و lon، ساختار ذخیره‌شده مزرعه، بلوک‌ها و آخرین خلاصه سنجش‌ازدور هر بلوک بازگردانده می‌شود.",
parameters=[ parameters=[
@@ -175,7 +174,7 @@ class SoilDataView(APIView):
) )
@extend_schema( @extend_schema(
tags=["Soil Data"], tags=["Location Data"],
summary="ثبت مزرعه و بلوک‌های کشاورز (POST)", summary="ثبت مزرعه و بلوک‌های کشاورز (POST)",
description="مختصات گوشه‌های مزرعه و boundary هر بلوک کشاورز ذخیره می‌شود. هیچ subdivision سنکرونی اجرا نمی‌شود.", description="مختصات گوشه‌های مزرعه و boundary هر بلوک کشاورز ذخیره می‌شود. هیچ subdivision سنکرونی اجرا نمی‌شود.",
request=SoilDataRequestSerializer, request=SoilDataRequestSerializer,
@@ -306,7 +305,7 @@ class SoilDataView(APIView):
class NdviHealthView(APIView): class NdviHealthView(APIView):
@extend_schema( @extend_schema(
tags=["Soil Data"], tags=["Location Data"],
summary="دریافت NDVI سلامت مزرعه", summary="دریافت NDVI سلامت مزرعه",
description="با دریافت farm_uuid، داده NDVI سلامت پوشش گیاهی مزرعه را به صورت مستقل از dashboard برمی گرداند.", description="با دریافت farm_uuid، داده NDVI سلامت پوشش گیاهی مزرعه را به صورت مستقل از dashboard برمی گرداند.",
request=NdviHealthRequestSerializer, request=NdviHealthRequestSerializer,
@@ -359,10 +358,10 @@ class NdviHealthView(APIView):
class RemoteSensingAnalysisView(APIView): class RemoteSensingAnalysisView(APIView):
@extend_schema( @extend_schema(
tags=["Soil Data"], tags=["Location Data"],
summary="اجرای async تحلیل سنجش‌ازدور و subdivision داده‌محور", summary="اجرای async تحلیل سنجش‌ازدور و subdivision داده‌محور",
description="برای location موجود، pipeline کامل grid + openEO + observation persistence + KMeans clustering در Celery صف می‌شود و sync اجرا نمی‌شود.", description="برای location موجود، pipeline کامل grid + openEO + observation persistence + KMeans clustering در Celery صف می‌شود و sync اجرا نمی‌شود.",
request=RemoteSensingTriggerSerializer, request=RemoteSensingFarmRequestSerializer,
responses={ responses={
202: build_response( 202: build_response(
RemoteSensingQueuedEnvelopeSerializer, RemoteSensingQueuedEnvelopeSerializer,
@@ -381,21 +380,15 @@ class RemoteSensingAnalysisView(APIView):
OpenApiExample( OpenApiExample(
"نمونه درخواست remote sensing", "نمونه درخواست remote sensing",
value={ value={
"lat": 35.6892, "farm_uuid": "11111111-1111-1111-1111-111111111111",
"lon": 51.3890,
"block_code": "block-1",
"start_date": "2025-01-01",
"end_date": "2025-01-31",
"force_refresh": False, "force_refresh": False,
"cluster_count": 3,
"selected_features": ["ndvi", "ndwi", "soil_vv_db"],
}, },
request_only=True, request_only=True,
), ),
], ],
) )
def post(self, request): def post(self, request):
serializer = RemoteSensingTriggerSerializer(data=request.data) serializer = RemoteSensingFarmRequestSerializer(data=request.data)
if not serializer.is_valid(): if not serializer.is_valid():
return Response( return Response(
{"code": 400, "msg": "داده نامعتبر.", "data": serializer.errors}, {"code": 400, "msg": "داده نامعتبر.", "data": serializer.errors},
@@ -403,37 +396,41 @@ class RemoteSensingAnalysisView(APIView):
) )
payload = serializer.validated_data payload = serializer.validated_data
location = _get_location_by_lat_lon(payload["lat"], payload["lon"], prefetch=True) farm = SensorData.objects.select_related("center_location").filter(farm_uuid=payload["farm_uuid"]).first()
location = getattr(farm, "center_location", None)
if location is None: if location is None:
return Response( return Response(
{"code": 404, "msg": "location پیدا نشد.", "data": None}, {"code": 404, "msg": "location پیدا نشد.", "data": None},
status=status.HTTP_404_NOT_FOUND, status=status.HTTP_404_NOT_FOUND,
) )
block_code = str(payload.get("block_code", "") or "").strip() temporal_end = timezone.localdate() - timedelta(days=1)
temporal_start = temporal_end - timedelta(days=30)
run = RemoteSensingRun.objects.create( run = RemoteSensingRun.objects.create(
soil_location=location, soil_location=location,
block_code=block_code, block_code="",
chunk_size_sqm=_resolve_chunk_size_for_location(location, block_code), chunk_size_sqm=_resolve_chunk_size_for_location(location, ""),
temporal_start=payload["start_date"], temporal_start=temporal_start,
temporal_end=payload["end_date"], temporal_end=temporal_end,
status=RemoteSensingRun.STATUS_PENDING, status=RemoteSensingRun.STATUS_PENDING,
metadata={ metadata={
"requested_via": "api", "requested_via": "api",
"status_label": "pending", "status_label": "pending",
"cluster_count": payload.get("cluster_count"), "requested_cluster_count": None,
"selected_features": payload.get("selected_features") or [], "selected_features": list(DEFAULT_CLUSTER_FEATURES),
"farm_uuid": str(payload["farm_uuid"]),
"scope": "all_blocks",
}, },
) )
task_result = run_remote_sensing_analysis_task.delay( task_result = run_remote_sensing_analysis_task.delay(
soil_location_id=location.id, soil_location_id=location.id,
block_code=block_code, block_code="",
temporal_start=payload["start_date"].isoformat(), temporal_start=temporal_start.isoformat(),
temporal_end=payload["end_date"].isoformat(), temporal_end=temporal_end.isoformat(),
force_refresh=payload.get("force_refresh", False), force_refresh=payload.get("force_refresh", False),
run_id=run.id, run_id=run.id,
cluster_count=payload.get("cluster_count"), cluster_count=None,
selected_features=payload.get("selected_features"), selected_features=list(DEFAULT_CLUSTER_FEATURES),
) )
run.metadata = {**(run.metadata or {}), "task_id": task_result.id} run.metadata = {**(run.metadata or {}), "task_id": task_result.id}
run.save(update_fields=["metadata", "updated_at"]) run.save(update_fields=["metadata", "updated_at"])
@@ -443,11 +440,11 @@ class RemoteSensingAnalysisView(APIView):
"status": "processing", "status": "processing",
"source": "processing", "source": "processing",
"location": location_data, "location": location_data,
"block_code": block_code, "block_code": "",
"chunk_size_sqm": run.chunk_size_sqm, "chunk_size_sqm": run.chunk_size_sqm,
"temporal_extent": { "temporal_extent": {
"start_date": payload["start_date"].isoformat(), "start_date": temporal_start.isoformat(),
"end_date": payload["end_date"].isoformat(), "end_date": temporal_end.isoformat(),
}, },
"summary": _empty_remote_sensing_summary(), "summary": _empty_remote_sensing_summary(),
"cells": [], "cells": [],
@@ -460,15 +457,11 @@ class RemoteSensingAnalysisView(APIView):
) )
@extend_schema( @extend_schema(
tags=["Soil Data"], tags=["Location Data"],
summary="خواندن نتایج cache شده سنجش‌ازدور و subdivision", summary="خواندن نتایج cache شده سنجش‌ازدور و subdivision",
description="فقط نتایج ذخیره‌شده remote sensing و clustering را برمی‌گرداند و هیچ پردازش sync اجرا نمی‌کند.", description="فقط نتایج ذخیره‌شده remote sensing و clustering را برمی‌گرداند و هیچ پردازش sync اجرا نمی‌کند.",
parameters=[ parameters=[
{"name": "lat", "in": "query", "required": True, "schema": {"type": "number"}}, {"name": "farm_uuid", "in": "query", "required": True, "schema": {"type": "string", "format": "uuid"}},
{"name": "lon", "in": "query", "required": True, "schema": {"type": "number"}},
{"name": "block_code", "in": "query", "required": False, "schema": {"type": "string"}},
{"name": "start_date", "in": "query", "required": True, "schema": {"type": "string", "format": "date"}},
{"name": "end_date", "in": "query", "required": True, "schema": {"type": "string", "format": "date"}},
{"name": "page", "in": "query", "required": False, "schema": {"type": "integer", "default": 1}}, {"name": "page", "in": "query", "required": False, "schema": {"type": "integer", "default": 1}},
{"name": "page_size", "in": "query", "required": False, "schema": {"type": "integer", "default": 100}}, {"name": "page_size", "in": "query", "required": False, "schema": {"type": "integer", "default": 100}},
], ],
@@ -488,7 +481,7 @@ class RemoteSensingAnalysisView(APIView):
}, },
) )
def get(self, request): def get(self, request):
serializer = RemoteSensingResultQuerySerializer(data=request.query_params) serializer = RemoteSensingFarmRequestSerializer(data=request.query_params)
if not serializer.is_valid(): if not serializer.is_valid():
return Response( return Response(
{"code": 400, "msg": "داده نامعتبر.", "data": serializer.errors}, {"code": 400, "msg": "داده نامعتبر.", "data": serializer.errors},
@@ -496,31 +489,34 @@ class RemoteSensingAnalysisView(APIView):
) )
payload = serializer.validated_data payload = serializer.validated_data
location = _get_location_by_lat_lon(payload["lat"], payload["lon"], prefetch=True) farm = SensorData.objects.select_related("center_location").filter(farm_uuid=payload["farm_uuid"]).first()
location = getattr(farm, "center_location", None)
if location is None: if location is None:
return Response( return Response(
{"code": 404, "msg": "location پیدا نشد.", "data": None}, {"code": 404, "msg": "location پیدا نشد.", "data": None},
status=status.HTTP_404_NOT_FOUND, status=status.HTTP_404_NOT_FOUND,
) )
block_code = str(payload.get("block_code", "") or "").strip() temporal_end = timezone.localdate() - timedelta(days=1)
temporal_start = temporal_end - timedelta(days=30)
block_code = ""
observations = _get_remote_sensing_observations( observations = _get_remote_sensing_observations(
location=location, location=location,
block_code=block_code, block_code=block_code,
start_date=payload["start_date"], start_date=temporal_start,
end_date=payload["end_date"], end_date=temporal_end,
) )
run = _get_latest_remote_sensing_run( run = _get_latest_remote_sensing_run(
location=location, location=location,
block_code=block_code, block_code=block_code,
start_date=payload["start_date"], start_date=temporal_start,
end_date=payload["end_date"], end_date=temporal_end,
) )
subdivision_result = _get_remote_sensing_subdivision_result( subdivision_result = _get_remote_sensing_subdivision_result(
location=location, location=location,
block_code=block_code, block_code=block_code,
start_date=payload["start_date"], start_date=temporal_start,
end_date=payload["end_date"], end_date=temporal_end,
) )
if not observations.exists(): if not observations.exists():
@@ -532,11 +528,11 @@ class RemoteSensingAnalysisView(APIView):
"status": "processing" if processing else "not_found", "status": "processing" if processing else "not_found",
"source": "processing" if processing else "database", "source": "processing" if processing else "database",
"location": SoilLocationResponseSerializer(location).data, "location": SoilLocationResponseSerializer(location).data,
"block_code": block_code, "block_code": "",
"chunk_size_sqm": getattr(run, "chunk_size_sqm", None), "chunk_size_sqm": getattr(run, "chunk_size_sqm", None),
"temporal_extent": { "temporal_extent": {
"start_date": payload["start_date"].isoformat(), "start_date": temporal_start.isoformat(),
"end_date": payload["end_date"].isoformat(), "end_date": temporal_end.isoformat(),
}, },
"summary": _empty_remote_sensing_summary(), "summary": _empty_remote_sensing_summary(),
"cells": [], "cells": [],
@@ -576,11 +572,11 @@ class RemoteSensingAnalysisView(APIView):
"status": "success", "status": "success",
"source": "database", "source": "database",
"location": SoilLocationResponseSerializer(location).data, "location": SoilLocationResponseSerializer(location).data,
"block_code": block_code, "block_code": "",
"chunk_size_sqm": observations.first().cell.chunk_size_sqm, "chunk_size_sqm": observations.first().cell.chunk_size_sqm,
"temporal_extent": { "temporal_extent": {
"start_date": payload["start_date"].isoformat(), "start_date": temporal_start.isoformat(),
"end_date": payload["end_date"].isoformat(), "end_date": temporal_end.isoformat(),
}, },
"summary": _build_remote_sensing_summary(observations), "summary": _build_remote_sensing_summary(observations),
"cells": cells_data, "cells": cells_data,
@@ -597,54 +593,22 @@ class RemoteSensingAnalysisView(APIView):
class RemoteSensingRunStatusView(APIView): class RemoteSensingRunStatusView(APIView):
@extend_schema( @extend_schema(
tags=["Soil Data"], tags=["Location Data"],
summary="وضعیت run تحلیل سنجش‌ازدور", summary="وضعیت run تحلیل سنجش‌ازدور",
description="وضعیت async pipeline را با شناسه run برمی‌گرداند.", description="وضعیت async pipeline را با task_id از نوع UUID برمی‌گرداند. این task_id همان شناسه تسک Celery ذخیره‌شده در metadata.run است.",
responses={
200: build_response(
RemoteSensingRunStatusEnvelopeSerializer,
"وضعیت run بازگردانده شد.",
),
404: build_response(
SoilErrorResponseSerializer,
"run موردنظر پیدا نشد.",
),
},
)
def get(self, request, run_id):
run = RemoteSensingRun.objects.filter(pk=run_id).select_related("soil_location").first()
if run is None:
return Response(
{"code": 404, "msg": "run پیدا نشد.", "data": None},
status=status.HTTP_404_NOT_FOUND,
)
task_id = (run.metadata or {}).get("task_id")
response_payload = {
"status": RemoteSensingRunSerializer(run).data["status_label"],
"source": "database",
"run": RemoteSensingRunSerializer(run).data,
"task_id": task_id,
}
return Response(
{"code": 200, "msg": "success", "data": response_payload},
status=status.HTTP_200_OK,
)
class RemoteSensingRunResultView(APIView):
@extend_schema(
tags=["Soil Data"],
summary="نتیجه نهایی run تحلیل سنجش‌ازدور",
description="نتایج observation و subdivision داده‌محور را با شناسه run برمی‌گرداند.",
parameters=[ parameters=[
{"name": "page", "in": "query", "required": False, "schema": {"type": "integer", "default": 1}}, OpenApiParameter(
{"name": "page_size", "in": "query", "required": False, "schema": {"type": "integer", "default": 100}}, name="run_id",
type={"type": "string", "format": "uuid"},
location=OpenApiParameter.PATH,
required=True,
description="شناسه UUID تسک async (task_id).",
),
], ],
responses={ responses={
200: build_response( 200: build_response(
RemoteSensingRunResultEnvelopeSerializer, RemoteSensingRunStatusEnvelopeSerializer,
"نتیجه run بازگردانده شد.", "وضعیت run بازگردانده شد و بعد از اتمام، نتیجه نهایی نیز از همین route برگردانده می‌شود.",
), ),
404: build_response( 404: build_response(
SoilErrorResponseSerializer, SoilErrorResponseSerializer,
@@ -655,93 +619,92 @@ class RemoteSensingRunResultView(APIView):
def get(self, request, run_id): def get(self, request, run_id):
page = _safe_positive_int(request.query_params.get("page"), default=1) page = _safe_positive_int(request.query_params.get("page"), default=1)
page_size = min(_safe_positive_int(request.query_params.get("page_size"), default=100), MAX_REMOTE_SENSING_PAGE_SIZE) page_size = min(_safe_positive_int(request.query_params.get("page_size"), default=100), MAX_REMOTE_SENSING_PAGE_SIZE)
run = ( run = RemoteSensingRun.objects.filter(metadata__task_id=str(run_id)).select_related("soil_location").first()
RemoteSensingRun.objects.filter(pk=run_id)
.select_related("soil_location")
.first()
)
if run is None: if run is None:
return Response( return Response(
{"code": 404, "msg": "run پیدا نشد.", "data": None}, {"code": 404, "msg": "run با این task_id پیدا نشد.", "data": None},
status=status.HTTP_404_NOT_FOUND, status=status.HTTP_404_NOT_FOUND,
) )
location = _get_location_by_lat_lon(run.soil_location.latitude, run.soil_location.longitude, prefetch=True) response_payload = _build_remote_sensing_run_status_payload(run, page=page, page_size=page_size)
observations = _get_remote_sensing_observations(
location=run.soil_location,
block_code=run.block_code,
start_date=run.temporal_start,
end_date=run.temporal_end,
)
subdivision_result = getattr(run, "subdivision_result", None)
if not observations.exists():
response_payload = {
"status": RemoteSensingRunSerializer(run).data["status_label"],
"source": "processing" if run.status in {RemoteSensingRun.STATUS_PENDING, RemoteSensingRun.STATUS_RUNNING} else "database",
"location": SoilLocationResponseSerializer(location).data,
"block_code": run.block_code,
"chunk_size_sqm": run.chunk_size_sqm,
"temporal_extent": {
"start_date": run.temporal_start.isoformat() if run.temporal_start else None,
"end_date": run.temporal_end.isoformat() if run.temporal_end else None,
},
"summary": _empty_remote_sensing_summary(),
"cells": [],
"run": RemoteSensingRunSerializer(run).data,
"subdivision_result": None,
}
return Response(
{"code": 200, "msg": "success", "data": response_payload},
status=status.HTTP_200_OK,
)
paginated_observations = _paginate_observations(
observations,
page=page,
page_size=page_size,
)
paginated_assignments = []
pagination = {"cells": paginated_observations["pagination"]}
if subdivision_result is not None:
paginated = _paginate_assignments(
subdivision_result,
page=page,
page_size=page_size,
)
paginated_assignments = paginated["items"]
pagination["assignments"] = paginated["pagination"]
subdivision_data = None
if subdivision_result is not None:
subdivision_data = RemoteSensingSubdivisionResultSerializer(
subdivision_result,
context={"paginated_assignments": paginated_assignments},
).data
response_payload = {
"status": RemoteSensingRunSerializer(run).data["status_label"],
"source": "database",
"location": SoilLocationResponseSerializer(location).data,
"block_code": run.block_code,
"chunk_size_sqm": run.chunk_size_sqm,
"temporal_extent": {
"start_date": run.temporal_start.isoformat() if run.temporal_start else None,
"end_date": run.temporal_end.isoformat() if run.temporal_end else None,
},
"summary": _build_remote_sensing_summary(observations),
"cells": RemoteSensingCellObservationSerializer(paginated_observations["items"], many=True).data,
"run": RemoteSensingRunSerializer(run).data,
"subdivision_result": subdivision_data,
}
if pagination is not None:
response_payload["pagination"] = pagination
return Response( return Response(
{"code": 200, "msg": "success", "data": response_payload}, {"code": 200, "msg": "success", "data": response_payload},
status=status.HTTP_200_OK, status=status.HTTP_200_OK,
) )
def _build_remote_sensing_run_status_payload(run: RemoteSensingRun, *, page: int, page_size: int) -> dict:
    """Assemble the status/result payload for a remote-sensing run.

    While the run is still pending or running, only the serialized run and
    its Celery ``task_id`` (read from ``run.metadata``) are returned. Once
    the run has finished, the cached observations, summary, pagination info
    and the data-driven subdivision result are folded into the same payload.
    """
    serialized_run = RemoteSensingRunSerializer(run).data
    celery_task_id = (run.metadata or {}).get("task_id")

    # Short-circuit: an in-flight run has no persisted results to expose yet.
    if run.status in {RemoteSensingRun.STATUS_PENDING, RemoteSensingRun.STATUS_RUNNING}:
        return {
            "status": serialized_run["status_label"],
            "source": "database",
            "run": serialized_run,
            "task_id": celery_task_id,
        }

    location = _get_location_by_lat_lon(
        run.soil_location.latitude,
        run.soil_location.longitude,
        prefetch=True,
    )
    observations = _get_remote_sensing_observations(
        location=run.soil_location,
        block_code=run.block_code,
        start_date=run.temporal_start,
        end_date=run.temporal_end,
    )
    subdivision = getattr(run, "subdivision_result", None)

    payload = {
        "status": serialized_run["status_label"],
        "source": "database",
        "run": serialized_run,
        "task_id": celery_task_id,
        "location": SoilLocationResponseSerializer(location).data,
        "block_code": run.block_code,
        "chunk_size_sqm": run.chunk_size_sqm,
        "temporal_extent": {
            "start_date": run.temporal_start.isoformat() if run.temporal_start else None,
            "end_date": run.temporal_end.isoformat() if run.temporal_end else None,
        },
        "summary": _empty_remote_sensing_summary(),
        "cells": [],
        "subdivision_result": None,
    }

    # Finished run without persisted observations: return the empty shell.
    if not observations.exists():
        return payload

    obs_page = _paginate_observations(observations, page=page, page_size=page_size)
    pagination_info = {"cells": obs_page["pagination"]}
    assignment_items = []
    if subdivision is not None:
        assignment_page = _paginate_assignments(subdivision, page=page, page_size=page_size)
        assignment_items = assignment_page["items"]
        pagination_info["assignments"] = assignment_page["pagination"]
        payload["subdivision_result"] = RemoteSensingSubdivisionResultSerializer(
            subdivision,
            context={"paginated_assignments": assignment_items},
        ).data

    payload["summary"] = _build_remote_sensing_summary(observations)
    payload["cells"] = RemoteSensingCellObservationSerializer(obs_page["items"], many=True).data
    payload["pagination"] = pagination_info
    return payload
def _get_location_by_lat_lon(lat, lon, *, prefetch: bool = False): def _get_location_by_lat_lon(lat, lon, *, prefetch: bool = False):
lat_rounded = round(lat, 6) lat_rounded = round(lat, 6)
lon_rounded = round(lon, 6) lon_rounded = round(lon, 6)
@@ -869,18 +832,18 @@ def _clear_block_analysis_state(
subdivision.elbow_plot = None subdivision.elbow_plot = None
def _resolve_chunk_size_for_location(location: SoilLocation, block_code: str) -> int | None: def _resolve_chunk_size_for_location(location: SoilLocation, block_code: str) -> int:
if block_code: if block_code:
subdivision = location.block_subdivisions.filter(block_code=block_code).first() subdivision = location.block_subdivisions.filter(block_code=block_code).first()
if subdivision is not None: if subdivision is not None:
return subdivision.chunk_size_sqm return int(subdivision.chunk_size_sqm or 900)
block_layout = location.block_layout or {} block_layout = location.block_layout or {}
if not block_code: if not block_code:
return block_layout.get("analysis_grid_summary", {}).get("chunk_size_sqm") return int(block_layout.get("analysis_grid_summary", {}).get("chunk_size_sqm") or 900)
for block in block_layout.get("blocks", []): for block in block_layout.get("blocks", []):
if block.get("block_code") == block_code: if block.get("block_code") == block_code:
return block.get("analysis_grid_summary", {}).get("chunk_size_sqm") return int(block.get("analysis_grid_summary", {}).get("chunk_size_sqm") or 900)
return None return 900
def _get_remote_sensing_observations(*, location, block_code: str, start_date, end_date): def _get_remote_sensing_observations(*, location, block_code: str, start_date, end_date):
+1
View File
@@ -24,6 +24,7 @@ redis>=5.0,<5.1
# === HTTP & AI === # === HTTP & AI ===
requests>=2.31,<2.32 requests>=2.31,<2.32
PySocks>=1.7,<2
httpx>=0.27,<0.28 httpx>=0.27,<0.28
openai>=1.0,<1.40 openai>=1.0,<1.40
openeo>=0.29,<0.40 openeo>=0.29,<0.40
+173 -2
View File
@@ -5,15 +5,18 @@ import math
import random import random
import time import time
from abc import ABC, abstractmethod from abc import ABC, abstractmethod
from datetime import date, timedelta from datetime import date, datetime, timedelta, timezone
try: try:
import requests import requests
except ImportError: # pragma: no cover - handled when live adapter is used except ImportError: # pragma: no cover - handled when live adapter is used
requests = None requests = None
from config.proxy import build_requests_proxies
DEFAULT_FORECAST_DAYS = 7 DEFAULT_FORECAST_DAYS = 7
DEFAULT_WEATHER_PROXY_URL = "socks5h://host.docker.internal:10808"
DAILY_FIELDS = [ DAILY_FIELDS = [
"temperature_2m_max", "temperature_2m_max",
"temperature_2m_min", "temperature_2m_min",
@@ -43,10 +46,11 @@ class BaseWeatherAdapter(ABC):
class OpenMeteoWeatherAdapter(BaseWeatherAdapter): class OpenMeteoWeatherAdapter(BaseWeatherAdapter):
source_name = "open-meteo" source_name = "open-meteo"
def __init__(self, base_url: str, api_key: str = "", timeout: float = 60): def __init__(self, base_url: str, api_key: str = "", timeout: float = 60, proxy_url: str = ""):
self.base_url = base_url self.base_url = base_url
self.api_key = api_key self.api_key = api_key
self.timeout = timeout self.timeout = timeout
self.proxy_url = proxy_url
def fetch_forecast(self, latitude: float, longitude: float, days: int = DEFAULT_FORECAST_DAYS) -> dict: def fetch_forecast(self, latitude: float, longitude: float, days: int = DEFAULT_FORECAST_DAYS) -> dict:
if requests is None: if requests is None:
@@ -67,12 +71,155 @@ class OpenMeteoWeatherAdapter(BaseWeatherAdapter):
self.base_url, self.base_url,
params=params, params=params,
headers=headers, headers=headers,
proxies=build_requests_proxies(self.proxy_url),
timeout=self.timeout, timeout=self.timeout,
) )
response.raise_for_status() response.raise_for_status()
return response.json() return response.json()
class OpenWeatherOneCallAdapter(BaseWeatherAdapter):
    """Weather adapter backed by the OpenWeather One Call API.

    Responses are normalised into the same daily payload shape produced by
    ``OpenMeteoWeatherAdapter`` (an Open-Meteo-style ``daily`` dict keyed by
    ``DAILY_FIELDS`` plus ``time``), so downstream consumers stay
    provider-agnostic.
    """

    source_name = "openweather"

    def __init__(self, base_url: str, api_key: str, timeout: float = 60, proxy_url: str = ""):
        # proxy_url may be empty; presumably build_requests_proxies() then
        # yields no proxy configuration — confirm against config.proxy.
        self.base_url = base_url
        self.api_key = api_key
        self.timeout = timeout
        self.proxy_url = proxy_url

    def fetch_forecast(self, latitude: float, longitude: float, days: int = DEFAULT_FORECAST_DAYS) -> dict:
        """Fetch up to *days* daily forecast entries and convert them.

        Raises:
            RuntimeError: when the optional ``requests`` dependency is not
                installed, or when no API key was configured.
            requests.HTTPError: for non-2xx responses (via raise_for_status).
        """
        if requests is None:
            raise RuntimeError("requests package is required for OpenWeatherOneCallAdapter")
        if not self.api_key:
            raise RuntimeError("WEATHER_API_KEY is required for OpenWeatherOneCallAdapter")
        params = {
            "lat": latitude,
            "lon": longitude,
            "appid": self.api_key,
            # Only the daily block is consumed; skip the rest to cut payload size.
            "exclude": "minutely,hourly,alerts",
            "units": "metric",
        }
        response = requests.get(
            self.base_url,
            params=params,
            headers={"accept": "application/json"},
            proxies=build_requests_proxies(self.proxy_url),
            timeout=self.timeout,
        )
        response.raise_for_status()
        payload = response.json()
        return self._to_open_meteo_shape(payload, days=days)

    def _to_open_meteo_shape(self, payload: dict, *, days: int) -> dict:
        """Map an OpenWeather One Call response onto the Open-Meteo daily shape.

        Missing numeric fields become ``None`` (or 0.0 where a sum is built);
        unit conversions applied: wind m/s -> km/h (x3.6), ``pop`` fraction ->
        percent (x100).
        """
        daily_items = list(payload.get("daily") or [])[:days]
        daily = {field: [] for field in DAILY_FIELDS}
        daily["time"] = []
        for item in daily_items:
            temp = item.get("temp") or {}
            humidity = item.get("humidity")
            wind_speed_ms = item.get("wind_speed")
            rain = item.get("rain")
            snow = item.get("snow")
            # Combined liquid + frozen precipitation, treating absent values as 0.
            precipitation = _safe_float(rain, 0.0) + _safe_float(snow, 0.0)
            weather_id = None
            weather = item.get("weather") or []
            if weather and isinstance(weather[0], dict):
                weather_id = weather[0].get("id")
            # "dt" is a Unix timestamp; the date is taken in UTC.
            daily["time"].append(datetime.fromtimestamp(int(item["dt"]), tz=timezone.utc).date().isoformat())
            temp_min = _safe_float(temp.get("min"))
            temp_max = _safe_float(temp.get("max"))
            temp_day = _safe_float(temp.get("day"))
            daily["temperature_2m_min"].append(temp_min)
            daily["temperature_2m_max"].append(temp_max)
            # Prefer the provider's daytime temperature; fall back to min/max mean.
            daily["temperature_2m_mean"].append(
                temp_day if temp_day is not None else _mean_of_pair(temp_min, temp_max)
            )
            daily["precipitation_sum"].append(round(precipitation, 2))
            # "pop" is a 0..1 probability; expose it as a percentage.
            daily["precipitation_probability_max"].append(round(_safe_float(item.get("pop"), 0.0) * 100.0, 1))
            daily["relative_humidity_2m_mean"].append(_safe_float(humidity))
            # Convert m/s to km/h to match the Open-Meteo field's unit.
            daily["wind_speed_10m_max"].append(
                round(_safe_float(wind_speed_ms, 0.0) * 3.6, 2)
            )
            daily["et0_fao_evapotranspiration"].append(
                self._estimate_et0(
                    temp_min=temp_min,
                    temp_max=temp_max,
                    humidity=_safe_float(humidity, 55.0),
                    wind_speed_ms=_safe_float(wind_speed_ms, 0.0),
                    uvi=_safe_float(item.get("uvi"), 0.0),
                    clouds=_safe_float(item.get("clouds"), 0.0),
                    precipitation=precipitation,
                )
            )
            daily["weather_code"].append(self._map_weather_code(weather_id))
        return {
            "latitude": payload.get("lat"),
            "longitude": payload.get("lon"),
            "timezone": payload.get("timezone"),
            "daily": daily,
        }

    def _estimate_et0(
        self,
        *,
        temp_min: float | None,
        temp_max: float | None,
        humidity: float,
        wind_speed_ms: float,
        uvi: float,
        clouds: float,
        precipitation: float,
    ) -> float:
        """Heuristic daily reference-evapotranspiration estimate (mm/day).

        NOTE(review): this is an ad-hoc approximation built from UV index,
        humidity, cloud cover, wind and temperature with hand-tuned
        coefficients — it is NOT the FAO-56 Penman-Monteith equation despite
        the field name it feeds; treat the output as indicative only.
        """
        temp_mean = _mean_of_pair(temp_min, temp_max) or 20.0
        radiation_factor = max(uvi * 0.22, 0.35)
        # Drier air (low humidity) increases evaporative demand.
        dryness_factor = max(0.2, 1.0 - (humidity / 100.0))
        # Cloud cover attenuates radiation; floor keeps the factor positive.
        cloud_factor = max(0.3, 1.0 - (clouds / 140.0))
        # Rain days get a bounded penalty (max 0.8 mm reduction).
        rain_penalty = min(max(precipitation * 0.04, 0.0), 0.8)
        et0 = (
            0.9
            + max(temp_mean, 0.0) * 0.11
            + wind_speed_ms * 0.18
            + radiation_factor
            + dryness_factor * 1.6
        ) * cloud_factor
        return round(_clamp(et0 - rain_penalty, 0.3, 11.0), 2)

    def _map_weather_code(self, weather_id: int | None) -> int:
        """Translate an OpenWeather condition id into an Open-Meteo-style code.

        OpenWeather groups conditions by hundreds (2xx thunderstorm, 3xx
        drizzle, 5xx rain, 6xx snow, 7xx atmosphere, 80x clouds); the returned
        values follow the WMO-style codes the Open-Meteo payload uses
        (0-3 clear/cloudy, 45 fog, 51/61/63/65 drizzle/rain, 71 snow,
        95 thunderstorm). Unknown ids fall back to 0 (clear).
        """
        if weather_id is None:
            return 0
        if 200 <= weather_id < 300:
            return 95
        if 300 <= weather_id < 400:
            return 51
        if weather_id in {500, 520}:
            return 61
        if weather_id in {501, 521, 522, 531}:
            return 63
        if 502 <= weather_id < 600:
            return 65
        if weather_id in {600, 615, 620}:
            return 71
        if 601 <= weather_id < 700:
            return 71
        if weather_id in {701, 711, 721, 741}:
            return 45
        if weather_id in {731, 751, 761, 762}:
            return 3
        if weather_id == 800:
            return 0
        if weather_id == 801:
            return 1
        if weather_id == 802:
            return 2
        if weather_id in {803, 804}:
            return 3
        return 0
class MockWeatherAdapter(BaseWeatherAdapter): class MockWeatherAdapter(BaseWeatherAdapter):
source_name = "mock" source_name = "mock"
@@ -271,6 +418,14 @@ def get_weather_adapter() -> BaseWeatherAdapter:
base_url=settings.WEATHER_API_BASE_URL, base_url=settings.WEATHER_API_BASE_URL,
api_key=settings.WEATHER_API_KEY, api_key=settings.WEATHER_API_KEY,
timeout=getattr(settings, "WEATHER_TIMEOUT_SECONDS", 60), timeout=getattr(settings, "WEATHER_TIMEOUT_SECONDS", 60),
proxy_url=getattr(settings, "WEATHER_PROXY_URL", DEFAULT_WEATHER_PROXY_URL),
)
if provider == "openweather":
return OpenWeatherOneCallAdapter(
base_url=settings.WEATHER_API_BASE_URL,
api_key=settings.WEATHER_API_KEY,
timeout=getattr(settings, "WEATHER_TIMEOUT_SECONDS", 60),
proxy_url=getattr(settings, "WEATHER_PROXY_URL", DEFAULT_WEATHER_PROXY_URL),
) )
if provider == "mock": if provider == "mock":
if not (getattr(settings, "DEBUG", False) or getattr(settings, "DEVELOP", False)): if not (getattr(settings, "DEBUG", False) or getattr(settings, "DEVELOP", False)):
@@ -279,3 +434,19 @@ def get_weather_adapter() -> BaseWeatherAdapter:
delay_seconds=getattr(settings, "WEATHER_MOCK_DELAY_SECONDS", 0.8) delay_seconds=getattr(settings, "WEATHER_MOCK_DELAY_SECONDS", 0.8)
) )
raise ValueError(f"Unsupported weather data provider: {provider}") raise ValueError(f"Unsupported weather data provider: {provider}")
def _safe_float(value, default=None):
try:
if value in (None, ""):
return default
return float(value)
except (TypeError, ValueError):
return default
def _mean_of_pair(first: float | None, second: float | None) -> float | None:
values = [value for value in (first, second) if value is not None]
if not values:
return None
return round(sum(values) / len(values), 2)
+5
View File
@@ -34,3 +34,8 @@ class WeatherConfig(AppConfig):
def get_weather_data_adapter(self): def get_weather_data_adapter(self):
return self.weather_data_adapter return self.weather_data_adapter
    def update_weather_for_location(self, location):
        """Delegate to the services layer's update function.

        The import is deferred to call time to avoid importing app modules
        during Django app loading.
        """
        from .services import update_weather_for_location
        return update_weather_for_location(location)
+56 -1
View File
@@ -2,9 +2,10 @@ from __future__ import annotations
from django.apps import apps from django.apps import apps
from django.test import SimpleTestCase, TestCase, override_settings from django.test import SimpleTestCase, TestCase, override_settings
from unittest.mock import Mock, patch
from location_data.models import SoilLocation from location_data.models import SoilLocation
from weather.adapters import MockWeatherAdapter, OpenMeteoWeatherAdapter from weather.adapters import MockWeatherAdapter, OpenMeteoWeatherAdapter, OpenWeatherOneCallAdapter
from weather.models import WeatherForecast from weather.models import WeatherForecast
from weather.services import fetch_weather_from_api, update_weather_for_location from weather.services import fetch_weather_from_api, update_weather_for_location
@@ -70,6 +71,60 @@ class WeatherAdapterSelectionTests(SimpleTestCase):
self.assertIsInstance(adapter, OpenMeteoWeatherAdapter) self.assertIsInstance(adapter, OpenMeteoWeatherAdapter)
self.assertEqual(adapter.timeout, 12) self.assertEqual(adapter.timeout, 12)
@override_settings(
    WEATHER_DATA_PROVIDER="openweather",
    WEATHER_API_BASE_URL="https://api.openweathermap.org/data/3.0/onecall",
    WEATHER_API_KEY="test-key",
    WEATHER_TIMEOUT_SECONDS=18,
)
def test_app_config_returns_openweather_adapter(self):
    """App config yields an OpenWeatherOneCallAdapter for provider 'openweather'."""
    config = apps.get_app_config("weather")
    # Evict any previously built adapter so the overridden settings are
    # re-read — presumably `weather_data_adapter` is a cached_property that
    # stores its value in the instance __dict__; confirm in weather/apps.py.
    config.__dict__.pop("weather_data_adapter", None)
    adapter = config.get_weather_data_adapter()
    self.assertIsInstance(adapter, OpenWeatherOneCallAdapter)
    # Timeout must come from the overridden WEATHER_TIMEOUT_SECONDS.
    self.assertEqual(adapter.timeout, 18)
class OpenWeatherOneCallAdapterTests(SimpleTestCase):
    """Tests for OpenWeatherOneCallAdapter's translation of One Call 3.0 payloads."""

    @patch("weather.adapters.requests.get")
    def test_adapter_maps_openweather_daily_payload_to_internal_shape(self, mock_get):
        """A single OpenWeather daily entry maps onto the internal daily-arrays shape."""
        # Minimal One Call 3.0 response carrying one "daily" forecast entry.
        response = Mock()
        response.json.return_value = {
            "lat": 35.71,
            "lon": 51.4,
            "timezone": "Asia/Tehran",
            "daily": [
                {
                    # 1775001600 == 2026-04-01T00:00:00 UTC.
                    "dt": 1775001600,
                    "temp": {"min": 12.0, "max": 24.0, "day": 19.0},
                    "humidity": 44,
                    "wind_speed": 5.0,
                    "pop": 0.35,
                    "rain": 2.4,
                    "clouds": 25,
                    "uvi": 7.0,
                    "weather": [{"id": 500, "main": "Rain"}],
                }
            ],
        }
        mock_get.return_value = response
        adapter = OpenWeatherOneCallAdapter(
            base_url="https://api.openweathermap.org/data/3.0/onecall",
            api_key="test-key",
            timeout=10,
        )
        payload = adapter.fetch_forecast(35.71, 51.4, days=1)
        # The epoch timestamp is rendered as an ISO date string.
        self.assertEqual(payload["daily"]["time"], ["2026-04-01"])
        # "day" temperature is surfaced as the daily mean.
        self.assertEqual(payload["daily"]["temperature_2m_mean"], [19.0])
        self.assertEqual(payload["daily"]["precipitation_sum"], [2.4])
        # pop 0.35 (fraction) is scaled to a percentage.
        self.assertEqual(payload["daily"]["precipitation_probability_max"], [35.0])
        # wind_speed 5.0 m/s is converted to 18.0 km/h (x3.6).
        self.assertEqual(payload["daily"]["wind_speed_10m_max"], [18.0])
        # OWM condition id 500 is remapped to 61 — presumably a WMO weather
        # code; confirm against the adapter's mapping table.
        self.assertEqual(payload["daily"]["weather_code"], [61])
@override_settings(WEATHER_DATA_PROVIDER="mock", WEATHER_MOCK_DELAY_SECONDS=0) @override_settings(WEATHER_DATA_PROVIDER="mock", WEATHER_MOCK_DELAY_SECONDS=0)
class WeatherServiceTests(TestCase): class WeatherServiceTests(TestCase):