This commit is contained in:
2026-05-14 22:58:32 +03:30
parent a4763265bf
commit 86cea06f3a
12 changed files with 817 additions and 38 deletions
Binary file not shown.
@@ -0,0 +1,31 @@
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add cluster/location context fields to device-hub models.

    Adds a nullable, indexed ``cluster_uuid`` and a JSON ``location_metadata``
    field to both FarmDevice and SensorExternalRequestLog so sensor requests
    can be attributed to a KMeans cluster and carry location hints.
    """

    dependencies = [
        ("device_hub", "0009_sync_devicecatalog_schema"),
    ]

    operations = [
        # FarmDevice: optional cluster link, indexed for per-cluster lookups.
        migrations.AddField(
            model_name="farmdevice",
            name="cluster_uuid",
            field=models.UUIDField(blank=True, db_index=True, null=True),
        ),
        # FarmDevice: free-form location hints (lat/lon/zone/...), defaults to {}.
        migrations.AddField(
            model_name="farmdevice",
            name="location_metadata",
            field=models.JSONField(blank=True, default=dict),
        ),
        # Request log mirrors the same two fields so each logged request keeps
        # the context that was in effect when it arrived.
        migrations.AddField(
            model_name="sensorexternalrequestlog",
            name="cluster_uuid",
            field=models.UUIDField(blank=True, db_index=True, null=True),
        ),
        migrations.AddField(
            model_name="sensorexternalrequestlog",
            name="location_metadata",
            field=models.JSONField(blank=True, default=dict),
        ),
    ]
+12
View File
@@ -60,6 +60,8 @@ class FarmDevice(models.Model):
physical_device_uuid = models.UUIDField(default=uuid_lib.uuid4, unique=True, db_index=True)
name = models.CharField(max_length=255)
sensor_type = models.CharField(max_length=255, blank=True, default="")
cluster_uuid = models.UUIDField(null=True, blank=True, db_index=True)
location_metadata = models.JSONField(default=dict, blank=True)
is_active = models.BooleanField(default=True)
specifications = models.JSONField(default=dict, blank=True)
power_source = models.JSONField(default=dict, blank=True)
@@ -90,11 +92,21 @@ class FarmDevice(models.Model):
return catalog
return None
def get_sensor_key(self):
if self.sensor_catalog and self.sensor_catalog.code:
return self.sensor_catalog.code
return "sensor-7-1"
def get_ai_device_key(self):
return f"device:{self.physical_device_uuid}"
class SensorExternalRequestLog(models.Model):
farm_uuid = models.UUIDField(db_index=True)
sensor_catalog_uuid = models.UUIDField(null=True, blank=True, db_index=True)
physical_device_uuid = models.UUIDField(db_index=True)
cluster_uuid = models.UUIDField(null=True, blank=True, db_index=True)
location_metadata = models.JSONField(default=dict, blank=True)
payload = models.JSONField(default=dict, blank=True)
created_at = models.DateTimeField(auto_now_add=True)
+6
View File
@@ -61,6 +61,8 @@ class FarmDeviceLogSerializer(serializers.ModelSerializer):
"physical_device_uuid",
"name",
"sensor_type",
"cluster_uuid",
"location_metadata",
"is_active",
"specifications",
"power_source",
@@ -105,6 +107,8 @@ class DeviceDetailSerializer(serializers.ModelSerializer):
"physical_device_uuid",
"name",
"sensor_type",
"cluster_uuid",
"location_metadata",
"is_active",
"specifications",
"power_source",
@@ -171,6 +175,8 @@ class SensorExternalRequestLogSerializer(serializers.ModelSerializer):
"farm_uuid",
"sensor_catalog_uuid",
"physical_device_uuid",
"cluster_uuid",
"location_metadata",
"farm_device",
"sensor_catalog",
"payload",
+166 -6
View File
@@ -1,6 +1,7 @@
from copy import deepcopy
from datetime import timedelta
import logging
import uuid
from django.conf import settings
from django.db import OperationalError, ProgrammingError, transaction
@@ -99,14 +100,23 @@ def get_latest_sensor_external_request_log(*, farm_uuid, sensor_catalog_uuid, ph
def create_sensor_external_notification(*, physical_device_uuid, payload=None):
    """Record an external sensor request and create the matching notification.

    Looks up the FarmDevice by its physical UUID and delegates to
    ``create_sensor_external_notification_for_sensor``.

    Raises:
        ValueError: if no device matches ``physical_device_uuid``.
    """
    payload = payload or {}
    sensor = (
        FarmDevice.objects
        .select_related("farm", "farm__current_crop_area", "sensor_catalog")
        .filter(physical_device_uuid=physical_device_uuid)
        .first()
    )
    # Guard BEFORE building the runtime context: build_sensor_runtime_context
    # dereferences sensor attributes, so a missing device would otherwise
    # raise AttributeError instead of the ValueError callers expect to catch.
    if sensor is None:
        raise ValueError("Physical device not found.")
    runtime_context = build_sensor_runtime_context(sensor=sensor, payload=payload)
    return create_sensor_external_notification_for_sensor(
        sensor=sensor, payload=payload, runtime_context=runtime_context
    )
def create_sensor_external_notification_for_sensor(*, sensor, payload=None, runtime_context=None):
payload = payload or {}
if sensor is None:
raise ValueError("Physical device not found.")
runtime_context = runtime_context or build_sensor_runtime_context(sensor=sensor, payload=payload)
try:
with transaction.atomic():
SensorExternalRequestLog.objects.create(
farm_uuid=sensor.farm.farm_uuid,
sensor_catalog_uuid=sensor.sensor_catalog.uuid if sensor.sensor_catalog else None,
physical_device_uuid=sensor.physical_device_uuid,
cluster_uuid=runtime_context["cluster_uuid"],
location_metadata=runtime_context["location_metadata"],
payload=payload,
)
return create_notification_for_farm_uuid(
@@ -114,7 +124,14 @@ def create_sensor_external_notification(*, physical_device_uuid, payload=None):
title="Sensor external API request",
message=f"Payload received from device {sensor.physical_device_uuid}.",
level="info",
metadata={"farm_uuid": str(sensor.farm.farm_uuid), "sensor_catalog_uuid": str(sensor.sensor_catalog.uuid) if sensor.sensor_catalog else None, "physical_device_uuid": str(sensor.physical_device_uuid), "payload": payload},
metadata={
"farm_uuid": str(sensor.farm.farm_uuid),
"sensor_catalog_uuid": str(sensor.sensor_catalog.uuid) if sensor.sensor_catalog else None,
"physical_device_uuid": str(sensor.physical_device_uuid),
"cluster_uuid": str(runtime_context["cluster_uuid"]) if runtime_context["cluster_uuid"] else None,
"location_metadata": runtime_context["location_metadata"],
"payload": payload,
},
)
except (ProgrammingError, OperationalError) as exc:
raise ValueError("Sensor external API tables are not migrated.") from exc
@@ -123,15 +140,31 @@ def create_sensor_external_notification(*, physical_device_uuid, payload=None):
def forward_sensor_payload_to_farm_data(*, physical_device_uuid, payload=None):
    """Forward a sensor payload to the farm_data service by physical UUID.

    Looks up the FarmDevice and delegates to
    ``forward_sensor_payload_to_farm_data_for_sensor``.

    Raises:
        ValueError: if no device matches ``physical_device_uuid``.
    """
    payload = payload or {}
    sensor = (
        FarmDevice.objects
        .select_related("farm", "farm__current_crop_area", "sensor_catalog")
        .filter(physical_device_uuid=physical_device_uuid)
        .first()
    )
    # Guard BEFORE building the runtime context: build_sensor_runtime_context
    # reads sensor.location_metadata / sensor.cluster_uuid, so a missing device
    # would raise AttributeError here instead of the intended ValueError.
    if sensor is None:
        raise ValueError("Physical device not found.")
    runtime_context = build_sensor_runtime_context(sensor=sensor, payload=payload)
    return forward_sensor_payload_to_farm_data_for_sensor(
        sensor=sensor, payload=payload, runtime_context=runtime_context
    )
def forward_sensor_payload_to_farm_data_for_sensor(*, sensor, payload=None, runtime_context=None):
payload = payload or {}
if sensor is None:
raise ValueError("Physical device not found.")
farm_boundary = _get_farm_boundary(sensor=sensor)
api_key = getattr(settings, "FARM_DATA_API_KEY", "")
if not api_key:
raise FarmDataForwardError("FARM_DATA_API_KEY is not configured.")
runtime_context = runtime_context or build_sensor_runtime_context(sensor=sensor, payload=payload)
sensor_key = _get_sensor_key(sensor=sensor)
normalized_sensor_payload = _normalize_sensor_payload(sensor_key=sensor_key, sensor_payload=payload)
request_payload = {"farm_uuid": str(sensor.farm.farm_uuid), "farm_boundary": farm_boundary, "sensor_key": sensor_key, "sensor_payload": normalized_sensor_payload}
request_payload = {
"farm_uuid": str(sensor.farm.farm_uuid),
"farm_boundary": farm_boundary,
"sensor_key": sensor_key,
"sensor_payload": _build_ai_sensor_payload(
sensor=sensor,
sensor_key=sensor_key,
sensor_payload=payload,
runtime_context=runtime_context,
),
}
try:
response = external_api_request(
"ai",
@@ -169,10 +202,137 @@ def _normalize_sensor_payload(*, sensor_key, sensor_payload):
return {sensor_key: sensor_payload}
def _build_ai_sensor_payload(*, sensor, sensor_key, sensor_payload, runtime_context=None):
    """Wrap a raw sensor payload in the per-device envelope sent to the AI service.

    Returns a one-entry dict keyed by the device's AI key; the value carries
    the metrics plus device/cluster/location metadata.

    Raises:
        FarmDataForwardError: if ``sensor_payload`` is non-empty but not a dict.
    """
    if sensor_payload and not isinstance(sensor_payload, dict):
        raise FarmDataForwardError("`payload` must be a JSON object.")
    metrics = _extract_payload(sensor_payload)
    context = runtime_context or build_sensor_runtime_context(sensor=sensor, payload=sensor_payload)
    cluster_uuid = context["cluster_uuid"]
    location = context["location_metadata"]
    catalog = sensor.sensor_catalog
    envelope = {
        "sensor_key": sensor_key,
        "physical_device_uuid": str(sensor.physical_device_uuid),
        "recorded_at": timezone.now().isoformat(),
        "metrics": metrics or {},
        "metadata": {
            "source_service": "backend_device_hub",
            "farm_device_uuid": str(sensor.uuid),
            "sensor_catalog_uuid": str(catalog.uuid) if catalog else None,
            "sensor_type": sensor.sensor_type or "",
            "device_name": sensor.name or "",
            "cluster_uuid": str(cluster_uuid) if cluster_uuid else None,
            "location": location,
        },
    }
    # Promote cluster/zone/depth to top level when present, for the AI API.
    if cluster_uuid is not None:
        envelope["cluster_uuid"] = str(cluster_uuid)
    for field in ("zone", "depth_cm"):
        if location.get(field) is not None:
            envelope[field] = location[field]
    return {sensor.get_ai_device_key(): envelope}
def build_sensor_runtime_context(*, sensor, payload=None):
    """Merge stored device cluster/location info with hints from the payload.

    Payload-supplied values win over what is stored on the device. Returns a
    dict with ``cluster_uuid`` and ``location_metadata`` keys.
    """
    data = payload or {}
    merged_location = dict(sensor.location_metadata or {})
    merged_location.update(_extract_payload_location_metadata(data))
    return {
        # Payload cluster takes precedence over the device's stored cluster.
        "cluster_uuid": _extract_cluster_uuid(data) or sensor.cluster_uuid,
        "location_metadata": merged_location,
    }
def sync_sensor_runtime_context(*, sensor, payload=None):
    """Persist payload-derived cluster/location onto the device when changed.

    Saves only the fields that differ (plus ``updated_at``) and returns the
    computed runtime context.

    Raises:
        ValueError: if ``sensor`` is None.
    """
    if sensor is None:
        raise ValueError("Physical device not found.")
    context = build_sensor_runtime_context(sensor=sensor, payload=payload)
    changed_fields = []
    if context["cluster_uuid"] != sensor.cluster_uuid:
        sensor.cluster_uuid = context["cluster_uuid"]
        changed_fields.append("cluster_uuid")
    if context["location_metadata"] != (sensor.location_metadata or {}):
        sensor.location_metadata = context["location_metadata"]
        changed_fields.append("location_metadata")
    if changed_fields:
        # Keep the timestamp in sync; assumes the model has an auto-updating
        # `updated_at` field — confirm against the FarmDevice definition.
        changed_fields.append("updated_at")
        sensor.save(update_fields=changed_fields)
    return context
def _extract_cluster_uuid(payload):
    """Return the first parseable cluster UUID from the payload or its metadata.

    Checks snake_case and camelCase keys at the top level first, then inside
    ``payload["metadata"]``. Returns None if nothing parses.
    """
    if not isinstance(payload, dict):
        return None
    meta = payload.get("metadata")
    if not isinstance(meta, dict):
        meta = {}
    lookups = (
        (payload, "cluster_uuid"),
        (payload, "clusterId"),
        (meta, "cluster_uuid"),
        (meta, "clusterId"),
    )
    for source, key in lookups:
        parsed = _parse_uuid(source.get(key))
        if parsed is not None:
            return parsed
    return None
def _extract_payload_location_metadata(payload):
if not isinstance(payload, dict):
return {}
metadata = payload.get("metadata") if isinstance(payload.get("metadata"), dict) else {}
location = payload.get("location") if isinstance(payload.get("location"), dict) else {}
coordinates = payload.get("coordinates") if isinstance(payload.get("coordinates"), dict) else {}
lat = payload.get("lat", payload.get("latitude"))
lon = payload.get("lon", payload.get("lng", payload.get("longitude")))
if lat is None:
lat = location.get("lat", location.get("latitude"))
if lon is None:
lon = location.get("lon", location.get("lng", location.get("longitude")))
if lat is None:
lat = coordinates.get("lat", coordinates.get("latitude"))
if lon is None:
lon = coordinates.get("lon", coordinates.get("lng", coordinates.get("longitude")))
result = {}
if lat is not None:
result["lat"] = lat
if lon is not None:
result["lon"] = lon
for key in ("zone", "depth_cm", "cluster_code", "cluster_label"):
value = payload.get(key, metadata.get(key))
if value not in (None, ""):
result[key] = value
if location:
result["location"] = location
elif coordinates:
result["location"] = coordinates
return result
def _parse_uuid(value):
if value in (None, ""):
return None
try:
return uuid.UUID(str(value))
except (TypeError, ValueError, AttributeError):
return None
def _get_sensor_key(*, sensor):
if sensor.sensor_catalog and sensor.sensor_catalog.code:
return sensor.sensor_catalog.code
return "sensor-7-1"
return sensor.get_sensor_key()
def _get_farm_data_path():
+20 -3
View File
@@ -13,7 +13,7 @@ from soil.serializers import SoilComparisonChartSerializer, SoilRadarChartSerial
from .authentication import SensorExternalAPIKeyAuthentication
from .sensor_serializers import DeviceSummarySerializer, Sensor7In1SummarySerializer, SensorComparisonChartQuerySerializer, SensorComparisonChartResponseSerializer, SensorRadarChartQuerySerializer, SensorRadarChartResponseSerializer, SensorValuesListQuerySerializer, SensorValuesListResponseSerializer
from .serializers import DeviceCatalogSerializer, DeviceCodeListResponseSerializer, DeviceCodeQuerySerializer, DeviceCommandRequestSerializer, DeviceCommandResponseSerializer, DeviceDetailSerializer, DeviceLatestPayloadSerializer, DeviceRangeQuerySerializer, SensorExternalRequestLogQuerySerializer, SensorExternalRequestLogSerializer, SensorExternalRequestSerializer
from .services import DeviceDataUnavailableError, FarmDataForwardError, build_device_comparison_chart, build_device_latest_payload, build_device_radar_chart, build_device_summary, build_device_values_list, create_sensor_external_notification, execute_device_command, forward_sensor_payload_to_farm_data, get_farm_device_by_physical_uuid, get_farm_device_map_for_logs, get_primary_soil_sensor, get_sensor_7_in_1_comparison_chart_data, get_sensor_7_in_1_radar_chart_data, get_sensor_7_in_1_summary_data, get_sensor_comparison_chart_data, get_sensor_external_request_logs_for_farm, get_sensor_radar_chart_data, get_sensor_values_list_data, validate_output_device_catalog
from .services import DeviceDataUnavailableError, FarmDataForwardError, build_device_comparison_chart, build_device_latest_payload, build_device_radar_chart, build_device_summary, build_device_values_list, create_sensor_external_notification_for_sensor, execute_device_command, forward_sensor_payload_to_farm_data_for_sensor, get_farm_device_by_physical_uuid, get_farm_device_map_for_logs, get_primary_soil_sensor, get_sensor_7_in_1_comparison_chart_data, get_sensor_7_in_1_radar_chart_data, get_sensor_7_in_1_summary_data, get_sensor_comparison_chart_data, get_sensor_external_request_logs_for_farm, get_sensor_radar_chart_data, get_sensor_values_list_data, sync_sensor_runtime_context, validate_output_device_catalog
class DeviceCatalogListView(APIView):
@@ -297,9 +297,26 @@ class SensorExternalAPIView(APIView):
def post(self, request):
serializer = SensorExternalRequestSerializer(data=request.data)
serializer.is_valid(raise_exception=True)
sensor = get_farm_device_by_physical_uuid(
physical_device_uuid=serializer.validated_data["uuid"]
)
if sensor is None:
return Response({"code": 404, "msg": "Physical device not found."}, status=status.HTTP_404_NOT_FOUND)
try:
notification = create_sensor_external_notification(physical_device_uuid=serializer.validated_data["uuid"], payload=serializer.validated_data.get("payload"))
forward_sensor_payload_to_farm_data(physical_device_uuid=serializer.validated_data["uuid"], payload=serializer.validated_data.get("payload"))
runtime_context = sync_sensor_runtime_context(
sensor=sensor,
payload=serializer.validated_data.get("payload"),
)
forward_sensor_payload_to_farm_data_for_sensor(
sensor=sensor,
payload=serializer.validated_data.get("payload"),
runtime_context=runtime_context,
)
notification = create_sensor_external_notification_for_sensor(
sensor=sensor,
payload=serializer.validated_data.get("payload"),
runtime_context=runtime_context,
)
except ValueError as exc:
if "not migrated" in str(exc):
return Response({"code": 503, "msg": "Required tables are not ready. Run migrations."}, status=status.HTTP_503_SERVICE_UNAVAILABLE)
+188 -25
View File
@@ -655,6 +655,23 @@
- استفاده از `crop_simulation`
- پیشنهاد بهترین گیاه برای هر cluster
### query params
- `farm_uuid` اجباری است و باید UUID مزرعه باشد.
نمونه:
```http
GET /api/location-data/remote-sensing/cluster-recommendations/?farm_uuid=11111111-1111-1111-1111-111111111111
```
### پیش‌نیازها
- مزرعه باید در `farm_data` وجود داشته باشد.
- برای مزرعه باید حداقل یک گیاه ثبت شده باشد.
- برای `location` همان مزرعه باید خروجی KMeans در `location_data` موجود باشد.
- داده‌های لازم برای ساخت payload شبیه‌سازی باید قابل تولید باشند.
### response موفق `200`
```json
@@ -663,12 +680,113 @@
"msg": "success",
"data": {
"farm_uuid": "11111111-1111-1111-1111-111111111111",
"location_id": 12,
"evaluated_plant_count": 2,
"location_id": 1,
"evaluated_plant_count": 4,
"cluster_count": 2,
"registered_plants": [],
"clusters": [],
"source_metadata": {}
"registered_plants": [
{
"plant_id": 201,
"plant_name": "maize",
"position": 0,
"stage": ""
}
],
"clusters": [
{
"block_code": "",
"cluster_uuid": "daa278cb-cf75-4f17-bc94-bb3a780dd4d4",
"sub_block_code": "cluster-0",
"cluster_label": 0,
"temporal_extent": {
"start_date": "2026-04-11",
"end_date": "2026-05-11"
},
"cluster_block": {
"uuid": "daa278cb-cf75-4f17-bc94-bb3a780dd4d4",
"sub_block_code": "cluster-0",
"cluster_label": 0,
"chunk_size_sqm": 900,
"centroid_lat": "49.999770",
"centroid_lon": "49.999920",
"center_cell_code": "loc-1__block-farm__chunk-900__r0000c0000",
"center_cell_lat": "49.999635",
"center_cell_lon": "49.999710",
"cell_count": 4,
"cell_codes": [
"loc-1__block-farm__chunk-900__r0000c0000"
],
"geometry": {
"type": "Polygon",
"coordinates": []
},
"metadata": {}
},
"satellite_metrics": {
"ndvi": 0.659927,
"ndwi": -0.571487,
"soil_vv_db": -13.73536,
"soil_vv": -13.127913
},
"sensor_metrics": {},
"resolved_metrics": {
"ndvi": 0.659927,
"ndwi": -0.571487,
"soil_vv_db": -13.73536,
"soil_vv": -13.127913
},
"candidate_plants": [
{
"plant_id": 401,
"plant_name": "wheat",
"position": 3,
"stage": "",
"score": 123.4,
"predicted_yield": 123.4,
"predicted_yield_tons": 0.1234,
"biomass": 456.7,
"max_lai": 4.2,
"simulation_engine": "pcse",
"simulation_model_name": "Wofost81_NWLP_CWB_CNB",
"simulation_warning": null,
"supporting_metrics": {
"yield_estimate": 123.4,
"biomass": 456.7,
"max_lai": 4.2
}
}
],
"suggested_plant": {
"plant_id": 401,
"plant_name": "wheat",
"position": 3,
"stage": "",
"score": 123.4,
"predicted_yield": 123.4,
"predicted_yield_tons": 0.1234,
"biomass": 456.7,
"max_lai": 4.2,
"simulation_engine": "pcse",
"simulation_model_name": "Wofost81_NWLP_CWB_CNB",
"simulation_warning": null,
"supporting_metrics": {
"yield_estimate": 123.4,
"biomass": 456.7,
"max_lai": 4.2
}
},
"source_metadata": {
"block_status": "completed",
"aggregation_strategy": "sub_block_mean",
"has_satellite_metrics": true,
"has_sensor_metrics": false
}
}
],
"source_metadata": {
"source": "location_data+kmeans+farm_data+crop_simulation",
"location_id": 1,
"snapshot_block_count": 2
}
}
}
```
@@ -698,55 +816,90 @@
```json
{
"block_code": "block-1",
"block_code": "",
"cluster_uuid": "11111111-1111-1111-1111-111111111111",
"sub_block_code": "cluster-0",
"cluster_label": 0,
"temporal_extent": {
"start_date": "2026-04-12",
"end_date": "2026-05-12"
"start_date": "2026-04-11",
"end_date": "2026-05-11"
},
"cluster_block": {},
"satellite_metrics": {
"ndvi": 0.51,
"ndwi": 0.24,
"soil_vv": 0.13
"ndvi": 0.659927,
"ndwi": -0.571487,
"soil_vv_db": -13.73536,
"soil_vv": -13.127913
},
"sensor_metrics": {},
"resolved_metrics": {
"ndvi": 0.51,
"ndwi": 0.24,
"soil_vv": 0.13
"ndvi": 0.659927,
"ndwi": -0.571487,
"soil_vv_db": -13.73536,
"soil_vv": -13.127913
},
"candidate_plants": [],
"suggested_plant": {},
"suggested_plant": null,
"source_metadata": {}
}
```
فیلدهای مهم داخل هر cluster:
- `block_code`: کد بلوک والد؛ در حالت تحلیل کل مزرعه ممکن است خالی باشد.
- `cluster_uuid`: شناسه یکتای cluster.
- `sub_block_code`: کد خوانا مثل `cluster-0`.
- `cluster_label`: لیبل عددی KMeans.
- `temporal_extent`: بازه زمانی داده remote sensing.
- `cluster_block`: اطلاعات هندسی، centroid، سلول مرکزی و geometry.
- `satellite_metrics`: میانگین متریک‌های ماهواره‌ای cluster.
- `sensor_metrics`: داده سنسوری تجمیع‌شده، اگر وجود داشته باشد.
- `resolved_metrics`: متریک نهایی که برای simulation استفاده شده است.
- `candidate_plants`: همه گیاه‌های ارزیابی‌شده با رتبه‌بندی.
- `suggested_plant`: بهترین گزینه نهایی برای همان cluster.
- `source_metadata`: وضعیت پردازش و strategy تجمیع.
### ساختار هر آیتم `candidate_plants`
```json
{
"plant_id": 101,
"plant_name": "Tomato",
"position": 0,
"stage": "vegetative",
"score": 150.0,
"predicted_yield": 150.0,
"predicted_yield_tons": 0.15,
"biomass": 300.0,
"plant_id": 401,
"plant_name": "wheat",
"position": 3,
"stage": "",
"score": 123.4,
"predicted_yield": 123.4,
"predicted_yield_tons": 0.1234,
"biomass": 456.7,
"max_lai": 4.2,
"simulation_engine": "pcse",
"simulation_model_name": "Wofost81_NWLP_CWB_CNB",
"simulation_warning": null,
"supporting_metrics": {}
"supporting_metrics": {
"yield_estimate": 123.4,
"biomass": 456.7,
"max_lai": 4.2
}
}
```
### `400`
وقتی مزرعه گیاه ثبت‌شده نداشته باشد یا پیش‌نیاز simulation کامل نباشد:
وقتی `farm_uuid` ارسال نشده باشد یا معتبر نباشد:
```json
{
"code": 400,
"msg": "داده نامعتبر.",
"data": {
"farm_uuid": [
"This field is required."
]
}
}
```
وقتی مزرعه گیاه ثبت‌شده نداشته باشد:
```json
{
@@ -756,6 +909,16 @@
}
```
وقتی ساخت simulation یا مقایسه گیاه‌ها fail شود:
```json
{
"code": 400,
"msg": "مقایسه گیاه‌ها با crop_simulation انجام نشد: ...",
"data": null
}
```
### `404`
وقتی مزرعه یا خروجی KMeans پیدا نشود:
@@ -368,17 +368,38 @@ GET /api/location-data/remote-sensing/cluster-blocks/<cluster_uuid>/live/
- `farm_uuid` اجباری
نمونه:
```http
GET /api/location-data/remote-sensing/cluster-recommendations/?farm_uuid=<farm_uuid>
```
### فیلدهای مهم response
- `farm_uuid`
- `location_id`
- `registered_plants`
- `clusters`
- `evaluated_plant_count`
- `cluster_count`
- `source_metadata`
### نکات مهم برای فرانت
- هر آیتم `clusters` دقیقا مربوط به یک cluster از خروجی KMeans است.
- `candidate_plants` لیست کامل رتبه‌بندی است و `suggested_plant` بهترین آیتم همان لیست است.
- `resolved_metrics` همان متریک نهایی است که برای simulation استفاده شده و بهتر است مبنای نمایش KPI باشد.
- `cluster_block` برای رسم روی نقشه و نمایش geometry، centroid و cellها استفاده می‌شود.
- `source_metadata.has_sensor_metrics` مشخص می‌کند آیا باید در UI بخش سنسورها را نمایش دهید یا نه.
### استفاده در فرانت
برای هر cluster این بخش ها مهم هستند:
- `sub_block_code`
- `cluster_label`
- `temporal_extent`
- `cluster_block`
- `satellite_metrics`
- `sensor_metrics`
- `resolved_metrics`
@@ -388,9 +409,11 @@ GET /api/location-data/remote-sensing/cluster-blocks/<cluster_uuid>/live/
### UI پیشنهادی
- کارت cluster با عنوان `sub_block_code` یا `cluster_label`
- بازه زمانی از `temporal_extent.start_date` تا `temporal_extent.end_date`
- KPIهای `resolved_metrics`
- جدول candidateها با score
- highlight کردن `suggested_plant`
- اگر `candidate_plants` خالی بود، state خالی و بدون recommendation نشان دهید
---
+4
View File
@@ -53,6 +53,8 @@ class FarmDeviceSerializer(serializers.ModelSerializer):
"physical_device_uuid",
"name",
"sensor_type",
"cluster_uuid",
"location_metadata",
"is_active",
"specifications",
"power_source",
@@ -116,6 +118,8 @@ class FarmDeviceWriteSerializer(serializers.ModelSerializer):
"physical_device_uuid",
"name",
"sensor_type",
"cluster_uuid",
"location_metadata",
"is_active",
"specifications",
"power_source",
+21 -3
View File
@@ -84,15 +84,32 @@ def sync_farm_data(
if plant_ids:
request_payload["plant_ids"] = [int(plant_id) for plant_id in plant_ids]
if farm.farm_type_id:
request_payload["farm_type_uuid"] = str(farm.farm_type.uuid)
request_payload["farm_type_name"] = farm.farm_type.name
request_payload["farm_type_description"] = farm.farm_type.description
request_payload["farm_type_metadata"] = (
farm.farm_type.metadata if isinstance(farm.farm_type.metadata, dict) else {}
)
resolved_irrigation_method_id = irrigation_method_id
if resolved_irrigation_method_id is None:
resolved_irrigation_method_id = farm.irrigation_method_id
if resolved_irrigation_method_id is not None:
request_payload["irrigation_method_id"] = int(resolved_irrigation_method_id)
if not any(key in request_payload for key in ("sensor_payload", "plant_ids", "irrigation_method_id")):
if not any(
key in request_payload
for key in (
"sensor_payload",
"plant_ids",
"farm_type_uuid",
"farm_type_name",
"irrigation_method_id",
)
):
raise FarmDataSyncError(
"At least one of `sensor_payload`, `plant_ids`, or `irrigation_method_id` is required for farm data sync."
"At least one of `sensor_payload`, `plant_ids`, `farm_type`, or `irrigation_method_id` is required for farm data sync."
)
api_key = getattr(settings, "FARM_DATA_API_KEY", "")
@@ -101,11 +118,12 @@ def sync_farm_data(
raise FarmDataSyncError("FARM_DATA_API_KEY is not configured.")
logger.warning(
"Farm data sync start: farm_uuid=%s sensor_key=%s has_sensor_payload=%s plant_ids=%s irrigation_method_id=%s boundary_type=%s",
"Farm data sync start: farm_uuid=%s sensor_key=%s has_sensor_payload=%s plant_ids=%s farm_type_uuid=%s irrigation_method_id=%s boundary_type=%s",
farm.farm_uuid,
request_payload.get("sensor_key"),
"sensor_payload" in request_payload,
request_payload.get("plant_ids"),
request_payload.get("farm_type_uuid"),
request_payload.get("irrigation_method_id"),
request_payload["farm_boundary"].get("type") if isinstance(request_payload["farm_boundary"], dict) else None,
)
+13 -1
View File
@@ -119,7 +119,19 @@ def serialize_products_for_ai(products=None):
"growth_profile": product.growth_profile if isinstance(product.growth_profile, dict) else {},
"is_active": True,
"updated_at": product.updated_at.isoformat() if product.updated_at else None,
"farm_type": product.farm_type.name if product.farm_type_id else DEFAULT_FARM_TYPE_NAME,
"farm_type": {
"uuid": str(product.farm_type.uuid) if product.farm_type_id else None,
"name": product.farm_type.name if product.farm_type_id else DEFAULT_FARM_TYPE_NAME,
"description": product.farm_type.description if product.farm_type_id else "",
"metadata": (
product.farm_type.metadata
if product.farm_type_id and isinstance(product.farm_type.metadata, dict)
else {}
),
"updated_at": product.farm_type.updated_at.isoformat()
if product.farm_type_id and product.farm_type.updated_at
else None,
},
}
)
return payload
+333
View File
@@ -0,0 +1,333 @@
#!/usr/bin/env bash
# ai_git_commit.sh — generate a commit message and branch name from the
# staged diff using an OpenAI-compatible chat-completions API, then let the
# user confirm/edit before switching branches and committing.
set -euo pipefail

# Endpoint/model configuration; GapGPT variables act as fallbacks.
MODEL="${OPENAI_MODEL:-gpt-4}"
BASE_URL="${OPENAI_BASE_URL:-${GAPGPT_BASE_URL:-https://api.gapgpt.app/v1}}"
API_KEY="${OPENAI_API_KEY:-${GAPGPT_API_KEY:-}}"
EDITOR_CMD="${EDITOR:-vi}"

# Globals populated by parse_ai_json / choose_branch.
COMMIT_MESSAGE=""
BRANCH_NAME=""
FINAL_BRANCH_NAME=""
FINAL_BRANCH_MODE=""
# Print CLI usage. The heredoc is quoted ('USAGE') so nothing expands.
usage() {
    cat <<'USAGE'
Usage: ai_git_commit.sh
Required environment variables:
`OPENAI_API_KEY` or `GAPGPT_API_KEY`
Optional environment variables:
`OPENAI_BASE_URL` or `GAPGPT_BASE_URL` (default: https://api.gapgpt.app/v1)
`OPENAI_MODEL` Model name (default: gpt-4)
`EDITOR` Editor used for manual commit message edits
USAGE
}
# Abort with an error unless the given executable exists on PATH.
require_command() {
    local command_name="$1"
    command -v "$command_name" >/dev/null 2>&1 && return 0
    echo "Error: '$command_name' is required but not installed." >&2
    exit 1
}
# Fail fast when invoked outside a git working tree.
ensure_git_repo() {
    if ! git rev-parse --is-inside-work-tree >/dev/null 2>&1; then
        echo "Error: current directory is not a git repository." >&2
        exit 1
    fi
}
# Print the currently staged changes as a minimal patch.
get_staged_diff() {
    git diff --staged --patch --minimal
}
# Compose the user prompt for the model. The heredoc is unquoted so
# ${diff_content} expands and the staged diff is embedded verbatim.
build_prompt() {
    local diff_content="$1"
    cat <<PROMPT
You are a senior engineer helping with Git hygiene.
Analyze the staged git diff below and respond with valid JSON only.
Requirements:
- The response must be a JSON object with exactly these keys: commit_message, branch_name.
- commit_message must be a single descriptive paragraph in plain text.
- branch_name must use the format type/short-description.
- branch_name should be lowercase, concise, and use hyphens instead of spaces.
- Do not wrap the JSON in markdown fences.
Staged diff:
${diff_content}
PROMPT
}
# POST a chat-completions request and print the raw JSON response.
# `jq -n` builds the body so the prompt is safely JSON-escaped.
call_openai() {
    local prompt="$1"
    local endpoint="${BASE_URL%/}/chat/completions"
    local payload
    payload=$(jq -n \
        --arg model "$MODEL" \
        --arg prompt "$prompt" \
        '{
        model: $model,
        temperature: 0.2,
        response_format: {type: "json_object"},
        messages: [
        {role: "system", content: "You generate commit metadata from git diffs and always return strict JSON."},
        {role: "user", content: $prompt}
        ]
        }')
    # --fail makes curl exit non-zero on HTTP errors so callers can detect them.
    curl --silent --show-error --fail \
        --header "Content-Type: application/json" \
        --header "Authorization: Bearer ${API_KEY}" \
        --data "$payload" \
        "$endpoint"
}
# Print the first choice's message content; `jq -e` fails when it is
# missing or null so the caller can detect a malformed response.
extract_ai_content() {
    local api_response="$1"
    jq -er '.choices[0].message.content' <<<"$api_response"
}
# Populate the COMMIT_MESSAGE / BRANCH_NAME globals from the AI JSON.
# `jq -e` makes a missing or null key fail the function.
parse_ai_json() {
    local ai_json="$1"
    COMMIT_MESSAGE=$(jq -er '.commit_message' <<<"$ai_json")
    BRANCH_NAME=$(jq -er '.branch_name' <<<"$ai_json")
}
# Open $EDITOR on the given text and print the edited, non-blank lines.
# Callers capture our stdout via $(...), so the interactive editor must be
# wired to the terminal directly — otherwise a full-screen editor like vi
# would paint its UI into the command substitution instead of the screen.
edit_multiline_value() {
    local initial_value="$1"
    local temp_file
    temp_file=$(mktemp)
    printf '%s\n' "$initial_value" > "$temp_file"
    "$EDITOR_CMD" "$temp_file" </dev/tty >/dev/tty
    local edited_value
    # Strip blank lines; an all-blank result counts as empty.
    edited_value=$(sed '/^[[:space:]]*$/d' "$temp_file")
    rm -f "$temp_file"
    if [[ -z "$edited_value" ]]; then
        echo "Error: value cannot be empty." >&2
        exit 1
    fi
    printf '%s' "$edited_value"
}
# Prompt for a single-line value with readline editing, pre-filled with the
# current value. `read -p` writes its prompt to stderr, so stdout stays
# clean for callers that capture it.
edit_single_line_value() {
    local current_value="$1"
    local updated_value
    read -r -e -i "$current_value" -p "> " updated_value
    if [[ -z "$updated_value" ]]; then
        echo "Error: value cannot be empty." >&2
        exit 1
    fi
    printf '%s' "$updated_value"
}
# Show the AI-suggested commit message and let the user accept or edit it.
# All interactive output goes to stderr: callers capture our stdout via
# $(...), so anything echoed to stdout would end up INSIDE the commit
# message — the previous version leaked the whole banner/menu that way.
confirm_or_edit_commit_message() {
    local current_message="$1"
    {
        echo
        echo "Suggested commit message:"
        echo "----------------------------------------"
        printf '%s\n' "$current_message"
        echo "----------------------------------------"
        echo "1) Use as-is"
        echo "2) Edit in \$EDITOR (${EDITOR_CMD})"
    } >&2
    local choice
    read -r -p "Choose an option [1-2]: " choice
    case "$choice" in
        1) printf '%s' "$current_message" ;;
        2) edit_multiline_value "$current_message" ;;
        *)
            echo "Error: invalid selection." >&2
            exit 1
            ;;
    esac
}
# Print one local branch name per line (short ref form).
list_local_branches() {
    git for-each-ref --format='%(refname:short)' refs/heads
}
# Interactive picker over local branches. Callers capture our stdout via
# $(...), so stdout must contain ONLY the chosen branch name — the previous
# version echoed its header to stdout, which got captured into the branch
# name. All menu text now goes to stderr (`select` already prompts there).
select_existing_branch() {
    mapfile -t branches < <(list_local_branches)
    if [[ ${#branches[@]} -eq 0 ]]; then
        echo "Error: no local branches found." >&2
        exit 1
    fi
    {
        echo
        echo "Select an existing branch:"
    } >&2
    select branch in "${branches[@]}"; do
        if [[ -n "${branch:-}" ]]; then
            printf '%s' "$branch"
            return 0
        fi
        echo "Invalid selection. Try again." >&2
    done
}
# Reject branch names that git itself would refuse.
validate_branch_name() {
    local branch_name="$1"
    git check-ref-format --branch "$branch_name" >/dev/null 2>&1 && return 0
    echo "Error: '$branch_name' is not a valid git branch name." >&2
    exit 1
}
# Let the user accept, edit, or replace the suggested branch name.
# Results are returned via the FINAL_BRANCH_NAME / FINAL_BRANCH_MODE
# globals (this function is not command-substituted, so its own echoes
# print straight to the terminal).
choose_branch() {
    local suggested_branch="$1"
    local branch_choice
    local branch_value
    local branch_mode
    echo
    echo "Suggested branch name: $suggested_branch"
    echo "1) Create new branch with suggested name"
    echo "2) Edit branch name and create new branch"
    echo "3) Select an existing branch"
    read -r -p "Choose an option [1-3]: " branch_choice
    case "$branch_choice" in
        1)
            branch_value="$suggested_branch"
            branch_mode="new"
            ;;
        2)
            branch_value=$(edit_single_line_value "$suggested_branch")
            branch_mode="new"
            ;;
        3)
            branch_value=$(select_existing_branch)
            branch_mode="existing"
            ;;
        *)
            echo "Error: invalid selection." >&2
            exit 1
            ;;
    esac
    validate_branch_name "$branch_value"
    # If the chosen name already exists locally, reuse it instead of -b.
    if git show-ref --verify --quiet "refs/heads/$branch_value"; then
        branch_mode="existing"
    fi
    FINAL_BRANCH_NAME="$branch_value"
    FINAL_BRANCH_MODE="$branch_mode"
}
# Create-and-switch for new branches; plain checkout for existing ones.
checkout_branch() {
    local target_branch="$1"
    local target_mode="$2"
    case "$target_mode" in
        new) git checkout -b "$target_branch" ;;
        *)   git checkout "$target_branch" ;;
    esac
}
# Echo the final plan (target branch and commit message) for confirmation.
print_summary() {
    local commit_message="$1"
    local branch_name="$2"
    local branch_mode="$3"
    echo
    echo "Final plan:"
    echo "- Branch: $branch_name ($branch_mode)"
    echo "- Commit message:"
    printf '%s\n' "$commit_message"
}
# Entry point: validate the environment, ask the AI for commit metadata,
# confirm interactively, then switch branch and commit the staged changes.
main() {
    if [[ "${1:-}" == "-h" || "${1:-}" == "--help" ]]; then
        usage
        exit 0
    fi
    require_command git
    require_command curl
    require_command jq
    ensure_git_repo
    if [[ -z "$API_KEY" ]]; then
        echo "Error: set OPENAI_API_KEY or GAPGPT_API_KEY." >&2
        exit 1
    fi
    local staged_diff
    staged_diff=$(get_staged_diff)
    if [[ -z "$staged_diff" ]]; then
        echo "Error: there are no staged changes to analyze." >&2
        exit 1
    fi
    echo "Analyzing staged changes with ${MODEL}..."
    local prompt
    prompt=$(build_prompt "$staged_diff")
    local api_response
    if ! api_response=$(call_openai "$prompt"); then
        echo "Error: failed to contact the OpenAI-compatible API." >&2
        exit 1
    fi
    local ai_content
    if ! ai_content=$(extract_ai_content "$api_response"); then
        echo "Error: API response did not include message content." >&2
        exit 1
    fi
    if ! parse_ai_json "$ai_content"; then
        echo "Error: AI response was not valid JSON with the required keys." >&2
        exit 1
    fi
    local final_commit_message
    final_commit_message=$(confirm_or_edit_commit_message "$COMMIT_MESSAGE")
    choose_branch "$BRANCH_NAME"
    print_summary "$final_commit_message" "$FINAL_BRANCH_NAME" "$FINAL_BRANCH_MODE"
    local final_confirmation
    read -r -p "Proceed with checkout and commit? [y/N]: " final_confirmation
    if [[ ! "$final_confirmation" =~ ^[Yy]$ ]]; then
        echo "Aborted. No branch switch or commit was made."
        exit 0
    fi
    checkout_branch "$FINAL_BRANCH_NAME" "$FINAL_BRANCH_MODE"
    # Commit from a temp file so the multi-line message is preserved exactly.
    local commit_file
    commit_file=$(mktemp)
    printf '%s\n' "$final_commit_message" > "$commit_file"
    git commit -F "$commit_file"
    rm -f "$commit_file"
    echo
    echo "Done: committed on '$FINAL_BRANCH_NAME'."
}

main "$@"