AI UPDATE

2026-03-22 03:08:27 +03:30
parent 3ee14ca977
commit d977a583c6
37 changed files with 3525 additions and 263 deletions
+2 -2
View File
@@ -37,11 +37,11 @@ def get_embedding_client(config: RAGConfig | None = None) -> OpenAI:
 def get_chat_client(config: RAGConfig | None = None) -> OpenAI:
     """
     Build the OpenAI client for Chat/LLM based on the active provider.
-    The provider is read from config.embedding.provider (shared between embedding and chat).
+    The provider is read from config.llm.provider.
     """
     cfg = config or load_rag_config()
     llm = cfg.llm
-    provider = cfg.embedding.provider
+    provider = llm.provider or cfg.embedding.provider
     logger.info(provider)
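The new line means a service-level LLM provider wins, with the embedding provider kept as a safety net. A minimal sketch of the resolution order, using stand-in dataclasses (the "openai" embedding default is illustrative; "gapgpt" is the LLMConfig default from rag/config.py below):

from dataclasses import dataclass

@dataclass
class LLMConfig:
    provider: str = "gapgpt"

@dataclass
class EmbeddingConfig:
    provider: str = "openai"  # illustrative default

def resolve_provider(llm: LLMConfig, embedding: EmbeddingConfig) -> str:
    # llm.provider wins; falls back to the embedding provider when unset/empty.
    return llm.provider or embedding.provider

assert resolve_provider(LLMConfig(provider=""), EmbeddingConfig()) == "openai"
assert resolve_provider(LLMConfig(), EmbeddingConfig()) == "gapgpt"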
+66 -25
View File
@@ -4,7 +4,7 @@
 import logging
 from pathlib import Path
-from .config import load_rag_config, RAGConfig
+from .config import load_rag_config, RAGConfig, get_service_config, ServiceConfig
 from .api_provider import get_chat_client
 from .retrieve import search_with_query
 from .user_data import build_user_soil_text, build_user_weather_text
@@ -43,6 +43,16 @@ def _load_kb_tone(kb_name: str, config: RAGConfig | None = None) -> str:
return ""
def _load_service_tone(service: ServiceConfig, config: RAGConfig | None = None) -> str:
cfg = config or load_rag_config()
if service.tone_file:
base = Path(__file__).resolve().parent.parent
tone_path = base / service.tone_file
if tone_path.exists():
return tone_path.read_text(encoding="utf-8").strip()
return _load_kb_tone(service.knowledge_base, cfg)
def _detect_kb_intent(query: str) -> str:
"""تشخیص ساده نوع پایگاه دانش مورد نیاز از روی متن سوال."""
q = query.lower()
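_load_service_tone gives each service a three-step tone fallback: its own tone_file, then its knowledge base's tone, then the global tone (the last step happens in chat_rag_stream below). A minimal sketch of that cascade with illustrative values; the real lookups are _load_service_tone, _load_kb_tone and _load_tone in rag/chat.py:

def resolve_tone(service_tone: str | None, kb_tone: str | None, global_tone: str) -> str:
    # first non-empty tone wins
    return service_tone or kb_tone or global_tone

assert resolve_tone(None, None, "plain, friendly tone") == "plain, friendly tone"
assert resolve_tone("formal agronomist tone", None, "x") == "formal agronomist tone"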
@@ -59,10 +69,11 @@ def _detect_kb_intent(query: str) -> str:
 def build_rag_context(
     query: str,
-    sensor_uuid: str,
+    sensor_uuid: str | None = None,
     config: RAGConfig | None = None,
     limit: int = 8,
     kb_name: str | None = None,
+    service_id: str | None = None,
 ) -> str:
     """
     Build the context for the LLM: the user's current soil data plus related texts from RAG.
@@ -76,24 +87,34 @@ def build_rag_context(
len(query or ""),
)
parts: list[str] = []
cfg = config or load_rag_config()
service = get_service_config(service_id, cfg) if service_id else None
include_user_embeddings = service.use_user_embeddings if service else True
resolved_kb_name = kb_name or (service.knowledge_base if service else None)
user_soil = build_user_soil_text(sensor_uuid)
if user_soil and user_soil.strip():
parts.append("[داده‌های فعلی خاک شما]\n" + user_soil.strip())
logger.debug("Included user soil section sensor_uuid=%s", sensor_uuid)
else:
logger.info("No user soil data found sensor_uuid=%s", sensor_uuid)
if include_user_embeddings and sensor_uuid:
user_soil = build_user_soil_text(sensor_uuid)
if user_soil and user_soil.strip():
parts.append("[داده‌های فعلی خاک شما]\n" + user_soil.strip())
logger.debug("Included user soil section sensor_uuid=%s", sensor_uuid)
else:
logger.info("No user soil data found sensor_uuid=%s", sensor_uuid)
weather_text = build_user_weather_text(sensor_uuid)
if weather_text and weather_text.strip():
parts.append("[پیش‌بینی هواشناسی]\n" + weather_text.strip())
logger.debug("Included weather section sensor_uuid=%s", sensor_uuid)
else:
logger.info("No weather data found sensor_uuid=%s", sensor_uuid)
weather_text = build_user_weather_text(sensor_uuid)
if weather_text and weather_text.strip():
parts.append("[پیش‌بینی هواشناسی]\n" + weather_text.strip())
logger.debug("Included weather section sensor_uuid=%s", sensor_uuid)
else:
logger.info("No weather data found sensor_uuid=%s", sensor_uuid)
results = search_with_query(
query, sensor_uuid=sensor_uuid, limit=limit, config=config,
kb_name=kb_name,
query,
sensor_uuid=sensor_uuid,
limit=limit,
config=cfg,
kb_name=resolved_kb_name,
service_id=service_id,
use_user_embeddings=include_user_embeddings,
)
if results:
logger.info("Retrieved RAG results count=%s sensor_uuid=%s", len(results), sensor_uuid)
@@ -109,11 +130,12 @@ def build_rag_context(
 def chat_rag_stream(
     query: str,
-    sensor_uuid: str,
+    sensor_uuid: str | None = None,
     config: RAGConfig | None = None,
     limit: int = 5,
     system_override: str | None = None,
     kb_name: str | None = None,
+    service_id: str | None = None,
 ):
     logger.info(
         "chat_rag_stream started sensor_uuid=%s kb_name=%s limit=%s query_len=%s",
@@ -137,24 +159,43 @@ def chat_rag_stream(
         Each content delta, as a string.
     """
     cfg = config or load_rag_config()
-    client = get_chat_client(cfg)
-    model = cfg.llm.model
-    logger.debug("Loaded RAG config with model=%s", model)
+    resolved_service_id = service_id or kb_name or _detect_kb_intent(query)
+    service = get_service_config(resolved_service_id, cfg)
+    service_llm_config = service.llm
+    service_cfg = RAGConfig(
+        embedding=cfg.embedding,
+        qdrant=cfg.qdrant,
+        chunking=cfg.chunking,
+        llm=service_llm_config,
+        knowledge_bases=cfg.knowledge_bases,
+        services=cfg.services,
+        chromadb=cfg.chromadb,
+    )
+    client = get_chat_client(service_cfg)
+    model = service_llm_config.model
+    logger.debug("Loaded service config service_id=%s model=%s", resolved_service_id, model)
-    detected_kb = kb_name or _detect_kb_intent(query)
-    logger.info("Using knowledge base=%s", detected_kb)
+    detected_kb = kb_name or service.knowledge_base
+    logger.info("Using knowledge base=%s for service_id=%s", detected_kb, resolved_service_id)
     context = build_rag_context(
-        query, sensor_uuid, config=cfg, limit=limit, kb_name=detected_kb,
+        query,
+        sensor_uuid,
+        config=cfg,
+        limit=limit,
+        kb_name=detected_kb,
+        service_id=resolved_service_id,
     )
     logger.debug("Built context length=%s", len(context))
     if system_override is not None:
         system_content = system_override
     else:
-        tone = _load_kb_tone(detected_kb, cfg)
+        tone = _load_service_tone(service, cfg)
         if not tone:
             tone = _load_tone(cfg)
         system_parts = [tone] if tone else []
+        if service.system_prompt:
+            system_parts.append(service.system_prompt)
         system_parts.append(
             "Answer the user's question using the \"Your current soil data\" and \"reference texts\" sections below. "
             "For questions about the user's soil (such as pH, moisture, NPK), always use the current data. "
@@ -169,7 +210,7 @@ def chat_rag_stream(
{"role": "system", "content": system_content},
{"role": "user", "content": query},
]
logger.info("Prepared messages for model=%s message=%s", model,messages)
logger.info("Prepared messages for model=%s service_id=%s", model, resolved_service_id)
stream = client.chat.completions.create(
model=model,
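chat_rag_stream yields bare content deltas, so a caller just concatenates them. A minimal consumption sketch; the service id comes from this commit, the query and UUID are illustrative:

from rag.chat import chat_rag_stream

answer = []
for delta in chat_rag_stream(
    "Which fertilizer suits nitrogen-poor soil?",
    sensor_uuid="550e8400-e29b-41d4-a716-446655440000",  # illustrative
    service_id="fertilization",
):
    answer.append(delta)  # each chunk is a plain string delta
print("".join(answer))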
+64 -9
View File
@@ -1,5 +1,6 @@
"""
بارگذاری تنظیمات RAG از rag_config.yaml — با پشتیبانی از چند provider و چند پایگاه دانش
بارگذاری تنظیمات RAG از rag_config.yaml — با پشتیبانی از چند provider،
چند پایگاه دانش و چند سرویس.
"""
import os
from dataclasses import dataclass, field
@@ -36,6 +37,7 @@ class ChunkingConfig:
 @dataclass
 class LLMConfig:
+    provider: str = "gapgpt"
     model: str = "gpt-4o"
     base_url: str | None = None
     api_key_env: str | None = None
@@ -50,6 +52,17 @@ class KnowledgeBaseConfig:
description: str = ""
@dataclass
class ServiceConfig:
service_id: str
knowledge_base: str
llm: LLMConfig = field(default_factory=LLMConfig)
tone_file: str | None = None
system_prompt: str | None = None
use_user_embeddings: bool = True
description: str = ""
@dataclass
class RAGConfig:
embedding: EmbeddingConfig
@@ -57,9 +70,31 @@ class RAGConfig:
     chunking: ChunkingConfig
     llm: LLMConfig = field(default_factory=LLMConfig)
     knowledge_bases: dict[str, KnowledgeBaseConfig] = field(default_factory=dict)
+    services: dict[str, ServiceConfig] = field(default_factory=dict)
     chromadb: dict[str, Any] = field(default_factory=dict)
+
+
+def _build_llm_config(data: dict[str, Any] | None, default: LLMConfig | None = None) -> LLMConfig:
+    llm_data = data or {}
+    fallback = default or LLMConfig()
+    return LLMConfig(
+        provider=llm_data.get("provider", fallback.provider),
+        model=llm_data.get("model", fallback.model),
+        base_url=llm_data.get("base_url", fallback.base_url),
+        api_key_env=llm_data.get("api_key_env", fallback.api_key_env),
+        avalai_base_url=llm_data.get("avalai_base_url", fallback.avalai_base_url),
+        avalai_api_key_env=llm_data.get("avalai_api_key_env", fallback.avalai_api_key_env),
+    )
+
+
+def get_service_config(service_id: str, config: RAGConfig | None = None) -> ServiceConfig:
+    cfg = config or load_rag_config()
+    service = cfg.services.get(service_id)
+    if service is None:
+        raise KeyError(f"Unknown service_id: {service_id}")
+    return service
+
+
 def load_rag_config(config_path: str | Path | None = None) -> RAGConfig:
     """
     Load settings from YAML and env.
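Because _build_llm_config falls back field by field, a service block only needs the keys it overrides. A small sketch (values illustrative; _build_llm_config is module-private, imported here only for demonstration):

from rag.config import _build_llm_config

base = _build_llm_config({"provider": "openai", "model": "gpt-4o"})
svc = _build_llm_config({"model": "gpt-4o-mini"}, default=base)  # overrides model only
assert (svc.provider, svc.model) == ("openai", "gpt-4o-mini")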
@@ -101,14 +136,7 @@ def load_rag_config(config_path: str | Path | None = None) -> RAGConfig:
         overlap_tokens=ch.get("overlap_tokens", 50),
     )

-    llm_data = data.get("llm", {})
-    llm = LLMConfig(
-        model=llm_data.get("model", "gpt-4o"),
-        base_url=llm_data.get("base_url"),
-        api_key_env=llm_data.get("api_key_env"),
-        avalai_base_url=llm_data.get("avalai_base_url"),
-        avalai_api_key_env=llm_data.get("avalai_api_key_env"),
-    )
+    llm = _build_llm_config(data.get("llm", {}))

     kb_data = data.get("knowledge_bases", {})
     knowledge_bases: dict[str, KnowledgeBaseConfig] = {}
@@ -119,11 +147,38 @@ def load_rag_config(config_path: str | Path | None = None) -> RAGConfig:
             description=kb_conf.get("description", ""),
         )

+    services_data = data.get("services", {})
+    services: dict[str, ServiceConfig] = {}
+    for service_id, service_conf in services_data.items():
+        kb_name = service_conf.get("knowledge_base", service_id)
+        kb_conf = knowledge_bases.get(kb_name)
+        services[service_id] = ServiceConfig(
+            service_id=service_id,
+            knowledge_base=kb_name,
+            llm=_build_llm_config(service_conf.get("llm"), default=llm),
+            tone_file=service_conf.get("tone_file") or (kb_conf.tone_file if kb_conf else None),
+            system_prompt=service_conf.get("system_prompt"),
+            use_user_embeddings=service_conf.get("use_user_embeddings", True),
+            description=service_conf.get("description", ""),
+        )
+    if not services:
+        for kb_name, kb_conf in knowledge_bases.items():
+            services[kb_name] = ServiceConfig(
+                service_id=kb_name,
+                knowledge_base=kb_name,
+                llm=llm,
+                tone_file=kb_conf.tone_file,
+                use_user_embeddings=True,
+                description=kb_conf.description,
+            )
+
     return RAGConfig(
         embedding=embedding,
         qdrant=qdrant,
         chunking=chunking,
         llm=llm,
         knowledge_bases=knowledge_bases,
+        services=services,
         chromadb=data.get("chromadb", {}),
     )
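For reference, a hedged sketch of the mapping load_rag_config now expects after YAML parsing. Key names mirror the parsing code above; the service names, tone path, and the faq knowledge base are illustrative, not taken from the repo:

# What yaml.safe_load("rag_config.yaml") might hand to the loader.
data = {
    "llm": {"provider": "gapgpt", "model": "gpt-4o"},
    "knowledge_bases": {
        "irrigation": {"description": "irrigation reference texts"},
        "faq": {"description": "product FAQ"},  # illustrative KB
    },
    "services": {
        "irrigation": {
            "knowledge_base": "irrigation",
            "llm": {"model": "gpt-4o-mini"},      # per-service override
            "tone_file": "tones/irrigation.txt",  # hypothetical path
        },
        "support_bot": {
            "knowledge_base": "faq",
            "system_prompt": "Answer strictly from the FAQ.",
            "use_user_embeddings": False,         # KB-only service
        },
    },
}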
+22 -4
View File
@@ -1,18 +1,20 @@
"""
بازیابی RAG: embed کوئری و جستجو در vector store
"""
from .config import load_rag_config, RAGConfig
from .config import load_rag_config, RAGConfig, get_service_config
from .embedding import embed_single
from .vector_store import QdrantVectorStore
def search_with_query(
query: str,
sensor_uuid: str,
sensor_uuid: str | None = None,
limit: int = 5,
score_threshold: float | None = None,
config: RAGConfig | None = None,
kb_name: str | None = None,
service_id: str | None = None,
use_user_embeddings: bool | None = None,
) -> list[dict]:
"""
کوئری را embed می‌کند و در vector store جستجو می‌کند.
@@ -27,12 +29,28 @@ def search_with_query(
         A list of results with id, score, text, metadata.
     """
     cfg = config or load_rag_config()
+    service = get_service_config(service_id, cfg) if service_id else None
+    resolved_kb_name = kb_name or (service.knowledge_base if service else None)
+    include_user_embeddings = (
+        use_user_embeddings
+        if use_user_embeddings is not None
+        else (service.use_user_embeddings if service else True)
+    )
+    sensor_filters = ["__global__"]
+    if include_user_embeddings and sensor_uuid:
+        sensor_filters.insert(0, sensor_uuid)
+    kb_filters = [resolved_kb_name] if resolved_kb_name else []
+    if include_user_embeddings:
+        kb_filters.append("__all__")
     query_vector = embed_single(query, config=cfg)
     store = QdrantVectorStore(config=cfg)
     return store.search(
         query_vector=query_vector,
         limit=limit,
         score_threshold=score_threshold,
-        sensor_uuid=sensor_uuid,
-        kb_name=kb_name,
+        sensor_uuids=sensor_filters,
+        kb_names=kb_filters,
     )
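The filter lists decide which payload values may match, with the __global__ and __all__ sentinels keeping shared vectors visible. A small sketch that mirrors the construction above (identifiers "u-1" and "faq" are illustrative):

def build_filters(sensor_uuid, kb, include_user):
    # Mirrors the filter construction in search_with_query.
    sensor_filters = ["__global__"]
    if include_user and sensor_uuid:
        sensor_filters.insert(0, sensor_uuid)
    kb_filters = [kb] if kb else []
    if include_user:
        kb_filters.append("__all__")
    return sensor_filters, kb_filters

assert build_filters("u-1", "irrigation", True) == (["u-1", "__global__"], ["irrigation", "__all__"])
assert build_filters(None, "faq", False) == (["__global__"], ["faq"])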
+19 -6
View File
@@ -6,13 +6,14 @@ import json
 import logging

 from rag.api_provider import get_chat_client
-from rag.chat import build_rag_context, _load_kb_tone
-from rag.config import load_rag_config, RAGConfig
+from rag.chat import build_rag_context, _load_service_tone
+from rag.config import load_rag_config, RAGConfig, get_service_config
 from rag.user_data import build_plant_text

 logger = logging.getLogger(__name__)

 KB_NAME = "fertilization"
+SERVICE_ID = "fertilization"

 DEFAULT_FERTILIZATION_PROMPT = (
     "Based on the soil data (NPK, pH), the plant's characteristics, its growth stage, and the fertilization knowledge base, "
@@ -56,13 +57,23 @@ def get_fertilization_recommendation(
         A dict with keys fertilizer_needed, fertilizer_type, amount_kg_per_hectare, reason, npk_status, raw_response.
     """
     cfg = config or load_rag_config()
-    client = get_chat_client(cfg)
-    model = cfg.llm.model
+    service = get_service_config(SERVICE_ID, cfg)
+    service_cfg = RAGConfig(
+        embedding=cfg.embedding,
+        qdrant=cfg.qdrant,
+        chunking=cfg.chunking,
+        llm=service.llm,
+        knowledge_bases=cfg.knowledge_bases,
+        services=cfg.services,
+        chromadb=cfg.chromadb,
+    )
+    client = get_chat_client(service_cfg)
+    model = service.llm.model
     user_query = query or "What is the fertilization recommendation for my farm?"
     context = build_rag_context(
-        user_query, sensor_uuid, config=cfg, limit=limit, kb_name=KB_NAME,
+        user_query, sensor_uuid, config=cfg, limit=limit, kb_name=KB_NAME, service_id=SERVICE_ID,
     )

     extra_parts: list[str] = []
@@ -73,8 +84,10 @@ def get_fertilization_recommendation(
     if extra_parts:
         context = "\n\n---\n\n".join(extra_parts) + ("\n\n---\n\n" + context if context else "")

-    tone = _load_kb_tone(KB_NAME, cfg)
+    tone = _load_service_tone(service, cfg)
     system_parts = [tone] if tone else []
+    if service.system_prompt:
+        system_parts.append(service.system_prompt)
     system_parts.append(DEFAULT_FERTILIZATION_PROMPT)
     if context:
         system_parts.append("\n\n" + context)
+73 -9
View File
@@ -5,18 +5,22 @@
 import json
 import logging

+from irrigation.evapotranspiration import calculate_forecast_water_needs, resolve_crop_profile, resolve_kc
+from sensor_data.models import SensorData
 from rag.api_provider import get_chat_client
-from rag.chat import build_rag_context, _load_kb_tone
-from rag.config import load_rag_config, RAGConfig
+from rag.chat import build_rag_context, _load_service_tone
+from rag.config import load_rag_config, RAGConfig, get_service_config
 from rag.user_data import build_plant_text, build_irrigation_method_text
+from weather.models import WeatherForecast

 logger = logging.getLogger(__name__)

 KB_NAME = "irrigation"
+SERVICE_ID = "irrigation"

 DEFAULT_IRRIGATION_PROMPT = (
-    "Based on the soil data, weather, plant characteristics, irrigation method, and the irrigation knowledge base, "
-    "give a precise irrigation recommendation. "
+    "Based on the final evapotranspiration and water-need calculations given in the input, "
+    "produce an irrigation plan the farmer can understand. "
     "The response must be JSON with the following structure:\n"
     '{\n'
     '  "plan": {\n'
@@ -28,7 +32,7 @@ DEFAULT_IRRIGATION_PROMPT = (
     '  }\n'
     '}\n'
     "Output only JSON, with no extra explanation. "
-    "Compute the numeric values based on real conditions."
+    "Do not perform any new numeric calculations; use only the structured input data."
 )
@@ -58,13 +62,52 @@ def get_irrigation_recommendation(
         A dict with keys irrigation_needed, amount_mm, reason, next_check_date, raw_response.
     """
     cfg = config or load_rag_config()
-    client = get_chat_client(cfg)
-    model = cfg.llm.model
+    service = get_service_config(SERVICE_ID, cfg)
+    service_cfg = RAGConfig(
+        embedding=cfg.embedding,
+        qdrant=cfg.qdrant,
+        chunking=cfg.chunking,
+        llm=service.llm,
+        knowledge_bases=cfg.knowledge_bases,
+        services=cfg.services,
+        chromadb=cfg.chromadb,
+    )
+    client = get_chat_client(service_cfg)
+    model = service.llm.model
     user_query = query or "What is the irrigation recommendation for my farm?"

+    sensor = SensorData.objects.select_related("location").prefetch_related("plants").filter(uuid_sensor=sensor_uuid).first()
+    plant = None
+    if sensor is not None and plant_name:
+        plant = sensor.plants.filter(name=plant_name).first()
+    elif sensor is not None:
+        plant = sensor.plants.first()
+    crop_profile = resolve_crop_profile(plant, growth_stage=growth_stage)
+    active_kc = resolve_kc(crop_profile, growth_stage=growth_stage)
+
+    forecasts = []
+    daily_water_needs = []
+    if sensor is not None:
+        forecasts = list(
+            WeatherForecast.objects.filter(location=sensor.location, forecast_date__isnull=False)
+            .order_by("forecast_date")[:7]
+        )
+        efficiency_percent = None
+        if irrigation_method_name:
+            from irrigation.models import IrrigationMethod
+            method = IrrigationMethod.objects.filter(name=irrigation_method_name).first()
+            efficiency_percent = getattr(method, "water_efficiency_percent", None) if method else None
+        daily_water_needs = calculate_forecast_water_needs(
+            forecasts=forecasts,
+            latitude_deg=float(sensor.location.latitude),
+            crop_profile=crop_profile,
+            growth_stage=growth_stage,
+            irrigation_efficiency_percent=efficiency_percent,
+        )
+
     context = build_rag_context(
-        user_query, sensor_uuid, config=cfg, limit=limit, kb_name=KB_NAME,
+        user_query, sensor_uuid, config=cfg, limit=limit, kb_name=KB_NAME, service_id=SERVICE_ID,
     )

     extra_parts: list[str] = []
@@ -76,11 +119,27 @@ def get_irrigation_recommendation(
     method_text = build_irrigation_method_text(irrigation_method_name)
     if method_text:
         extra_parts.append("[Selected irrigation method]\n" + method_text)
+    if daily_water_needs:
+        total_mm = round(sum(item["gross_irrigation_mm"] for item in daily_water_needs), 2)
+        schedule_lines = [
+            f"- {item['forecast_date']}: ET0={item['et0_mm']} mm, ETc={item['etc_mm']} mm, "
+            f"effective rainfall={item['effective_rainfall_mm']} mm, water need={item['gross_irrigation_mm']} mm, "
+            f"suggested timing={item['irrigation_timing']}"
+            for item in daily_water_needs
+        ]
+        extra_parts.append(
+            "[Deterministic FAO-56 calculation output]\n"
+            f"Total water need for the next 7 days: {total_mm} mm\n"
+            f"Kc used: {active_kc}\n"
+            + "\n".join(schedule_lines)
+        )

     if extra_parts:
         context = "\n\n---\n\n".join(extra_parts) + ("\n\n---\n\n" + context if context else "")

-    tone = _load_kb_tone(KB_NAME, cfg)
+    tone = _load_service_tone(service, cfg)
     system_parts = [tone] if tone else []
+    if service.system_prompt:
+        system_parts.append(service.system_prompt)
     system_parts.append(DEFAULT_IRRIGATION_PROMPT)
     if context:
         system_parts.append("\n\n" + context)
@@ -120,4 +179,9 @@ def get_irrigation_recommendation(
     }
     result["raw_response"] = raw
+    result["water_balance"] = {
+        "daily": daily_water_needs,
+        "crop_profile": crop_profile,
+        "active_kc": active_kc,
+    }
     return result
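calculate_forecast_water_needs itself is not part of this diff, but the per-day fields follow standard FAO-56 bookkeeping: ETc = Kc × ET0, net need = ETc minus effective rainfall, gross need = net divided by application efficiency. A hedged sketch of that arithmetic (the real function may differ in details):

def daily_water_need_mm(et0_mm: float, kc: float, effective_rainfall_mm: float,
                        efficiency_percent: float | None) -> dict:
    # Standard FAO-56 relations; illustrative, not the project's implementation.
    etc_mm = kc * et0_mm                               # crop evapotranspiration
    net_mm = max(etc_mm - effective_rainfall_mm, 0.0)  # net irrigation requirement
    eff = (efficiency_percent or 100.0) / 100.0        # application efficiency
    return {
        "et0_mm": round(et0_mm, 2),
        "etc_mm": round(etc_mm, 2),
        "effective_rainfall_mm": round(effective_rainfall_mm, 2),
        "gross_irrigation_mm": round(net_mm / eff, 2),
    }

# Example: ET0=5 mm, Kc=1.15, 1 mm effective rain, 75% drip efficiency.
print(daily_water_need_mm(5.0, 1.15, 1.0, 75))  # gross_irrigation_mm == 6.33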
+12 -14
View File
@@ -97,6 +97,8 @@ class QdrantVectorStore:
         score_threshold: float | None = None,
         sensor_uuid: str | None = None,
         kb_name: str | None = None,
+        sensor_uuids: list[str] | None = None,
+        kb_names: list[str] | None = None,
     ) -> list[dict]:
         """
         Similarity search based on the query vector.
@@ -107,34 +109,30 @@ class QdrantVectorStore:
"""
must_conditions = []
if sensor_uuid:
sensor_values = [value for value in (sensor_uuids or ([sensor_uuid] if sensor_uuid else [])) if value]
if sensor_values:
must_conditions.append(
qmodels.Filter(
should=[
qmodels.FieldCondition(
key="sensor_uuid",
match=qmodels.MatchValue(value=sensor_uuid),
),
qmodels.FieldCondition(
key="sensor_uuid",
match=qmodels.MatchValue(value="__global__"),
),
match=qmodels.MatchValue(value=value),
)
for value in sensor_values
]
)
)
if kb_name:
kb_values = [value for value in (kb_names or ([kb_name] if kb_name else [])) if value]
if kb_values:
must_conditions.append(
qmodels.Filter(
should=[
qmodels.FieldCondition(
key="kb_name",
match=qmodels.MatchValue(value=kb_name),
),
qmodels.FieldCondition(
key="kb_name",
match=qmodels.MatchValue(value="__all__"),
),
match=qmodels.MatchValue(value=value),
)
for value in kb_values
]
)
)
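With the list-valued parameters, each payload key gets one OR-group, and the groups are ANDed together. A sketch built directly with the qdrant-client models (imported as qmodels above); the "u-1" value is illustrative:

from qdrant_client import models as qmodels

# Filter produced for sensor_values == ["u-1", "__global__"]:
sensor_or = qmodels.Filter(
    should=[  # OR across the allowed payload values
        qmodels.FieldCondition(key="sensor_uuid", match=qmodels.MatchValue(value=v))
        for v in ["u-1", "__global__"]
    ]
)
# must=[...] ANDs this with the kb_name OR-group built the same way.
query_filter = qmodels.Filter(must=[sensor_or])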
+46 -16
View File
@@ -24,8 +24,8 @@ logger = logging.getLogger(__name__)
 class ChatView(APIView):
     """
     Streaming RAG chat.
-    POST with {"message": "question text", "sensor_uuid": "sensor-uuid"}
-    sensor_uuid is required; each user can only access their own data.
+    POST with {"service_id": "...", "query": "question text", "user_id": "user id"}
+    service_id is required. user_id is only required for services that use user embeddings.
     """

     @extend_schema(
@@ -35,8 +35,11 @@ class ChatView(APIView):
         request=inline_serializer(
             name="ChatRequest",
             fields={
-                "message": drf_serializers.CharField(help_text="The user's question text"),
-                "sensor_uuid": drf_serializers.CharField(help_text="Unique sensor ID"),
+                "service_id": drf_serializers.CharField(help_text="Service ID"),
+                "query": drf_serializers.CharField(required=False, help_text="The user's question text"),
+                "message": drf_serializers.CharField(required=False, help_text="Legacy name for the query field"),
+                "user_id": drf_serializers.CharField(required=False, help_text="User ID"),
+                "sensor_uuid": drf_serializers.CharField(required=False, help_text="Legacy name for the user_id field"),
             },
         ),
         responses={
responses={
@@ -50,19 +53,25 @@ class ChatView(APIView):
         examples=[
             OpenApiExample(
                 "Sample request",
-                value={"message": "How is my soil doing?", "sensor_uuid": "550e8400-e29b-41d4-a716-446655440000"},
+                value={
+                    "service_id": "support_bot",
+                    "user_id": "12345",
+                    "query": "How do I reset my password?",
+                },
                 request_only=True,
             ),
         ],
     )
     def post(self, request: Request):
+        from .config import load_rag_config, get_service_config
         data = request.data if request.method == "POST" else request.query_params
-        message = data.get("message")
-        sensor_uuid = data.get("sensor_uuid")
-        logging.info("jhh")
+        service_id = data.get("service_id")
+        message = data.get("query", data.get("message"))
+        user_id = data.get("user_id", data.get("sensor_uuid"))
         if not message or not isinstance(message, str):
             return Response(
-                {"code": 400, "msg": "The message parameter is required."},
+                {"code": 400, "msg": "The query parameter is required."},
                 status=status.HTTP_400_BAD_REQUEST,
             )
         message = str(message).strip()
@@ -71,22 +80,43 @@ class ChatView(APIView):
{"code": 400, "msg": "پیام نباید خالی باشد."},
status=status.HTTP_400_BAD_REQUEST,
)
if not sensor_uuid or not isinstance(sensor_uuid, str):
if not service_id or not isinstance(service_id, str):
return Response(
{"code": 400, "msg": "پارامتر sensor_uuid الزامی است."},
{"code": 400, "msg": "پارامتر service_id الزامی است."},
status=status.HTTP_400_BAD_REQUEST,
)
sensor_uuid = str(sensor_uuid).strip()
if not sensor_uuid:
service_id = str(service_id).strip()
if not service_id:
return Response(
{"code": 400, "msg": "sensor_uuid نباید خالی باشد."},
{"code": 400, "msg": "service_id نباید خالی باشد."},
status=status.HTTP_400_BAD_REQUEST,
)
cfg = load_rag_config()
try:
service = get_service_config(service_id, cfg)
except KeyError:
return Response(
{"code": 400, "msg": f"service_id نامعتبر است: {service_id}"},
status=status.HTTP_400_BAD_REQUEST,
)
if user_id is not None:
user_id = str(user_id).strip()
if not user_id:
user_id = None
if service.use_user_embeddings and not user_id:
return Response(
{"code": 400, "msg": "برای این service_id، پارامتر user_id الزامی است."},
status=status.HTTP_400_BAD_REQUEST,
)
def generate():
try:
for chunk in chat_rag_stream(message, sensor_uuid=sensor_uuid):
for chunk in chat_rag_stream(
message,
sensor_uuid=user_id,
service_id=service_id,
config=cfg,
):
yield chunk
except Exception as e:
yield f"\n[خطا: {e}]"