import json
from langchain_openai import ChatOpenAI
from config import Settings, settings as default_settings
class SafeChatOpenAI(ChatOpenAI):
    """ChatOpenAI variant that tolerates providers returning raw JSON strings.

    Some OpenAI-compatible backends send the chat completion payload as a
    JSON-encoded string instead of a parsed object; this subclass decodes
    such strings before delegating to the parent implementation.
    """

    def _create_chat_result(self, response, generation_info=None):
        # Decode string payloads; pass parsed objects through untouched.
        parsed = json.loads(response) if isinstance(response, str) else response
        return super()._create_chat_result(parsed, generation_info)
def get_llm(s: Settings | None = None, temperature: float = 0.1) -> SafeChatOpenAI:
    """Build a chat client from the given settings (or the defaults).

    Args:
        s: Settings to use; falls back to the module-level defaults.
        temperature: Sampling temperature passed to the model.

    Returns:
        A configured ``SafeChatOpenAI`` instance.

    Raises:
        RuntimeError: If the LLM endpoint/model are not configured.
    """
    cfg = s or default_settings

    # Fail fast with an actionable message rather than erroring on first call.
    if not cfg.is_llm_configured:
        raise RuntimeError(
            "LLM not configured. Set LLM_BASE_URL and LLM_MODEL in .env "
            "or via the Settings panel in the UI."
        )

    return SafeChatOpenAI(
        model=cfg.llm_model,
        base_url=cfg.llm_base_url,
        # Many local/self-hosted endpoints ignore the key but require one.
        api_key=cfg.llm_api_key or "not-needed",
        temperature=temperature,
    )
|