# NOTE(review): the three lines that were here ("Spaces:" / "Sleeping" /
# "Sleeping") are non-code residue from the web page this file was extracted
# from; converted to a comment so the module parses as Python.
import threading
from typing import Any, List, Optional

# Project-local helpers: system-prompt construction and user-input
# normalization (semantics defined in the tools package — not visible here).
from tools.prompts import build_chat_system_prompt
from tools.text import normalize_message
class ChatHandler:
    """Handle chat turns against an LLM backend, degrading gracefully when
    the backend failed to start or is unavailable.

    History entries are dicts with ``role``/``content`` keys (the message
    format produced and consumed by ``send``).
    """

    def __init__(self, llm: Any, startup_error: Optional[str] = None):
        self._llm = llm
        self._startup_error = startup_error
        # Serializes calls into the LLM; presumably the backend is not
        # thread-safe — TODO confirm against the llm implementation.
        self._lock = threading.Lock()
        self._system_prompt = build_chat_system_prompt()

    @staticmethod
    def _exchange(history: List[Any], user: str, assistant: str) -> List[Any]:
        """Return *history* extended (non-mutating) with one user/assistant pair."""
        return history + [
            {"role": "user", "content": user},
            {"role": "assistant", "content": assistant},
        ]

    def send(self, message: str, history: Optional[List[Any]]) -> tuple[str, List[Any]]:
        """Process one chat turn.

        Args:
            message: Raw user input; passed through ``normalize_message``.
            history: Prior conversation messages, or ``None`` for a fresh chat.

        Returns:
            A ``("", updated_history)`` pair — the empty first element clears
            the caller's input box. If normalization yields an empty string the
            turn is ignored and the history is returned unchanged. A startup
            error or missing backend is surfaced as an assistant message
            rather than raised.
        """
        safe_history = history or []
        clean = normalize_message(message)
        if not clean:
            return "", safe_history
        if self._startup_error:
            return "", self._exchange(
                safe_history, clean, f"startup_error: {self._startup_error}"
            )
        if self._llm is None:
            return "", self._exchange(
                safe_history, clean, "startup_error: agent unavailable"
            )
        # Hold the lock only for the backend call itself.
        with self._lock:
            reply = self._llm.chat(self._system_prompt, safe_history, clean)
        return "", self._exchange(safe_history, clean, reply)