import threading
from typing import Any, List, Optional
from tools.prompts import build_chat_system_prompt
from tools.text import normalize_message
class ChatHandler:
    """Chat entry point that serializes requests to an LLM client.

    A lock ensures at most one LLM call is in flight at a time; startup
    failures are surfaced to the user as assistant-role messages rather
    than raised.
    """

    def __init__(self, llm: Any, startup_error: Optional[str] = None):
        self._llm = llm
        self._startup_error = startup_error
        # Serializes access to the LLM client across concurrent callers.
        self._lock = threading.Lock()
        self._system_prompt = build_chat_system_prompt()

    @staticmethod
    def _with_turn(prior: List[Any], user_text: str, assistant_text: str) -> List[Any]:
        """Return a new history list extended by one user/assistant exchange."""
        return prior + [
            {"role": "user", "content": user_text},
            {"role": "assistant", "content": assistant_text},
        ]

    def send(self, message: str, history: Optional[List[Any]]) -> tuple[str, List[Any]]:
        """Handle one chat turn.

        Returns a pair of (cleared input text, updated history). The
        incoming history list is never mutated; extensions produce a new
        list.
        """
        prior = history or []
        clean = normalize_message(message)

        # Blank/whitespace-only input: nothing to do, history unchanged.
        if not clean:
            return "", prior

        # Surface a recorded startup failure instead of attempting a call.
        if self._startup_error:
            return "", self._with_turn(
                prior, clean, f"startup_error: {self._startup_error}"
            )

        # No client was wired up at all.
        if self._llm is None:
            return "", self._with_turn(
                prior, clean, "startup_error: agent unavailable"
            )

        with self._lock:
            reply = self._llm.chat(self._system_prompt, prior, clean)
            return "", self._with_turn(prior, clean, reply)