Spaces:
Running
Running
File size: 1,381 Bytes
b2d5b74 15e92d0 b2d5b74 15e92d0 b2d5b74 a344e68 15e92d0 b2d5b74 678027d b2d5b74 678027d b2d5b74 a344e68 b2d5b74 15e92d0 b2d5b74 678027d a344e68 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 |
"""
🇬🇧 Module: chat.py
Purpose: General chat interface for user questions about investments or portfolios.
🇷🇺 Модуль: chat.py
Назначение: общий чат-помощник для ответов на вопросы об инвестициях и портфелях.
"""
import logging

from typing import Generator

from infrastructure.llm_client import llm_service
from prompts.system_prompts import GENERAL_CONTEXT
class ChatAssistant:
    """Handles general user dialogue via LLM.

    Streams the accumulated response text after every delta so a UI
    (e.g. a Gradio chat component) can re-render the latest value.
    """

    def __init__(self, llm=llm_service, model_name: str = "meta-llama/Meta-Llama-3.1-8B-Instruct"):
        """
        Args:
            llm: Client exposing ``stream_chat(messages=..., model=...)``;
                defaults to the shared ``llm_service``.
            model_name: Model identifier passed through to the LLM backend.
        """
        self.llm = llm
        self.model_name = model_name

    def run(self, user_input: str) -> Generator[str, None, None]:
        """Stream chat responses for ``user_input``.

        Yields:
            Progressively longer snapshots of the full response text
            (each yield supersedes the previous one), or a single
            user-facing error/placeholder message.
        """
        if not user_input or not user_input.strip():
            yield "❗ Please enter a question for the assistant."
            return
        yield "⏳ Working..."
        partial = ""
        messages = [
            {"role": "system", "content": GENERAL_CONTEXT},
            {"role": "user", "content": user_input},
        ]
        try:
            for delta in self.llm.stream_chat(messages=messages, model=self.model_name):
                partial += delta
                yield partial
        except Exception:
            # Record the actual failure for operators; the user only sees a
            # friendly fallback message.
            logging.getLogger(__name__).exception(
                "LLM streaming failed (model=%s)", self.model_name
            )
            yield "❌ Assistant is unavailable right now. Please try again later."
        else:
            if not partial:
                # The stream finished without producing any tokens; don't
                # leave the "Working..." placeholder as the final output.
                yield "❌ Assistant is unavailable right now. Please try again later."
|