# FIN_ASSISTANT / application / chat_assistant.py
# Branch: QAway-to — "Fix portfolio caching and stabilize streaming interactions" (a344e68)
"""
🇬🇧 Module: chat.py
Purpose: General chat interface for user questions about investments or portfolios.
🇷🇺 Модуль: chat.py
Назначение: общий чат-помощник для ответов на вопросы об инвестициях и портфелях.
"""
import logging
from typing import Generator

from infrastructure.llm_client import llm_service
from prompts.system_prompts import GENERAL_CONTEXT
class ChatAssistant:
    """General-purpose chat assistant backed by a streaming LLM.

    Wraps an LLM service and streams assistant replies for free-form user
    questions, using the shared ``GENERAL_CONTEXT`` system prompt.
    """

    def __init__(self, llm=llm_service, model_name: str = "meta-llama/Meta-Llama-3.1-8B-Instruct"):
        """Store the LLM client and model identifier.

        Args:
            llm: Client exposing ``stream_chat(messages=..., model=...)``;
                defaults to the shared module-level ``llm_service``.
            model_name: Model identifier passed through to the client.
        """
        self.llm = llm
        self.model_name = model_name

    def run(self, user_input: str) -> Generator[str, None, None]:
        """Stream chat responses for *user_input*.

        Yields:
            Cumulative partial response strings (each yield contains the full
            text produced so far), preceded by a transient "Working" status
            line. On empty input, yields a single prompt-for-input message.
        """
        if not user_input or not user_input.strip():
            yield "❗ Please enter a question for the assistant."
            return

        # Transient status shown while the first token is pending; the next
        # yield replaces it in a typical streaming UI.
        yield "⏳ Working..."

        partial = ""
        messages = [
            {"role": "system", "content": GENERAL_CONTEXT},
            {"role": "user", "content": user_input},
        ]
        try:
            for delta in self.llm.stream_chat(messages=messages, model=self.model_name):
                partial += delta
                # Yield the accumulated text so the UI can re-render in place.
                yield partial
        except Exception:
            # Log the real cause for operators; the user sees a generic
            # message rather than a raw traceback.
            logging.getLogger(__name__).exception("LLM streaming failed")
            yield "❌ Assistant is unavailable right now. Please try again later."