# Russian_AI_QA / app.py
# Author: Futyn-Maker — uploaded via huggingface_hub (commit 6b83723, verified)
"""Gradio chatbot interface for the RAG system."""
import argparse
import gradio as gr
import config
from rag import RAGSystem
def create_app(top_k: int = config.DEFAULT_TOP_K,
               temperature: float = config.DEFAULT_TEMPERATURE) -> gr.Blocks:
    """Build and return the Gradio Blocks UI wired to a single RAGSystem.

    Args:
        top_k: Number of documents the retriever returns per query.
        temperature: Sampling temperature forwarded to the LLM.

    Returns:
        A ``gr.Blocks`` application ready to be launched.
    """
    # One RAG system is shared by every chat turn of this app instance.
    rag = RAGSystem(top_k=top_k, temperature=temperature)

    def generate_answer(question: str, chat_log: list) -> str:
        """Run the RAG pipeline and append a sorted, deduplicated source list."""
        if not question.strip():
            return "Пожалуйста, введите вопрос."

        result = rag.query(question)
        text = result["answer"]

        # Collect unique "source (section)" labels from the retrieved docs.
        labels = set()
        for doc in result["source_docs"]:
            src = doc.metadata.get("source", "")
            if not src:
                continue
            section = doc.metadata.get("section", "")
            labels.add(f"{src} ({section})" if section else src)

        if labels:
            text += f"\n\n📎 *Источники: {', '.join(sorted(labels))}*"
        return text

    with gr.Blocks(
        title="RAG: Стратегия развития ИИ в России",
        theme=gr.themes.Soft(),
    ) as app:
        gr.Markdown(
            "# 🤖 Стратегия развития ИИ в России\n"
            "Задайте вопрос о Национальной стратегии развития "
            "искусственного интеллекта на период до 2030 года."
        )
        chatbot = gr.Chatbot(label="Диалог", height=500, type="messages")
        msg = gr.Textbox(
            label="Ваш вопрос",
            placeholder="Например: Какие цели развития ИИ определены в Стратегии?",
            lines=2,
        )
        with gr.Row():
            submit_btn = gr.Button("Отправить", variant="primary")
            clear_btn = gr.Button("Очистить")
        with gr.Accordion("Параметры", open=False):
            gr.Markdown(
                f"- **Модель генерации:** `{config.LLM_MODEL}`\n"
                f"- **Модель эмбеддингов:** `{config.EMBEDDING_MODEL}`\n"
                f"- **Top-K документов:** {top_k}\n"
                f"- **Температура:** {temperature}"
            )

        def on_submit(user_text, chat_log):
            """Append the user turn and the assistant reply; clear the textbox."""
            if not user_text.strip():
                # Blank input: leave history untouched, just clear the box.
                return "", chat_log
            updated = chat_log + [{"role": "user", "content": user_text}]
            reply = generate_answer(user_text, updated)
            updated = updated + [{"role": "assistant", "content": reply}]
            return "", updated

        def on_clear():
            """Reset the chat history and the input box."""
            return [], ""

        msg.submit(on_submit, [msg, chatbot], [msg, chatbot])
        submit_btn.click(on_submit, [msg, chatbot], [msg, chatbot])
        clear_btn.click(on_clear, outputs=[chatbot, msg])

    return app
def main() -> None:
    """Parse CLI arguments, build the app, and launch the Gradio server."""
    parser = argparse.ArgumentParser(description="RAG Chatbot")
    parser.add_argument("--top-k", type=int, default=config.DEFAULT_TOP_K,
                        help="Number of retrieved documents")
    # FIX: the default was hard-coded to 0.1; use the shared config constant
    # so the CLI default stays consistent with create_app() and --top-k.
    parser.add_argument("--temperature", type=float,
                        default=config.DEFAULT_TEMPERATURE,
                        help="LLM temperature")
    parser.add_argument("--port", type=int, default=7860, help="Server port")
    parser.add_argument("--share", action="store_true",
                        help="Create public link")
    args = parser.parse_args()

    app = create_app(top_k=args.top_k, temperature=args.temperature)
    # Bind to all interfaces so the app is reachable from inside a
    # container / HF Space, not just localhost.
    app.launch(server_name="0.0.0.0", server_port=args.port, share=args.share)


if __name__ == "__main__":
    main()