# Legal RAG Chatbot — Gradio entry point.
# (NOTE: this file previously began with Hugging Face Spaces page residue
# — status badges, commit hashes, and a line-number gutter — which was not
# valid Python and has been removed.)
import gradio as gr
from rag.rag_production import get_rag_chain
def rag_fn(model_name: str, input: str):
    """Stream a RAG answer for *input* from the model named *model_name*.

    Yields the progressively accumulated answer text so Gradio can render a
    live-updating response. Any failure is caught at this UI boundary: the
    traceback is printed for the server logs and a single error message is
    yielded to the user instead of raising.
    """
    try:
        chain = get_rag_chain(model_name=model_name)
        pieces = []  # collected answer fragments, joined on each yield
        for chunk in chain.stream({'input': input}):
            fragment = chunk.get('answer')
            if fragment is not None:
                pieces.append(fragment)
                yield ''.join(pieces)
    except Exception as e:
        import traceback
        print(traceback.format_exc())
        yield f"An error occurred: {e}"
# --- Gradio UI wiring ---------------------------------------------------
# The two dropdown choices double as the first column of the examples table.
MODEL_CHOICES = ['llama-3.3-70b-versatile', 'openai/gpt-oss-120b']

APP_DESCRIPTION = (
    "Select a model and ask a question to get an answer from the RAG system.\n"
    "Note that the Chatbot can only answer questions relating to laws defined in the 'Criminal Code of Vietnam'.\n"
    "'llama-3.3-70b-versatile' is recommended for more accurate answers."
)

EXAMPLE_QUERIES = [
    ['llama-3.3-70b-versatile', 'What is misprision according to the law?'],
    ['openai/gpt-oss-120b', 'What are the penalties for committing espionage or sabotage against Vietnam?'],
]

interface = gr.Interface(
    fn=rag_fn,
    inputs=[
        gr.Dropdown(choices=MODEL_CHOICES, label="MODEL"),
        gr.Textbox(label='QUESTION', placeholder="Type your question here...", lines=2),
    ],
    outputs=gr.Textbox(label='ANSWER'),
    title="Legal RAG Chatbot",
    description=APP_DESCRIPTION,
    examples=EXAMPLE_QUERIES,
    # Answers are streamed live per request; caching example outputs would
    # pin stale responses, so it is disabled.
    cache_examples=False,
)

if __name__ == "__main__":
    interface.launch()
|