| import gradio as gr |
| from transformers import pipeline |
| import torch |
| import logging |
|
|
| logging.basicConfig(level=logging.INFO) |
|
|
# Load the Vietnamese machine-reading-comprehension QA model once at import
# time (weights are downloaded from the Hugging Face Hub on first run).
# device=0 places the model on the first CUDA GPU when one is available;
# -1 is the transformers convention for CPU.
qa_pipeline = pipeline(
    "question-answering",
    model="nguyenvulebinh/vi-mrc-base",
    device=0 if torch.cuda.is_available() else -1
)
|
|
def answer_fn(question, context):
    """Answer *question* by extracting a span from *context*.

    Parameters
    ----------
    question : str
        The user's question (free text from the UI).
    context : str
        The passage the answer should be extracted from.

    Returns
    -------
    str
        The extracted answer span, or a Vietnamese user-facing message when
        the input is blank or the pipeline fails.
    """
    # The HF question-answering pipeline raises on blank question/context,
    # which would surface a stack trace in the Gradio UI — fail soft with a
    # readable message instead.
    if not question or not question.strip() or not context or not context.strip():
        return "Vui lòng nhập cả câu hỏi và đoạn văn bản."
    try:
        result = qa_pipeline(question=question, context=context)
        return result["answer"]
    except Exception:
        # Keep the traceback in the server log (logging is configured at
        # module import); show only a friendly message to the user.
        logging.exception("QA pipeline failed")
        return "Xin lỗi, đã xảy ra lỗi khi xử lý câu hỏi."
|
|
# Minimal Gradio UI: two free-text inputs (question, then context) mapped to
# a single text output.  The configuration is gathered in one dict and
# splatted into the Interface constructor.
_interface_config = {
    "fn": answer_fn,
    "inputs": ["text", "text"],
    "outputs": "text",
    "title": "AgriBot: Hỏi đáp nông nghiệp",
}
iface = gr.Interface(**_interface_config)
|
|
# Start the local Gradio server only when executed as a script, not when
# this module is imported (e.g. by a test runner or WSGI host).
if __name__ == "__main__":
    iface.launch()
|
|