Open_Domain_QA / app.py
jaimin's picture
Update app.py
fbe7ca2
raw
history blame contribute delete
600 Bytes
from transformers import AutoModelForQuestionAnswering, AutoTokenizer, pipeline
import gradio as gr
# Build the extractive question-answering pipeline once at import time.
_MODEL_ID = 'jaimin/QA'
nlp = pipeline('question-answering', model=_MODEL_ID, tokenizer=_MODEL_ID)
def questionAndAnswer(ques, content):
    """Answer a question from the given context passage.

    Args:
        ques: The question to answer.
        content: The context text the answer is extracted from.

    Returns:
        The answer span selected by the QA pipeline.
    """
    # The pipeline accepts a {'question': ..., 'context': ...} mapping
    # and returns a dict containing the extracted answer text.
    prediction = nlp({'question': ques, 'context': content})
    return prediction['answer']
# Wire the QA function into a simple two-textbox Gradio UI and serve it.
interface = gr.Interface(
    fn=questionAndAnswer,
    inputs=["text", "text"],
    outputs="text",
    title='Question&Answer',
)
interface.launch(inline=False)