ma4389's picture
Upload 2 files
c92c4a2 verified
raw
history blame contribute delete
969 Bytes
import gradio as gr
from transformers import pipeline
# Load a pretrained extractive question-answering pipeline: BERT-large
# (whole-word masking) fine-tuned on SQuAD. NOTE: this downloads the model
# weights from the Hugging Face Hub on first run and is slow/heavy at import.
qa_pipeline = pipeline("question-answering", model="bert-large-uncased-whole-word-masking-finetuned-squad")
# Define QA function used as the Gradio callback
def answer_question(context, question):
    """Extract an answer to *question* from *context* with the BERT QA pipeline.

    Args:
        context: Paragraph of text to search for the answer. May be None or
            blank — Gradio can pass None for a cleared textbox.
        question: The question to ask about the paragraph. May be None/blank.

    Returns:
        The extracted answer span as a string, or a prompt message when
        either input is missing or whitespace-only.
    """
    # Guard against None as well as empty/whitespace-only strings;
    # the original `context.strip()` raised AttributeError on None.
    if not (context and context.strip()) or not (question and question.strip()):
        return "Please provide both context and question."
    result = qa_pipeline(question=question, context=context)
    return result["answer"]
# Gradio interface wiring: two text inputs feed the QA callback, and a
# single textbox displays the extracted answer.
context_box = gr.Textbox(
    label="Paragraph (Context)",
    lines=10,
    placeholder="Enter a paragraph about Lenin...",
)
question_box = gr.Textbox(label="Question", placeholder="Who was Lenin?")
answer_box = gr.Textbox(label="Answer")

interface = gr.Interface(
    fn=answer_question,
    inputs=[context_box, question_box],
    outputs=answer_box,
    title="BERT Question Answering",
    description="Ask a question based on a custom paragraph using a BERT-based QA model.",
)
# Start the Gradio web server only when executed as a script, not on import.
if __name__ == "__main__":
    interface.launch()