Flant5_test / app.py
Binoddai's picture
Create app.py
55c6217 verified
raw
history blame contribute delete
818 Bytes
import gradio as gr
from transformers import AutoModelForSeq2SeqLM, AutoTokenizer
# Hugging Face model ID of a Flan-T5 checkpoint fine-tuned on the Garuda Purana.
model_name = "Binoddai/garud-puran-flan-t5-finetuned"
# Load the matching tokenizer and seq2seq model once at import time so every
# request reuses the same in-memory weights (downloads on first run).
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForSeq2SeqLM.from_pretrained(model_name)
def answer_question(question: str, max_new_tokens: int = 128) -> str:
    """Generate an answer to *question* with the fine-tuned Flan-T5 model.

    Args:
        question: Free-form user question (plain text).
        max_new_tokens: Upper bound on generated tokens; default 128 matches
            the original hard-coded value, so existing callers are unaffected.

    Returns:
        The decoded model output with special tokens stripped.
    """
    # truncation=True guards against inputs longer than the model's context
    # window, which would otherwise raise at generate time.
    # NOTE(review): assumes the tokenizer carries a sensible model_max_length —
    # confirm for this checkpoint.
    inputs = tokenizer(question, return_tensors="pt", truncation=True)
    output = model.generate(**inputs, max_new_tokens=max_new_tokens)
    return tokenizer.decode(output[0], skip_special_tokens=True)
# Build the UI components first, then wire them into the Interface.
question_box = gr.Textbox(lines=2, label="Ask a question about Garuda Purana")
answer_box = gr.Textbox(label="Model's Answer")

# Simple one-function app: a text question in, the model's decoded answer out.
iface = gr.Interface(
    fn=answer_question,
    inputs=question_box,
    outputs=answer_box,
    title="Garuda Purana Q&A (Flan-T5 Fine-tuned)",
    description="Ask any question and get an answer based on the fine-tuned Garuda Purana model.",
)

# Start the Gradio server (blocks until shut down).
iface.launch()