# app.py — PDF/Text question-answering demo (revision b964caf)
import gradio as gr
from transformers import pipeline
from PyPDF2 import PdfReader
# Generative text2text model used to produce conversational answers.
# Loaded once at import time so every request reuses the same weights.
MODEL_NAME = "google/flan-t5-large"
question_answer_pipeline = pipeline("text2text-generation", model=MODEL_NAME)
# Function to extract text from a PDF
def extract_text_from_pdf(pdf_file_path):
    """Return all extractable text from the PDF at *pdf_file_path*.

    Pages without a text layer are skipped. On any failure a
    human-readable string beginning with "Error" is returned instead
    of raising — callers detect this via ``str.startswith("Error")``.
    """
    try:
        reader = PdfReader(pdf_file_path)
        # Concatenate page texts with no separator (some pages yield
        # None/"" when they have no extractable text — drop those).
        chunks = (page.extract_text() for page in reader.pages)
        return "".join(chunk for chunk in chunks if chunk).strip()
    except Exception as e:
        return f"Error extracting text from PDF: {e}"
# Function to process the context and generate a human-like answer
def get_humanlike_answer(pdf_path, text_input, question):
    """Answer *question* using either an uploaded PDF or pasted text.

    The PDF takes precedence when both inputs are supplied. Returns the
    model's answer, or an error/prompt message string when no usable
    context is available or generation fails.
    """
    if pdf_path:  # If a PDF is uploaded
        context = extract_text_from_pdf(pdf_path)
        if context.startswith("Error"):
            return context  # Propagate the extraction error message
    # Guard against None: Gradio passes None (not "") for an untouched
    # Textbox, and the original `text_input.strip()` raised AttributeError.
    elif text_input and text_input.strip():  # If text is pasted
        context = text_input
    else:
        return "Please upload a PDF or paste text for context."
    # Build a conversational prompt for the text2text model.
    prompt = f"Context: {context}\nQuestion: {question}\nAnswer conversationally:"
    try:
        response = question_answer_pipeline(prompt, max_length=150, num_return_sequences=1)
        return response[0].get("generated_text", "Error: Could not generate an answer.")
    except Exception as e:
        return f"Error generating answer: {e}"
# Gradio Interface: wire the answering function into a three-input web UI.
demo = gr.Interface(
    fn=get_humanlike_answer,
    inputs=[
        gr.File(label="Upload PDF (optional)", type="filepath"),
        gr.Textbox(label="Paste Text (optional)", lines=10),
        gr.Textbox(label="Ask a Question", lines=1),
    ],
    outputs=gr.Textbox(label="Answer", lines=4),
    title="PDF/Text Question Answering System",
    description=(
        "Upload a PDF or paste text and ask questions. Get human-like "
        "answers! If both are provided, the PDF will be used."
    ),
)

if __name__ == "__main__":
    demo.launch()