# jaypt2project / app.py
# NOTE: the lines below are Hugging Face Space page residue preserved as a comment:
#   BoojithDharshan's picture / Update app.py / a4588cb verified
import gradio as gr
import pdfplumber
from transformers import pipeline, AutoTokenizer, AutoModelForCausalLM
import torch
# Load a small local language model; swap model_name for a stronger one
# (e.g. 'tiiuae/falcon-rw-1b') if the hardware can handle it.
model_name = "gpt2"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)
# Use the first GPU when CUDA is available, otherwise run on CPU (-1).
device_index = 0 if torch.cuda.is_available() else -1
generator = pipeline(
    "text-generation",
    model=model,
    tokenizer=tokenizer,
    device=device_index,
)
def extract_text_from_pdf(pdf_file):
    """Return all extractable text from *pdf_file*.

    Each page that yields text contributes its text followed by a newline;
    pages with no extractable text are skipped. Returns "" for a PDF with
    no extractable text at all.
    """
    chunks = []
    with pdfplumber.open(pdf_file) as pdf:
        for page in pdf.pages:
            content = page.extract_text()
            if content:
                chunks.append(content + "\n")
    return "".join(chunks)
def generate_critique(file):
    """Generate an AI critique for an uploaded research-paper PDF.

    Parameters:
        file: the uploaded file object from gr.File, or None when nothing
            was uploaded.

    Returns:
        str: the generated critique text, or a user-facing help/error message.
    """
    if file is None:
        return "Please upload a PDF file."
    extracted_text = extract_text_from_pdf(file)
    # Robustness: a scanned/image-only PDF yields no text; tell the user
    # instead of prompting the model with empty content.
    if not extracted_text.strip():
        return "No extractable text found in the PDF (it may be a scanned image)."
    # Character-level truncation keeps the tokenized prompt well inside
    # GPT-2's 1024-token context window.
    extracted_text = extracted_text[:1500]
    prompt = f"""
Analyze the following research paper and provide:
1. give critique for the research paper
2. give the critique report in points
3. Section-wise summaries (Abstract, Introduction, Methodology, Results, Conclusion).
4. Identify potential research gaps or areas lacking clarity.
5. Suggest improvements to enhance the research quality.
6. should be short, crisp and precise
Research Paper Content:
{extracted_text}
Critique:
"""
    try:
        # Fix: use max_new_tokens instead of max_length. max_length counts the
        # prompt's tokens too, so a long prompt left little or no generation
        # budget (or raised an error near the context limit). truncation=True
        # additionally guards against over-long prompts.
        response = generator(
            prompt,
            max_new_tokens=400,
            do_sample=True,
            temperature=0.7,
            truncation=True,
        )[0]["generated_text"]
        # The returned text includes the prompt; keep only the model's
        # continuation after the final "Critique:" marker.
        return response.split("Critique:")[-1].strip()
    except Exception as e:
        # Surface the error in the UI rather than crashing the Gradio app.
        return f"Error: {str(e)}"
# Build and launch the Gradio UI: one PDF file input, one large text output.
pdf_input = gr.File(label="Upload PDF")
critique_output = gr.Textbox(label="Critique Output", lines=30)
iface = gr.Interface(
    fn=generate_critique,
    inputs=pdf_input,
    outputs=critique_output,
    title="📄 Research Paper Critique Generator (BY jay & karthi )",
    description="Upload a research paper PDF and receive a basic AI-generated critique using a local model.",
)
iface.launch()