File size: 2,122 Bytes
635c5e7
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
a4588cb
635c5e7
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
import gradio as gr
import pdfplumber
from transformers import pipeline, AutoTokenizer, AutoModelForCausalLM
import torch

# --- Model setup -------------------------------------------------------------
# A small causal LM keeps the demo runnable on modest hardware; swap in a
# larger checkpoint (e.g. 'tiiuae/falcon-rw-1b') for better critiques.
model_name = "gpt2"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)

# device=0 pins the pipeline to the first CUDA GPU; -1 selects CPU.
device_index = 0 if torch.cuda.is_available() else -1
generator = pipeline(
    "text-generation",
    model=model,
    tokenizer=tokenizer,
    device=device_index,
)

def extract_text_from_pdf(pdf_file):
    """Extract plain text from every page of a PDF.

    Args:
        pdf_file: A path or file-like object accepted by ``pdfplumber.open``.

    Returns:
        str: The text of all pages, each followed by a newline. Pages with
        no extractable text (e.g. scanned images) are skipped. Returns an
        empty string when nothing could be extracted.
    """
    # Collect page texts in a list and join once at the end — avoids the
    # quadratic cost of repeated string concatenation on long documents.
    pages = []
    with pdfplumber.open(pdf_file) as pdf:
        for page in pdf.pages:
            page_text = page.extract_text()
            if page_text:
                pages.append(page_text)
    return "".join(p + "\n" for p in pages)

def generate_critique(file):
    """Generate an AI critique for an uploaded research-paper PDF.

    Args:
        file: The uploaded file object from ``gr.File`` (None when nothing
            was uploaded).

    Returns:
        str: The generated critique, or a user-facing error message.
    """
    if file is None:
        return "Please upload a PDF file."

    extracted_text = extract_text_from_pdf(file)
    # Guard against image-only / empty PDFs so we don't prompt the model
    # with no paper content at all.
    if not extracted_text.strip():
        return "No extractable text found in the PDF (it may be a scanned image)."

    # Truncate text if too long for the model's small context window.
    extracted_text = extracted_text[:1500]

    prompt = f"""
Analyze the following research paper and provide:
1. give critique for the research paper 
2. give the critique report in points
3. Section-wise summaries (Abstract, Introduction, Methodology, Results, Conclusion).
4. Identify potential research gaps or areas lacking clarity.
5. Suggest improvements to enhance the research quality.
6. should be short, crisp and precise
Research Paper Content:
{extracted_text}
Critique:
"""

    try:
        # max_new_tokens bounds only the generated continuation. The previous
        # max_length=1024 also counted the prompt tokens against GPT-2's
        # 1024-token context, shrinking the critique as prompts grew and
        # erroring on long prompts. truncation=True additionally clips
        # prompts that still exceed the context window.
        response = generator(
            prompt,
            max_new_tokens=256,
            do_sample=True,
            temperature=0.7,
            truncation=True,
        )[0]["generated_text"]
        # Trim to only return what's after the final "Critique:" marker.
        return response.split("Critique:")[-1].strip()
    except Exception as e:
        # Surface the failure in the UI rather than crashing the app.
        return f"Error: {str(e)}"

# Wire the critique generator into a single-input / single-output Gradio UI.
pdf_input = gr.File(label="Upload PDF")
critique_box = gr.Textbox(label="Critique Output", lines=30)

iface = gr.Interface(
    fn=generate_critique,
    inputs=pdf_input,
    outputs=critique_box,
    title="📄 Research Paper Critique Generator (BY jay & karthi )",
    description="Upload a research paper PDF and receive a basic AI-generated critique using a local model.",
)
iface.launch()