pratikshahp committed on
Commit
c233fdc
·
verified ·
1 Parent(s): a2a36c7

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +11 -25
app.py CHANGED
@@ -1,8 +1,6 @@
1
  import streamlit as st
2
  import fitz # PyMuPDF
3
- from transformers import AutoTokenizer, AutoModelForCausalLM
4
- from langchain.prompts import PromptTemplate
5
- from langchain.chains import LLMChain
6
 
7
  # Function to extract text from PDF
8
  def extract_text_from_pdf(pdf_file):
@@ -13,27 +11,15 @@ def extract_text_from_pdf(pdf_file):
13
  text += page.get_text()
14
  return text
15
 
16
- # Function to generate MCQs
17
  def generate_mcqs(text, num_questions=5):
18
- # Define a prompt template for generating MCQs
19
- prompt_template = """
20
- Generate {num_questions} multiple-choice questions based on the following text:
21
- {text}
22
- """
23
-
24
- prompt = prompt_template.format(text=text, num_questions=num_questions)
25
-
26
- # Load HuggingFace model
27
- tokenizer = AutoTokenizer.from_pretrained("openai-community/gpt2-large")
28
- llm = AutoModelForCausalLM.from_pretrained("openai-community/gpt2-large")
29
-
30
- # Create an LLM chain with the prompt
31
- chain = LLMChain(prompt_template=PromptTemplate(template=prompt_template), llm=llm)
32
-
33
- # Run the chain to generate MCQs
34
- response = chain.run(text=text, num_questions=num_questions)
35
-
36
- return response
37
 
38
  # Streamlit app interface
39
  st.title("PDF to MCQ Generator")
@@ -50,5 +36,5 @@ if uploaded_file is not None:
50
  mcqs = generate_mcqs(text, num_questions)
51
 
52
  st.write("Generated MCQs:")
53
- st.write(mcqs)
54
-
 
1
  import streamlit as st
2
  import fitz # PyMuPDF
3
+ from transformers import pipeline
 
 
4
 
5
  # Function to extract text from PDF
6
  def extract_text_from_pdf(pdf_file):
 
11
  text += page.get_text()
12
  return text
13
 
14
# Function to generate MCQs using a Hugging Face text-generation model
def generate_mcqs(text, num_questions=5):
    """Generate candidate MCQ texts from *text* with a Hugging Face pipeline.

    Parameters
    ----------
    text : str
        Source text (extracted from the uploaded PDF) used as the prompt.
    num_questions : int, optional
        Number of generated sequences to return (default 5).

    Returns
    -------
    list[dict]
        One dict per sequence, each with a ``"generated_text"`` key,
        as returned by the transformers text-generation pipeline.
    """
    # Build the pipeline once and cache it on the function object:
    # re-creating it per call would reload the model from disk on every
    # button click in the Streamlit app.
    if not hasattr(generate_mcqs, "_pipeline"):
        generate_mcqs._pipeline = pipeline("text-generation")

    # do_sample=True is required: with the default greedy decoding,
    # transformers raises a ValueError when num_return_sequences > 1.
    # truncation=True keeps prompts longer than max_length from erroring,
    # since max_length counts the prompt tokens too.
    mcqs = generate_mcqs._pipeline(
        text,
        max_length=100,
        num_return_sequences=num_questions,
        do_sample=True,
        truncation=True,
    )

    return mcqs
 
 
 
 
 
 
 
 
 
 
 
 
23
 
24
  # Streamlit app interface
25
  st.title("PDF to MCQ Generator")
 
36
  mcqs = generate_mcqs(text, num_questions)
37
 
38
  st.write("Generated MCQs:")
39
+ for idx, mcq in enumerate(mcqs):
40
+ st.write(f"{idx+1}. {mcq['generated_text']}")