pratikshahp committed on
Commit
8934212
·
verified ·
1 Parent(s): 8ecb308

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +7 -8
app.py CHANGED
@@ -1,10 +1,9 @@
1
  import streamlit as st
2
  import fitz # PyMuPDF
3
  from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
 
4
  # Load model directly
5
  model_name = "openai-community/gpt2"
6
- # Load the LLaMA model and tokenizer
7
- #model_name = "meta-llama/Meta-Llama-Guard-2-8B" # Update this with the correct LLaMA model name
8
  tokenizer = AutoTokenizer.from_pretrained(model_name)
9
  model = AutoModelForCausalLM.from_pretrained(model_name)
10
 
@@ -17,7 +16,7 @@ def extract_text_from_pdf(pdf_file):
17
  text += page.get_text()
18
  return text
19
 
20
- # Function to generate MCQs using the LLaMA model
21
  def generate_mcqs(text, num_questions=5):
22
  if not text.strip():
23
  return ["No text extracted from the PDF. Unable to generate MCQs."]
@@ -30,15 +29,15 @@ def generate_mcqs(text, num_questions=5):
30
  mcqs = []
31
  for _ in range(num_questions):
32
  # Generate a single MCQ at a time
33
- input_text = f"Generate a multiple choice question from the following text: {tokenizer.decode(inputs['input_ids'][0])}"
34
  generated = generator(input_text, max_length=400, num_return_sequences=1)
35
- question_text = generated[0]["generated_text"]
36
 
37
  # Format the MCQ
38
- options = ["Option A: ABC", "Option B: DEF", "Option C: GHI", "Option D: JKL"] # Placeholder options
39
- correct_answer = "Option A: ABC" # Placeholder correct answer for now
40
 
41
- mcq_formatted = f"Q: {question_text}\nOption A: {options[0]}\nOption B: {options[1]}\nOption C: {options[2]}\nOption D: {options[3]}\nCorrect Answer: {correct_answer}"
42
  mcqs.append(mcq_formatted)
43
 
44
  return mcqs
 
1
  import streamlit as st
2
  import fitz # PyMuPDF
3
  from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
4
+
5
  # Load model directly
6
  model_name = "openai-community/gpt2"
 
 
7
  tokenizer = AutoTokenizer.from_pretrained(model_name)
8
  model = AutoModelForCausalLM.from_pretrained(model_name)
9
 
 
16
  text += page.get_text()
17
  return text
18
 
19
+ # Function to generate MCQs using the model
20
  def generate_mcqs(text, num_questions=5):
21
  if not text.strip():
22
  return ["No text extracted from the PDF. Unable to generate MCQs."]
 
29
  mcqs = []
30
  for _ in range(num_questions):
31
  # Generate a single MCQ at a time
32
+ input_text = f"Based on the following text, generate a multiple choice question:\n\n{text}\n\nQuestion:"
33
  generated = generator(input_text, max_length=400, num_return_sequences=1)
34
+ question_text = generated[0]["generated_text"].split("Question:")[1].strip()
35
 
36
  # Format the MCQ
37
+ options = ["Option A: Placeholder A", "Option B: Placeholder B", "Option C: Placeholder C", "Option D: Placeholder D"]
38
+ correct_answer = "Option A: Placeholder A" # Placeholder correct answer for now
39
 
40
+ mcq_formatted = f"Q: {question_text}\n{options[0]}\n{options[1]}\n{options[2]}\n{options[3]}\nCorrect Answer: {correct_answer}"
41
  mcqs.append(mcq_formatted)
42
 
43
  return mcqs