Engineer786 committed on
Commit
7253a6e
Β·
verified Β·
1 Parent(s): 7099e0a

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +35 -18
app.py CHANGED
@@ -6,53 +6,70 @@ import os
6
from groq import Groq

# Initialize Groq client.
# NOTE(review): assumes the 'GroqApi' env var is set; Groq(api_key=None)
# only fails later with an unclear error — consider failing fast here.
GROQ_API_KEY = os.environ.get('GroqApi')
client = Groq(api_key=GROQ_API_KEY)

# Initialize embedding model
embedding_model = SentenceTransformer('all-MiniLM-L6-v2')

# Upload PDF
st.title("PDF to Quiz Generator")

uploaded_file = st.file_uploader("Upload a PDF file", type=["pdf"])

if uploaded_file:
    # Extract text from PDF.
    st.write("Processing PDF...")
    reader = PdfReader(uploaded_file)
    # extract_text() can return None for image-only pages; substitute ""
    # so the concatenation cannot raise TypeError.
    pdf_text = "".join(page.extract_text() or "" for page in reader.pages)

    # Split text into fixed-size chunks for embedding / question generation.
    chunk_size = 512  # Adjust as needed
    text_chunks = [pdf_text[i:i + chunk_size] for i in range(0, len(pdf_text), chunk_size)]

    # Generate embeddings
    st.write("Generating embeddings...")
    embeddings = embedding_model.encode(text_chunks)

    # Store embeddings in a FAISS L2 index (built here but not yet queried).
    dimension = embeddings.shape[1]
    index = faiss.IndexFlatL2(dimension)
    index.add(embeddings)

    # Generate questions using Groq API
    def generate_question(content):
        """Ask the LLM for one multiple-choice question about *content*.

        Returns the raw completion text unmodified.
        """
        response = client.chat.completions.create(
            messages=[{"role": "user", "content": f"Generate a multiple-choice question from: {content}"}],
            model="llama3-8b-8192",
        )
        return response.choices[0].message.content

    # Generate quiz: one question per chunk.
    st.write("Generating quiz...")
    quiz = [generate_question(chunk) for chunk in text_chunks]

    # Display the quiz
    st.write("Here is your quiz:")
    for i, q in enumerate(quiz, 1):
        st.markdown(f"**Question {i}:** {q}")
 
 
 
 
 
 
 
 
 
 
6
from groq import Groq

# Initialize Groq client.
# NOTE(review): assumes the 'GroqApi' env var is set; Groq(api_key=None)
# only fails later with an unclear error — consider failing fast here.
GROQ_API_KEY = os.environ.get('GroqApi')
client = Groq(api_key=GROQ_API_KEY)

# Initialize embedding model
embedding_model = SentenceTransformer('all-MiniLM-L6-v2')

# Streamlit app title and description
st.title("📘 PDF to Quiz Generator")
st.write("Upload a PDF document, and we'll generate a multiple-choice quiz for you!")

# File uploader for PDF
uploaded_file = st.file_uploader("Upload a PDF file", type=["pdf"])

if uploaded_file:
    # Step 1: Extract text from PDF
    st.write("🔄 Processing PDF...")
    reader = PdfReader(uploaded_file)
    # extract_text() can return None for image-only pages; substitute ""
    # so the concatenation cannot raise TypeError.
    pdf_text = "".join(page.extract_text() or "" for page in reader.pages)

    # Step 2: Split text into chunks
    chunk_size = 512  # Adjust as needed
    text_chunks = [pdf_text[i:i + chunk_size] for i in range(0, len(pdf_text), chunk_size)]

    # Step 3: Generate embeddings
    st.write("🔍 Generating embeddings...")
    embeddings = embedding_model.encode(text_chunks)

    # Step 4: Store embeddings in FAISS (index is built here but not yet queried)
    dimension = embeddings.shape[1]
    index = faiss.IndexFlatL2(dimension)
    index.add(embeddings)

    # Step 5: Generate questions and options using Groq API
    def generate_question_and_options(content):
        """Ask the LLM for one multiple-choice question with options about *content*.

        Returns the raw completion text unmodified.
        """
        response = client.chat.completions.create(
            messages=[{"role": "user", "content": f"Generate a multiple-choice question with options from: {content}"}],
            model="llama3-8b-8192",
        )
        return response.choices[0].message.content

    def _parse_question_data(question_data):
        """Parse the model's free-form reply into (question, options, answer).

        Expected shape: "Question: ...", then option lines "A) ..." through
        "D) ...", then "Answer: ...". The previous parser raised IndexError on
        lines such as "Answer: B" (startswith("A") is true but there is no ")"
        to split on) and truncated option text that itself contained a ")";
        this version only treats lines literally beginning with "A)".."D)"
        as options and keeps the full option text.
        """
        lines = [line.strip() for line in question_data.split("\n") if line.strip()]
        question = lines[0].replace("Question: ", "") if lines else ""
        options = [
            line.split(")", 1)[1].strip()
            for line in lines
            if line[:2] in ("A)", "B)", "C)", "D)")
        ]
        answer = lines[-1].replace("Answer: ", "").strip() if lines else ""
        return question, options, answer

    # Generate the quiz
    st.write("📝 Generating quiz...")
    quiz = []
    for chunk in text_chunks:
        question_data = generate_question_and_options(chunk)
        question, options, answer = _parse_question_data(question_data)
        quiz.append({"question": question, "options": options, "answer": answer})

    # Display the quiz
    st.write("🎯 **Your Quiz:**")
    for i, q in enumerate(quiz, 1):
        st.markdown(f"### Question {i}: {q['question']}")
        selected_option = st.radio(f"Select your answer for Question {i}:", q['options'], key=f"q{i}")

        if st.button(f"Submit Answer for Question {i}", key=f"submit_q{i}"):
            # NOTE(review): the model's "Answer:" line is often just a letter
            # ("B") while the radio options hold option text — confirm the
            # prompt/response format so this comparison can actually match.
            if selected_option == q['answer']:
                st.success(f"🎉 Correct! The answer is: {q['answer']}", icon="✅")
            else:
                st.error(f"❌ Incorrect! The correct answer is: {q['answer']}", icon="🚫")

    st.write("Thank you for taking the quiz! 🎓")