Engineer786 committed on
Commit
f2ecdcb
·
verified ·
1 Parent(s): 7253a6e

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +4 -21
app.py CHANGED
@@ -43,7 +43,7 @@ if uploaded_file:
43
  # Step 5: Generate questions and options using Groq API
44
  def generate_question_and_options(content):
45
  response = client.chat.completions.create(
46
- messages=[{"role": "user", "content": f"Generate a multiple-choice question with options from: {content}"}],
47
  model="llama3-8b-8192",
48
  )
49
  return response.choices[0].message.content
@@ -53,23 +53,6 @@ if uploaded_file:
53
  quiz = []
54
  for chunk in text_chunks:
55
  question_data = generate_question_and_options(chunk)
56
- # Expecting response in format: "Question: ...\nOptions: A)... B)... C)...\nAnswer: ..."
57
- question_lines = question_data.split("\n")
58
- question = question_lines[0].replace("Question: ", "")
59
- options = [line.split(")")[1].strip() for line in question_lines if line.startswith(("A", "B", "C", "D"))]
60
- answer = question_lines[-1].replace("Answer: ", "").strip()
61
- quiz.append({"question": question, "options": options, "answer": answer})
62
-
63
- # Display the quiz
64
- st.write("🎯 **Your Quiz:**")
65
- for i, q in enumerate(quiz, 1):
66
- st.markdown(f"### Question {i}: {q['question']}")
67
- selected_option = st.radio(f"Select your answer for Question {i}:", q['options'], key=f"q{i}")
68
-
69
- if st.button(f"Submit Answer for Question {i}", key=f"submit_q{i}"):
70
- if selected_option == q['answer']:
71
- st.success(f"🎉 Correct! The answer is: {q['answer']}", icon="✅")
72
- else:
73
- st.error(f"❌ Incorrect! The correct answer is: {q['answer']}", icon="🚫")
74
-
75
- st.write("Thank you for taking the quiz! 🎓")
 
43
  # Step 5: Generate questions and options using Groq API
44
  def generate_question_and_options(content):
45
  response = client.chat.completions.create(
46
+ messages=[{"role": "user", "content": f"Generate a multiple-choice question with exactly 4 options from the following text: {content}"}],
47
  model="llama3-8b-8192",
48
  )
49
  return response.choices[0].message.content
 
53
  quiz = []
54
  for chunk in text_chunks:
55
  question_data = generate_question_and_options(chunk)
56
+ try:
57
+ # Parse response into a question, options, and answer
58
+