# Mcqs-Generator / app.py — Streamlit MCQ-quiz generator (Hugging Face Space by Engineer786, commit 2d00bb4)
import os
import streamlit as st
from PyPDF2 import PdfReader
from sentence_transformers import SentenceTransformer
import faiss
import numpy as np
from groq import Groq
# --- Client / model initialization ---

# Groq API key is read from the Space's secrets ("GroqApi" env var).
GROQ_API_KEY = os.environ.get('GroqApi')
if not GROQ_API_KEY:
    # Fail fast with an actionable message instead of an opaque
    # authentication error on the first chat-completion call.
    st.error("Missing Groq API key: set the 'GroqApi' environment variable.")
    st.stop()
client = Groq(api_key=GROQ_API_KEY)

# Sentence-transformer used to embed PDF chunks for the FAISS index.
embedding_model = SentenceTransformer('distilbert-base-uncased')
# --- Streamlit UI: main quiz flow ---
st.title("RAG-based Quiz App")


def _parse_mcq(question_text):
    """Parse a model-generated MCQ into (options, correct_option_text).

    Expected (but not guaranteed — LLM output is free-form) format:
        Question: ...
        A) ...
        B) ...
        ...
        Correct: A

    Returns:
        (options, correct_option) where ``options`` is the list of option
        texts and ``correct_option`` is the full text of the correct
        choice, or ``None`` when it cannot be determined.
    """
    lines = question_text.split("\n")
    letter_to_text = {}
    for line in lines:
        stripped = line.strip()
        # Only treat "A) ..." style lines as options; guard against lines
        # that merely start with A-D but carry no ") " separator (the old
        # split(") ")[1] raised IndexError on those).
        if stripped[:1] in ("A", "B", "C", "D") and ") " in stripped:
            letter, _, text = stripped.partition(") ")
            letter_to_text[letter[0]] = text.strip()
    options = list(letter_to_text.values())
    correct_option = None
    for line in lines:
        if "Correct:" in line:
            # Map the answer letter (e.g. "A") back to the option text so
            # it can be compared with the user's selection, which is the
            # option TEXT — comparing text against the bare letter (as the
            # previous version did) could never match.
            letter = line.split("Correct:", 1)[1].strip()[:1]
            correct_option = letter_to_text.get(letter)
            break
    return options, correct_option


uploaded_file = st.file_uploader("Upload a PDF", type="pdf")
if uploaded_file is not None:
    # Extract text; extract_text() may return None for image-only pages,
    # which would make the join raise TypeError without the `or ""` guard.
    pdf_reader = PdfReader(uploaded_file)
    text = " ".join((page.extract_text() or "") for page in pdf_reader.pages)

    st.write("Processing the PDF...")
    chunk_size = 500
    chunks = [text[i:i + chunk_size] for i in range(0, len(text), chunk_size)]

    # Embed chunks and build a FAISS L2 index over them.
    embeddings = np.array(embedding_model.encode(chunks), dtype="float32")
    index = faiss.IndexFlatL2(embeddings.shape[1])
    index.add(embeddings)
    st.success("PDF Processed! Embeddings Created.")

    st.write("Generating Quiz Questions...")
    questions = []
    for chunk in chunks[:3]:  # Process fewer chunks to improve performance
        response = client.chat.completions.create(
            messages=[{"role": "user",
                       "content": f"Create a multiple-choice quiz question from this text: {chunk}"}],
            model="llama3-8b-8192"
        )
        questions.append(response.choices[0].message.content)
    st.success("Quiz Questions Generated!")

    # Display the quiz, one radio group + submit button per question.
    for idx, question in enumerate(questions):
        st.write(f"**Question {idx+1}:** {question}")
        options, correct_option = _parse_mcq(question)
        if not options:
            # LLM did not follow the expected "A) ..." layout; skip rather
            # than crash on an empty radio group.
            st.warning("Could not parse answer options for this question.")
            continue
        selected_option = st.radio(f"Select your answer for Question {idx+1}", options, key=idx)
        if st.button(f"Submit Answer for Question {idx+1}", key=f"submit_{idx}"):
            if correct_option is None:
                st.warning("Could not determine the correct answer for this question.")
            elif selected_option == correct_option:
                st.success("Correct Answer!")
            else:
                st.error(f"Wrong Answer! Correct Answer: {correct_option}")
            # Highlight Correct and Selected Options
            st.write(f"**Correct Option:** {correct_option}")
            st.write(f"**Your Selection:** {selected_option}")
# Footer
st.write("App developed and deployed using Hugging Face Spaces.")
# ------------------------------------------------------------------
# Legacy duplicate of the quiz flow kept from an earlier revision.
# NOTE(review): this repeats the section above almost verbatim and is a
# candidate for outright removal; for now it is only made safe to
# coexist (unique widget keys) and hardened against None page text.
# ------------------------------------------------------------------
embedding_model = SentenceTransformer('distilbert-base-uncased')

st.title("RAG-based Quiz App")
# Explicit key: a second identically-configured file_uploader would
# otherwise raise a DuplicateWidgetID error in Streamlit.
uploaded_file = st.file_uploader("Upload a PDF", type="pdf", key="uploader_v2")
if uploaded_file is not None:
    pdf_reader = PdfReader(uploaded_file)
    # extract_text() may return None for image-only pages.
    text = " ".join((page.extract_text() or "") for page in pdf_reader.pages)

    st.write("Processing the PDF...")
    chunks = [text[i:i + 500] for i in range(0, len(text), 500)]

    # Embed chunks and build a FAISS L2 index over them.
    embeddings = np.array(embedding_model.encode(chunks), dtype="float32")
    index = faiss.IndexFlatL2(embeddings.shape[1])
    index.add(embeddings)
    st.success("PDF Processed! Embeddings Created.")

    st.write("Generating Quiz Questions...")
    questions = []
    for chunk in chunks[:5]:  # Generate questions for the first few chunks
        response = client.chat.completions.create(
            messages=[{"role": "user",
                       "content": f"Create a multiple-choice quiz question from this text: {chunk}"}],
            model="llama3-8b-8192"
        )
        questions.append(response.choices[0].message.content)
    st.success("Quiz Questions Generated!")

    # Display the quiz with placeholder options.
    for idx, question in enumerate(questions):
        st.write(f"**Question {idx+1}:** {question}")
        options = ["Option A", "Option B", "Option C", "Option D"]  # Placeholder
        # Keys are namespaced ("v2_") so they cannot collide with the
        # widgets created by the earlier copy of this flow, which used
        # key=idx / key=f"submit_{idx}" for its own radios and buttons.
        selected_option = st.radio(f"Select your answer for Question {idx+1}",
                                   options, key=f"v2_radio_{idx}")
        if st.button(f"Submit Answer for Question {idx+1}", key=f"v2_submit_{idx}"):
            # Dummy Logic: Assume Option A is correct for demonstration
            correct_option = "Option A"
            if selected_option == correct_option:
                st.success("Correct Answer!")
            else:
                st.error(f"Wrong Answer! Correct Answer: {correct_option}")
# Footer
st.write("App developed and deployed using Hugging Face Spaces.")
# ------------------------------------------------------------------
# Third variant of the flow (MiniLM embeddings, helper-based question
# generation). NOTE(review): also a near-duplicate of the sections
# above — candidate for consolidation into a single flow.
# ------------------------------------------------------------------
embedding_model = SentenceTransformer('all-MiniLM-L6-v2')

st.title("PDF to Quiz Generator")
# Explicit key avoids a DuplicateWidgetID clash with the uploaders above.
uploaded_file = st.file_uploader("Upload a PDF file", type=["pdf"], key="uploader_v3")
if uploaded_file:
    st.write("Processing PDF...")
    reader = PdfReader(uploaded_file)
    # extract_text() may return None for image-only pages; the previous
    # `pdf_text += page.extract_text()` raised TypeError in that case.
    pdf_text = "".join((page.extract_text() or "") for page in reader.pages)

    # Split text into fixed-size chunks.
    chunk_size = 512  # Adjust as needed
    text_chunks = [pdf_text[i:i + chunk_size] for i in range(0, len(pdf_text), chunk_size)]

    st.write("Generating embeddings...")
    # Explicit float32 cast: FAISS requires float32 input.
    embeddings = np.array(embedding_model.encode(text_chunks), dtype="float32")

    dimension = embeddings.shape[1]
    index = faiss.IndexFlatL2(dimension)
    index.add(embeddings)

    def generate_question(content):
        """Ask the Groq LLM for one multiple-choice question about *content*."""
        response = client.chat.completions.create(
            messages=[{"role": "user", "content": f"Generate a multiple-choice question from: {content}"}],
            model="llama3-8b-8192",
        )
        return response.choices[0].message.content

    # Generate one question per chunk.
    # NOTE(review): unlike the sections above this is NOT capped, so a
    # large PDF makes one API call per 512-char chunk — confirm intended.
    st.write("Generating quiz...")
    quiz = [generate_question(chunk) for chunk in text_chunks]

    # Display the quiz
    st.write("Here is your quiz:")
    for i, q in enumerate(quiz, 1):
        st.markdown(f"**Question {i}:** {q}")