Spaces:
Sleeping
Sleeping
Commit
·
3ca2bf2
1
Parent(s):
09b8ff3
Upload all files
Browse files- .env +3 -0
- .vscode/settings.json +3 -0
- MCQGen.ipynb +18 -0
- app.py +74 -0
- data.txt +18 -0
- experiment/machinelearning_quiz.csv +7 -0
- experiment/mcqgen.ipynb +1011 -0
- logs/04_22_2024_19_37_02.log +4 -0
- logs/04_22_2024_19_57_57.log +2 -0
- logs/04_22_2024_21_36_57.log +2 -0
- logs/04_27_2024_17_51_55.log +0 -0
- logs/04_27_2024_18_33_22.log +0 -0
- requirements.txt +6 -0
- response.json +33 -0
- setup.py +10 -0
- src/__init__.py +0 -0
- src/__pycache__/__init__.cpython-39.pyc +0 -0
- src/mcqgen/__init__.py +0 -0
- src/mcqgen/__pycache__/__init__.cpython-39.pyc +0 -0
- src/mcqgen/__pycache__/logger.cpython-39.pyc +0 -0
- src/mcqgen/__pycache__/mcqgenerator.cpython-39.pyc +0 -0
- src/mcqgen/__pycache__/utils.cpython-39.pyc +0 -0
- src/mcqgen/logger.py +13 -0
- src/mcqgen/mcqgenerator.py +75 -0
- src/mcqgen/utils.py +44 -0
- test.py +4 -0
.env
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
OPENAI_API_KEY="sk-proj-7HxfmzUbnhToy0w6R0EYT3BlbkFJ9LHWpns3009EQc7B11X4"
|
| 2 |
+
|
| 3 |
+
|
.vscode/settings.json
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"python.REPL.enableREPLSmartSend": false
|
| 3 |
+
}
|
MCQGen.ipynb
ADDED
|
@@ -0,0 +1,18 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"cells": [
|
| 3 |
+
{
|
| 4 |
+
"cell_type": "code",
|
| 5 |
+
"execution_count": null,
|
| 6 |
+
"metadata": {},
|
| 7 |
+
"outputs": [],
|
| 8 |
+
"source": []
|
| 9 |
+
}
|
| 10 |
+
],
|
| 11 |
+
"metadata": {
|
| 12 |
+
"language_info": {
|
| 13 |
+
"name": "python"
|
| 14 |
+
}
|
| 15 |
+
},
|
| 16 |
+
"nbformat": 4,
|
| 17 |
+
"nbformat_minor": 2
|
| 18 |
+
}
|
app.py
ADDED
|
@@ -0,0 +1,74 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import openai
|
| 2 |
+
import json
|
| 3 |
+
import langchain
|
| 4 |
+
import pandas as pd
|
| 5 |
+
from openai import OpenAI
|
| 6 |
+
from langchain_openai import OpenAI, ChatOpenAI
|
| 7 |
+
import os
|
| 8 |
+
from dotenv import load_dotenv
|
| 9 |
+
from src.mcqgen.utils import read_file, get_table_data
|
| 10 |
+
from src.mcqgen.logger import logging
|
| 11 |
+
from src.mcqgen.mcqgenerator import generate_evaluate_chain
|
| 12 |
+
import streamlit as st
|
| 13 |
+
from langchain.llms import openai
|
| 14 |
+
from langchain.prompts import PromptTemplate
|
| 15 |
+
from langchain.chains import LLMChain, SequentialChain
|
| 16 |
+
from langchain.callbacks import get_openai_callback
|
| 17 |
+
import traceback
|
| 18 |
+
|
| 19 |
+
with open('response.json', 'r') as file:
|
| 20 |
+
response_json=json.load(file)
|
| 21 |
+
|
| 22 |
+
st.title("MCQ Creator App with Langchain:")
|
| 23 |
+
|
| 24 |
+
with st.form("user_input"):
|
| 25 |
+
uploaded_file=st.file_uploader("Upload a PDF or txt file")
|
| 26 |
+
|
| 27 |
+
mcq_count=st.number_input("No. of MCQs", min_value=3, max_value=50)
|
| 28 |
+
|
| 29 |
+
subject=st.text_input("Insert a subject", max_chars=20)
|
| 30 |
+
|
| 31 |
+
tone=st.text_input("Difficulty level", max_chars=20, placeholder="Simple")
|
| 32 |
+
|
| 33 |
+
button=st.form_submit_button("Create MCQs")
|
| 34 |
+
|
| 35 |
+
if button and uploaded_file is not None and mcq_count and subject and tone:
|
| 36 |
+
with st.spinner("loading..."):
|
| 37 |
+
try:
|
| 38 |
+
text=read_file(uploaded_file)
|
| 39 |
+
|
| 40 |
+
with get_openai_callback() as cb:
|
| 41 |
+
response=generate_evaluate_chain (
|
| 42 |
+
{"text":text,
|
| 43 |
+
"number":mcq_count,
|
| 44 |
+
"subject":subject,
|
| 45 |
+
"tone":tone,
|
| 46 |
+
"response_json":json.dumps(response_json)}
|
| 47 |
+
)
|
| 48 |
+
except Exception as e:
|
| 49 |
+
traceback.print_exception(type(e), e, e.__traceback__)
|
| 50 |
+
st.error("Error")
|
| 51 |
+
|
| 52 |
+
else:
|
| 53 |
+
print(f"Total Tokens: {cb.total_tokens}")
|
| 54 |
+
print(f"Prompt Tokens: {cb.prompt_tokens}")
|
| 55 |
+
print(f"Completion Tokens: {cb.completion_tokens}")
|
| 56 |
+
print(f"Total Cost: {cb.total_cost}")
|
| 57 |
+
if isinstance(response,dict):
|
| 58 |
+
quiz=response.get("quiz")
|
| 59 |
+
|
| 60 |
+
if quiz is not None:
|
| 61 |
+
print('About to execute probalematic function')
|
| 62 |
+
table_data=get_table_data(quiz)
|
| 63 |
+
|
| 64 |
+
|
| 65 |
+
if table_data is not None:
|
| 66 |
+
df=pd.DataFrame(table_data)
|
| 67 |
+
df.index=df.index+1
|
| 68 |
+
st.table(df)
|
| 69 |
+
st.write(response.get("review"))
|
| 70 |
+
else:
|
| 71 |
+
st.error("Error in the table data")
|
| 72 |
+
|
| 73 |
+
else:
|
| 74 |
+
st.write(response)
|
data.txt
ADDED
|
@@ -0,0 +1,18 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
The near future will be dominated by several missions. Currently in development, are ESA's Euclid mission which will fly NASA furnished detectors and JAXA's XRISM (X-Ray Imaging and Spectroscopy) mission which uses NASA furnished technologies that will help provide breakthroughs in the study of structure formation of the universe, outflows from galaxy nuclei, and dark matter.
|
| 2 |
+
|
| 3 |
+
Completing the missions in development, supporting the operational missions, and funding the research and analysis programs will consume most of the Astrophysics Division resources.
|
| 4 |
+
|
| 5 |
+
In October 2021, NASA selected a new Explorer Mission, the gamma-ray telescope COSI (Compton Spectrometer and Imager). COSI will study the recent history of star birth, star death, and the formation of chemical elements in the Milky Way.
|
| 6 |
+
|
| 7 |
+
In March 2017, NASA selected the Explorer Mission of Opportunity GUSTO (Galactic/Extragalactic ULDB Spectroscopic Terahertz Observatory) to measure emissions from the interstellar medium to help scientists determine the life cycle of interstellar gas in our Milky Way, witness the formation and destruction of star-forming clouds, and understand the dynamics and gas flow in the vicinity of the center of our galaxy.
|
| 8 |
+
|
| 9 |
+
In February 2016, NASA formally started the top Astro2010 decadal recommendation, the Wide Field Infrared Survey Telescope (WFIRST). In spring of 2020, WFIRST was renamed the Nancy Grace Roman Space Telescope. Roman will aid researchers in their efforts to unravel the secrets of dark energy and dark matter, and explore the evolution of the cosmos. It will also discover new worlds outside our solar system and advance the search for worlds that could be suitable for life.
|
| 10 |
+
|
| 11 |
+
The Future
|
| 12 |
+
Since the 2001 decadal survey, the way the universe is viewed has changed dramatically. More than 3800 planets have been discovered orbiting distant stars. Black holes are now known to be present at the center of most galaxies, including the Milky Way galaxy. The age, size and shape of the universe have been mapped based on the primordial radiation left by the big bang. And it has been learned that most of the matter in the universe is dark and invisible, and the universe is not only expanding, but accelerating in an unexpected way.
|
| 13 |
+
|
| 14 |
+
For the long term future, the Astrophysics goals will be guided based on the results of the 2020 Decadal survey Pathways to Discovery in Astronomy and Astrophysics for the 2020s. It identifies the most compelling science goals and presents an ambitious program of ground- and space-based activities for future investment. The report recommends critical near-term actions to support the foundations of the profession as well as the technologies and tools needed to carry out the science.
|
| 15 |
+
|
| 16 |
+
In 2012 the Astrophysics Implementation Plan was released which describes the activities currently being undertaken in response to the decadal survey recommendations within the current budgetary constraints. The plan was updated in 2014, 2016, and in 2018.
|
| 17 |
+
|
| 18 |
+
The Astrophysics roadmap Enduring Quests, Daring Visions was developed by a task force of the Astrophysics Subcommittee (APS) in 2013. The Roadmap presents a 30-year vision for astrophysics using the most recent decadal survey as the starting point.
|
experiment/machinelearning_quiz.csv
ADDED
|
@@ -0,0 +1,7 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
MCQ,Choices,Correct
|
| 2 |
+
Who coined the term machine learning in 1959?,a:Alan Turing|b:Arthur Samuel|c:Donald Hebb|d:Tom M. Mitchell,b
|
| 3 |
+
What was the earliest machine learning model introduced by Arthur Samuel in the 1950s?,a:Speech recognition system|b:Weather prediction model|c:Checkers winning chance calculator|d:Stock trading algorithm,c
|
| 4 |
+
What is the main purpose of modern-day machine learning?,a:To classify data based on models|b:To make predictions for future outcomes|c:Both a and b|d:None of the above,c
|
| 5 |
+
"What did Raytheon Company develop in the early 1960s for analyzing sonar signals, electrocardiograms, and speech patterns?",a:Cybertron learning machine|b:Speech recognition system|c:Weather prediction model|d:Checkers winning chance calculator,a
|
| 6 |
+
Who proposed the early mathematical models of neural networks to come up with algorithms that mirror human thought processes?,a:Arthur Samuel|b:Donald Hebb|c:Tom M. Mitchell|d:Raytheon Company,b
|
| 7 |
+
What formal definition of machine learning algorithms was provided by Tom M. Mitchell?,a:A computer program learns from experience E with respect to some class of tasks T and performance measure P|b:A computer program can think like a human|c:A computer program can predict the future accurately|d:A computer program can only perform tasks it is explicitly programmed for,a
|
experiment/mcqgen.ipynb
ADDED
|
@@ -0,0 +1,1011 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"cells": [
|
| 3 |
+
{
|
| 4 |
+
"cell_type": "code",
|
| 5 |
+
"execution_count": 1,
|
| 6 |
+
"metadata": {},
|
| 7 |
+
"outputs": [],
|
| 8 |
+
"source": [
|
| 9 |
+
"import openai\n",
|
| 10 |
+
"import json\n",
|
| 11 |
+
"import langchain\n",
|
| 12 |
+
"\n",
|
| 13 |
+
"import pandas as pd\n",
|
| 14 |
+
"from openai import OpenAI\n",
|
| 15 |
+
"\n",
|
| 16 |
+
"from langchain_openai import OpenAI, ChatOpenAI\n",
|
| 17 |
+
"\n",
|
| 18 |
+
"import os\n",
|
| 19 |
+
"from dotenv import load_dotenv"
|
| 20 |
+
]
|
| 21 |
+
},
|
| 22 |
+
{
|
| 23 |
+
"cell_type": "code",
|
| 24 |
+
"execution_count": 2,
|
| 25 |
+
"metadata": {},
|
| 26 |
+
"outputs": [
|
| 27 |
+
{
|
| 28 |
+
"data": {
|
| 29 |
+
"text/plain": [
|
| 30 |
+
"True"
|
| 31 |
+
]
|
| 32 |
+
},
|
| 33 |
+
"execution_count": 2,
|
| 34 |
+
"metadata": {},
|
| 35 |
+
"output_type": "execute_result"
|
| 36 |
+
}
|
| 37 |
+
],
|
| 38 |
+
"source": [
|
| 39 |
+
"load_dotenv()"
|
| 40 |
+
]
|
| 41 |
+
},
|
| 42 |
+
{
|
| 43 |
+
"cell_type": "code",
|
| 44 |
+
"execution_count": 3,
|
| 45 |
+
"metadata": {},
|
| 46 |
+
"outputs": [],
|
| 47 |
+
"source": [
|
| 48 |
+
"key=os.getenv(\"OPENAI_API_KEY\")"
|
| 49 |
+
]
|
| 50 |
+
},
|
| 51 |
+
{
|
| 52 |
+
"cell_type": "code",
|
| 53 |
+
"execution_count": 4,
|
| 54 |
+
"metadata": {},
|
| 55 |
+
"outputs": [],
|
| 56 |
+
"source": [
|
| 57 |
+
"llm=ChatOpenAI(model=\"gpt-3.5-turbo\", temperature=0.7)"
|
| 58 |
+
]
|
| 59 |
+
},
|
| 60 |
+
{
|
| 61 |
+
"cell_type": "code",
|
| 62 |
+
"execution_count": 5,
|
| 63 |
+
"metadata": {},
|
| 64 |
+
"outputs": [],
|
| 65 |
+
"source": [
|
| 66 |
+
"from langchain.llms import openai\n",
|
| 67 |
+
"from langchain.prompts import PromptTemplate\n",
|
| 68 |
+
"from langchain.chains import LLMChain, SequentialChain\n",
|
| 69 |
+
"from langchain.callbacks import get_openai_callback\n",
|
| 70 |
+
"import PyPDF2"
|
| 71 |
+
]
|
| 72 |
+
},
|
| 73 |
+
{
|
| 74 |
+
"cell_type": "code",
|
| 75 |
+
"execution_count": 6,
|
| 76 |
+
"metadata": {},
|
| 77 |
+
"outputs": [],
|
| 78 |
+
"source": [
|
| 79 |
+
"response_json={\n",
|
| 80 |
+
" \"1\": {\n",
|
| 81 |
+
" \"mcq\": \"multiple choice question\",\n",
|
| 82 |
+
" \"options\": {\n",
|
| 83 |
+
" \"a\": \"choice here\",\n",
|
| 84 |
+
" \"b\": \"choice here\",\n",
|
| 85 |
+
" \"c\": \"choice here\",\n",
|
| 86 |
+
" \"d\": \"choice here\"\n",
|
| 87 |
+
" },\n",
|
| 88 |
+
" \"correct_answer\": \"correct answer\"\n",
|
| 89 |
+
" },\n",
|
| 90 |
+
" \"2\": {\n",
|
| 91 |
+
" \"mcq\": \"multiple choice question\",\n",
|
| 92 |
+
" \"options\": {\n",
|
| 93 |
+
" \"a\": \"choice here\",\n",
|
| 94 |
+
" \"b\": \"choice here\",\n",
|
| 95 |
+
" \"c\": \"choice here\",\n",
|
| 96 |
+
" \"d\": \"choice here\"\n",
|
| 97 |
+
" },\n",
|
| 98 |
+
" \"correct_answer\": \"correct answer\"\n",
|
| 99 |
+
" },\n",
|
| 100 |
+
" \"3\": {\n",
|
| 101 |
+
" \"mcq\": \"multiple choice question\",\n",
|
| 102 |
+
" \"options\": {\n",
|
| 103 |
+
" \"a\": \"choice here\",\n",
|
| 104 |
+
" \"b\": \"choice here\",\n",
|
| 105 |
+
" \"c\": \"choice here\",\n",
|
| 106 |
+
" \"d\": \"choice here\"\n",
|
| 107 |
+
" },\n",
|
| 108 |
+
" \"correct_answer\": \"correct answer\"\n",
|
| 109 |
+
" }\n",
|
| 110 |
+
"}"
|
| 111 |
+
]
|
| 112 |
+
},
|
| 113 |
+
{
|
| 114 |
+
"cell_type": "code",
|
| 115 |
+
"execution_count": 7,
|
| 116 |
+
"metadata": {},
|
| 117 |
+
"outputs": [],
|
| 118 |
+
"source": [
|
| 119 |
+
"Template='''\n",
|
| 120 |
+
"Text:{text}\n",
|
| 121 |
+
"You are an expert MCQ maker. Given the above text, it is your job to create a quiz of {number} multiple \n",
|
| 122 |
+
"choice questions for {subject} in {tone} tone. Make sure that the questions are not repeated and check all\n",
|
| 123 |
+
"question to be confirming the text as well. Make sure to format your responses like {response_json} and use\n",
|
| 124 |
+
"it as a guide \n",
|
| 125 |
+
"'''\n",
|
| 126 |
+
"\n",
|
| 127 |
+
"\n",
|
| 128 |
+
"quiz_generation_prompt=PromptTemplate(\n",
|
| 129 |
+
" input_variables=[\"text\", \"number\", \"subject\", \"tone\", \"respone_json\"],\n",
|
| 130 |
+
" template=Template\n",
|
| 131 |
+
")"
|
| 132 |
+
]
|
| 133 |
+
},
|
| 134 |
+
{
|
| 135 |
+
"cell_type": "code",
|
| 136 |
+
"execution_count": 8,
|
| 137 |
+
"metadata": {},
|
| 138 |
+
"outputs": [],
|
| 139 |
+
"source": [
|
| 140 |
+
"quiz_chain=LLMChain(llm=llm, prompt=quiz_generation_prompt, output_key=\"quiz\", verbose=True)"
|
| 141 |
+
]
|
| 142 |
+
},
|
| 143 |
+
{
|
| 144 |
+
"cell_type": "code",
|
| 145 |
+
"execution_count": 9,
|
| 146 |
+
"metadata": {},
|
| 147 |
+
"outputs": [],
|
| 148 |
+
"source": [
|
| 149 |
+
"Template2=\"\"\"\n",
|
| 150 |
+
"You are an expert english grammarian and a writer. Given a multiple choice Quiz for {subject} students,\n",
|
| 151 |
+
"you need to evaluate the complexity of the quiz and give a complete analysis of the quiz. Only use 50 words\n",
|
| 152 |
+
"at max. If the quiz is not at par the with the cognitive and the analytical ability of the student, update\n",
|
| 153 |
+
"the quiz questions and change the tone such that it perfectly fits the student's analytical ability.\n",
|
| 154 |
+
"Quiz_MCQ:\n",
|
| 155 |
+
"{quiz}\n",
|
| 156 |
+
"\n",
|
| 157 |
+
"\n",
|
| 158 |
+
"\"\"\""
|
| 159 |
+
]
|
| 160 |
+
},
|
| 161 |
+
{
|
| 162 |
+
"cell_type": "code",
|
| 163 |
+
"execution_count": 10,
|
| 164 |
+
"metadata": {},
|
| 165 |
+
"outputs": [],
|
| 166 |
+
"source": [
|
| 167 |
+
"quiz_evaluation_prompt=PromptTemplate(input_variables=['subject','quiz'], template=Template2)"
|
| 168 |
+
]
|
| 169 |
+
},
|
| 170 |
+
{
|
| 171 |
+
"cell_type": "code",
|
| 172 |
+
"execution_count": 11,
|
| 173 |
+
"metadata": {},
|
| 174 |
+
"outputs": [],
|
| 175 |
+
"source": [
|
| 176 |
+
"review_chain=LLMChain(llm=llm, prompt=quiz_evaluation_prompt, output_key=\"review\", verbose=True)"
|
| 177 |
+
]
|
| 178 |
+
},
|
| 179 |
+
{
|
| 180 |
+
"cell_type": "code",
|
| 181 |
+
"execution_count": 12,
|
| 182 |
+
"metadata": {},
|
| 183 |
+
"outputs": [],
|
| 184 |
+
"source": [
|
| 185 |
+
"generate_evaluate_chain=SequentialChain(chains=[quiz_chain,review_chain], input_variables=[\"text\", \"number\", \"subject\", \"tone\", \"response_json\"], output_variables=['quiz', 'review'], verbose=True)"
|
| 186 |
+
]
|
| 187 |
+
},
|
| 188 |
+
{
|
| 189 |
+
"cell_type": "code",
|
| 190 |
+
"execution_count": 15,
|
| 191 |
+
"metadata": {},
|
| 192 |
+
"outputs": [],
|
| 193 |
+
"source": [
|
| 194 |
+
"file_path='C:\\\\Krish\\\\Full GenAI course\\\\openai_basic\\\\1.MCQGen\\\\data.txt'\n",
|
| 195 |
+
"with open(file_path, 'r') as file:\n",
|
| 196 |
+
" text=file.read()"
|
| 197 |
+
]
|
| 198 |
+
},
|
| 199 |
+
{
|
| 200 |
+
"cell_type": "code",
|
| 201 |
+
"execution_count": 16,
|
| 202 |
+
"metadata": {},
|
| 203 |
+
"outputs": [
|
| 204 |
+
{
|
| 205 |
+
"data": {
|
| 206 |
+
"text/plain": [
|
| 207 |
+
"'{\"1\": {\"mcq\": \"multiple choice question\", \"options\": {\"a\": \"choice here\", \"b\": \"choice here\", \"c\": \"choice here\", \"d\": \"choice here\"}, \"correct_answer\": \"correct answer\"}, \"2\": {\"mcq\": \"multiple choice question\", \"options\": {\"a\": \"choice here\", \"b\": \"choice here\", \"c\": \"choice here\", \"d\": \"choice here\"}, \"correct_answer\": \"correct answer\"}, \"3\": {\"mcq\": \"multiple choice question\", \"options\": {\"a\": \"choice here\", \"b\": \"choice here\", \"c\": \"choice here\", \"d\": \"choice here\"}, \"correct_answer\": \"correct answer\"}}'"
|
| 208 |
+
]
|
| 209 |
+
},
|
| 210 |
+
"execution_count": 16,
|
| 211 |
+
"metadata": {},
|
| 212 |
+
"output_type": "execute_result"
|
| 213 |
+
}
|
| 214 |
+
],
|
| 215 |
+
"source": [
|
| 216 |
+
"# Serialize the python dic into JSON-formatted string \n",
|
| 217 |
+
"json.dumps(response_json)"
|
| 218 |
+
]
|
| 219 |
+
},
|
| 220 |
+
{
|
| 221 |
+
"cell_type": "code",
|
| 222 |
+
"execution_count": 15,
|
| 223 |
+
"metadata": {},
|
| 224 |
+
"outputs": [],
|
| 225 |
+
"source": [
|
| 226 |
+
"number =6\n",
|
| 227 |
+
"subject=\"Machine Learning\"\n",
|
| 228 |
+
"tone=\"simple\""
|
| 229 |
+
]
|
| 230 |
+
},
|
| 231 |
+
{
|
| 232 |
+
"cell_type": "code",
|
| 233 |
+
"execution_count": 16,
|
| 234 |
+
"metadata": {},
|
| 235 |
+
"outputs": [
|
| 236 |
+
{
|
| 237 |
+
"name": "stderr",
|
| 238 |
+
"output_type": "stream",
|
| 239 |
+
"text": [
|
| 240 |
+
"c:\\Users\\avimi\\.conda\\envs\\myenv\\lib\\site-packages\\langchain_core\\_api\\deprecation.py:119: LangChainDeprecationWarning: The method `Chain.__call__` was deprecated in langchain 0.1.0 and will be removed in 0.2.0. Use invoke instead.\n",
|
| 241 |
+
" warn_deprecated(\n"
|
| 242 |
+
]
|
| 243 |
+
},
|
| 244 |
+
{
|
| 245 |
+
"name": "stdout",
|
| 246 |
+
"output_type": "stream",
|
| 247 |
+
"text": [
|
| 248 |
+
"\n",
|
| 249 |
+
"\n",
|
| 250 |
+
"\u001b[1m> Entering new SequentialChain chain...\u001b[0m\n",
|
| 251 |
+
"\n",
|
| 252 |
+
"\n",
|
| 253 |
+
"\u001b[1m> Entering new LLMChain chain...\u001b[0m\n",
|
| 254 |
+
"Prompt after formatting:\n",
|
| 255 |
+
"\u001b[32;1m\u001b[1;3m\n",
|
| 256 |
+
"Text:The near future will be dominated by several missions. Currently in development, are ESA's Euclid mission which will fly NASA furnished detectors and JAXA's XRISM (X-Ray Imaging and Spectroscopy) mission which uses NASA furnished technologies that will help provide breakthroughs in the study of structure formation of the universe, outflows from galaxy nuclei, and dark matter.\n",
|
| 257 |
+
"\n",
|
| 258 |
+
"Completing the missions in development, supporting the operational missions, and funding the research and analysis programs will consume most of the Astrophysics Division resources.\n",
|
| 259 |
+
"\n",
|
| 260 |
+
"In October 2021, NASA selected a new Explorer Mission, the gamma-ray telescope COSI (Compton Spectrometer and Imager). COSI will study the recent history of star birth, star death, and the formation of chemical elements in the Milky Way.\n",
|
| 261 |
+
"\n",
|
| 262 |
+
"In March 2017, NASA selected the Explorer Mission of Opportunity GUSTO (Galactic/Extragalactic ULDB Spectroscopic Terahertz Observatory) to measure emissions from the interstellar medium to help scientists determine the life cycle of interstellar gas in our Milky Way, witness the formation and destruction of star-forming clouds, and understand the dynamics and gas flow in the vicinity of the center of our galaxy.\n",
|
| 263 |
+
"\n",
|
| 264 |
+
"In February 2016, NASA formally started the top Astro2010 decadal recommendation, the Wide Field Infrared Survey Telescope (WFIRST). In spring of 2020, WFIRST was renamed the Nancy Grace Roman Space Telescope. Roman will aid researchers in their efforts to unravel the secrets of dark energy and dark matter, and explore the evolution of the cosmos. It will also discover new worlds outside our solar system and advance the search for worlds that could be suitable for life.\n",
|
| 265 |
+
"\n",
|
| 266 |
+
"The Future\n",
|
| 267 |
+
"Since the 2001 decadal survey, the way the universe is viewed has changed dramatically. More than 3800 planets have been discovered orbiting distant stars. Black holes are now known to be present at the center of most galaxies, including the Milky Way galaxy. The age, size and shape of the universe have been mapped based on the primordial radiation left by the big bang. And it has been learned that most of the matter in the universe is dark and invisible, and the universe is not only expanding, but accelerating in an unexpected way.\n",
|
| 268 |
+
"\n",
|
| 269 |
+
"For the long term future, the Astrophysics goals will be guided based on the results of the 2020 Decadal survey Pathways to Discovery in Astronomy and Astrophysics for the 2020s. It identifies the most compelling science goals and presents an ambitious program of ground- and space-based activities for future investment. The report recommends critical near-term actions to support the foundations of the profession as well as the technologies and tools needed to carry out the science.\n",
|
| 270 |
+
"\n",
|
| 271 |
+
"In 2012 the Astrophysics Implementation Plan was released which describes the activities currently being undertaken in response to the decadal survey recommendations within the current budgetary constraints. The plan was updated in 2014, 2016, and in 2018.\n",
|
| 272 |
+
"\n",
|
| 273 |
+
"The Astrophysics roadmap Enduring Quests, Daring Visions was developed by a task force of the Astrophysics Subcommittee (APS) in 2013. The Roadmap presents a 30-year vision for astrophysics using the most recent decadal survey as the starting point.\n",
|
| 274 |
+
"You are an expert MCQ maker. Given the above text, it is your job to create a quiz of 6 multiple \n",
|
| 275 |
+
"choice questions for Machine Learning in simple tone. Make sure that the questions are not repeated and check all\n",
|
| 276 |
+
"question to be confirming the text as well. Make sure to format your responses like {\"1\": {\"mcq\": \"multiple choice question\", \"options\": {\"a\": \"choice here\", \"b\": \"choice here\", \"c\": \"choice here\", \"d\": \"choice here\"}, \"correct_answer\": \"correct answer\"}, \"2\": {\"mcq\": \"multiple choice question\", \"options\": {\"a\": \"choice here\", \"b\": \"choice here\", \"c\": \"choice here\", \"d\": \"choice here\"}, \"correct_answer\": \"correct answer\"}, \"3\": {\"mcq\": \"multiple choice question\", \"options\": {\"a\": \"choice here\", \"b\": \"choice here\", \"c\": \"choice here\", \"d\": \"choice here\"}, \"correct_answer\": \"correct answer\"}} and use\n",
|
| 277 |
+
"it as a guide \n",
|
| 278 |
+
"\u001b[0m\n",
|
| 279 |
+
"\n",
|
| 280 |
+
"\u001b[1m> Finished chain.\u001b[0m\n",
|
| 281 |
+
"\n",
|
| 282 |
+
"\n",
|
| 283 |
+
"\u001b[1m> Entering new LLMChain chain...\u001b[0m\n",
|
| 284 |
+
"Prompt after formatting:\n",
|
| 285 |
+
"\u001b[32;1m\u001b[1;3m\n",
|
| 286 |
+
"You are an expert english grammarian and a writer. Given a multiple choice Quiz for Machine Learning students,\n",
|
| 287 |
+
"you need to evaluate the complexity of the quiz and give a complete analysis of the quiz. Only use 50 words\n",
|
| 288 |
+
"at max. If the quiz is not at par the with the cognitive and the analytical ability of the student, update\n",
|
| 289 |
+
"the quiz questions and change the tone such that it perfectly fits the student's analytical ability.\n",
|
| 290 |
+
"Quiz_MCQ:\n",
|
| 291 |
+
"{\"1\": {\"mcq\": \"Which NASA mission will study the recent history of star birth, star death, and the formation of chemical elements in the Milky Way?\", \"options\": {\"a\": \"Euclid mission\", \"b\": \"XRISM mission\", \"c\": \"COSI mission\", \"d\": \"GUSTO mission\"}, \"correct_answer\": \"c\"}, \"2\": {\"mcq\": \"What was the Wide Field Infrared Survey Telescope (WFIRST) renamed to in spring of 2020?\", \"options\": {\"a\": \"Euclid Telescope\", \"b\": \"Roman Space Telescope\", \"c\": \"XRISM Telescope\", \"d\": \"COSI Telescope\"}, \"correct_answer\": \"b\"}, \"3\": {\"mcq\": \"Which mission was selected by NASA in October 2021 to study emissions from the interstellar medium?\", \"options\": {\"a\": \"Euclid mission\", \"b\": \"XRISM mission\", \"c\": \"COSI mission\", \"d\": \"GUSTO mission\"}, \"correct_answer\": \"c\"}, \"4\": {\"mcq\": \"What is the main goal of the Nancy Grace Roman Space Telescope?\", \"options\": {\"a\": \"Study dark energy and dark matter\", \"b\": \"Explore the evolution of the cosmos\", \"c\": \"Discover new worlds outside our solar system\", \"d\": \"All of the above\"}, \"correct_answer\": \"d\"}, \"5\": {\"mcq\": \"In what year was the Astrophysics Implementation Plan first released?\", \"options\": {\"a\": \"2012\", \"b\": \"2014\", \"c\": \"2016\", \"d\": \"2018\"}, \"correct_answer\": \"a\"}, \"6\": {\"mcq\": \"What does the Astrophysics roadmap Enduring Quests, Daring Visions present?\", \"options\": {\"a\": \"A 10-year vision for astrophysics\", \"b\": \"A 20-year vision for astrophysics\", \"c\": \"A 30-year vision for astrophysics\", \"d\": \"A 40-year vision for astrophysics\"}, \"correct_answer\": \"c\"}}\n",
|
| 292 |
+
"\n",
|
| 293 |
+
"\n",
|
| 294 |
+
"\u001b[0m\n",
|
| 295 |
+
"\n",
|
| 296 |
+
"\u001b[1m> Finished chain.\u001b[0m\n",
|
| 297 |
+
"\n",
|
| 298 |
+
"\u001b[1m> Finished chain.\u001b[0m\n"
|
| 299 |
+
]
|
| 300 |
+
}
|
| 301 |
+
],
|
| 302 |
+
"source": [
|
| 303 |
+
"# Track token usage using get_openai_callback\n",
|
| 304 |
+
"with get_openai_callback() as cb:\n",
|
| 305 |
+
" response=generate_evaluate_chain(\n",
|
| 306 |
+
" { \n",
|
| 307 |
+
" \"text\":text,\n",
|
| 308 |
+
" \"number\":number,\n",
|
| 309 |
+
" \"subject\":subject,\n",
|
| 310 |
+
" \"tone\":tone,\n",
|
| 311 |
+
" \"response_json\":json.dumps(response_json)\n",
|
| 312 |
+
" }\n",
|
| 313 |
+
")"
|
| 314 |
+
]
|
| 315 |
+
},
|
| 316 |
+
{
|
| 317 |
+
"cell_type": "code",
|
| 318 |
+
"execution_count": 19,
|
| 319 |
+
"metadata": {},
|
| 320 |
+
"outputs": [
|
| 321 |
+
{
|
| 322 |
+
"name": "stdout",
|
| 323 |
+
"output_type": "stream",
|
| 324 |
+
"text": [
|
| 325 |
+
"Total Tokens: 1256\n",
|
| 326 |
+
"Prompt Tokens: 808\n",
|
| 327 |
+
"Completion Tokens: 448\n",
|
| 328 |
+
"Total Cost: 0.002108\n"
|
| 329 |
+
]
|
| 330 |
+
}
|
| 331 |
+
],
|
| 332 |
+
"source": [
|
| 333 |
+
"print(f\"Total Tokens: {cb.total_tokens}\")\n",
|
| 334 |
+
"print(f\"Prompt Tokens: {cb.prompt_tokens}\")\n",
|
| 335 |
+
"print(f\"Completion Tokens: {cb.completion_tokens}\")\n",
|
| 336 |
+
"print(f\"Total Cost: {cb.total_cost}\")"
|
| 337 |
+
]
|
| 338 |
+
},
|
| 339 |
+
{
|
| 340 |
+
"cell_type": "code",
|
| 341 |
+
"execution_count": 40,
|
| 342 |
+
"metadata": {},
|
| 343 |
+
"outputs": [
|
| 344 |
+
{
|
| 345 |
+
"data": {
|
| 346 |
+
"text/plain": [
|
| 347 |
+
"'{\"1\": {\"mcq\": \"What is the purpose of Physics?\", \"options\": {\"a\": \"To find the best restaurants in town\", \"b\": \"To study the interaction of matter and energy\", \"c\": \"To explore the history of ancient civilizations\", \"d\": \"To learn how to play musical instruments\"}, \"correct_answer\": \"b\"},\\n\\n\"2\": {\"mcq\": \"Which of the following is NOT a branch of Physics mentioned in the text?\", \"options\": {\"a\": \"Chemical Physics\", \"b\": \"Classical Physics\", \"c\": \"Modern Physics\", \"d\": \"Astrophysics\"}, \"correct_answer\": \"a\"},\\n\\n\"3\": {\"mcq\": \"What is the focus of Classical Physics?\", \"options\": {\"a\": \"Studying quantum mechanics\", \"b\": \"Exploring relativity theory\", \"c\": \"Understanding macroscopic phenomena\", \"d\": \"Investigating subatomic particles\"}, \"correct_answer\": \"c\"},\\n\\n\"4\": {\"mcq\": \"Which branch of Physics deals with the Nanoworld to the planets?\", \"options\": {\"a\": \"Astrophysics\", \"b\": \"Quantum Physics\", \"c\": \"Classical Physics\", \"d\": \"Modern Physics\"}, \"correct_answer\": \"d\"},\\n\\n\"5\": {\"mcq\": \"What is the foundation of Physics?\", \"options\": {\"a\": \"Observations and experiments\", \"b\": \"Guessing and assuming\", \"c\": \"Intuition and opinion\", \"d\": \"Random selection\"}, \"correct_answer\": \"a\"},\\n\\n\"6\": {\"mcq\": \"What is the role of mathematical analysis in Physics?\", \"options\": {\"a\": \"To confuse scientists\", \"b\": \"To provide a clearer understanding of physical laws\", \"c\": \"To create chaos in experiments\", \"d\": \"To complicate measurements\"}, \"correct_answer\": \"b\"}}'"
|
| 348 |
+
]
|
| 349 |
+
},
|
| 350 |
+
"execution_count": 40,
|
| 351 |
+
"metadata": {},
|
| 352 |
+
"output_type": "execute_result"
|
| 353 |
+
}
|
| 354 |
+
],
|
| 355 |
+
"source": [
|
| 356 |
+
"quiz=response.get(\"quiz\", None).strip()\n",
|
| 357 |
+
"quiz"
|
| 358 |
+
]
|
| 359 |
+
},
|
| 360 |
+
{
|
| 361 |
+
"cell_type": "code",
|
| 362 |
+
"execution_count": 17,
|
| 363 |
+
"metadata": {},
|
| 364 |
+
"outputs": [
|
| 365 |
+
{
|
| 366 |
+
"data": {
|
| 367 |
+
"text/plain": [
|
| 368 |
+
"'The quiz is complex for Machine Learning students as it focuses on specific details of NASA missions and astrophysics. To make it more suitable, simplify questions to focus on general concepts like the goals of space missions and key milestones in astrophysics.'"
|
| 369 |
+
]
|
| 370 |
+
},
|
| 371 |
+
"execution_count": 17,
|
| 372 |
+
"metadata": {},
|
| 373 |
+
"output_type": "execute_result"
|
| 374 |
+
}
|
| 375 |
+
],
|
| 376 |
+
"source": [
|
| 377 |
+
"response.get(\"review\", None).strip()"
|
| 378 |
+
]
|
| 379 |
+
},
|
| 380 |
+
{
|
| 381 |
+
"cell_type": "code",
|
| 382 |
+
"execution_count": 21,
|
| 383 |
+
"metadata": {},
|
| 384 |
+
"outputs": [
|
| 385 |
+
{
|
| 386 |
+
"data": {
|
| 387 |
+
"text/plain": [
|
| 388 |
+
"{'1': {'mcq': 'What is the purpose of Physics?',\n",
|
| 389 |
+
" 'options': {'a': 'To find the best restaurants in town',\n",
|
| 390 |
+
" 'b': 'To study the interaction of matter and energy',\n",
|
| 391 |
+
" 'c': 'To explore the history of ancient civilizations',\n",
|
| 392 |
+
" 'd': 'To learn how to play musical instruments'},\n",
|
| 393 |
+
" 'correct_answer': 'b'},\n",
|
| 394 |
+
" '2': {'mcq': 'Which of the following is NOT a branch of Physics mentioned in the text?',\n",
|
| 395 |
+
" 'options': {'a': 'Chemical Physics',\n",
|
| 396 |
+
" 'b': 'Classical Physics',\n",
|
| 397 |
+
" 'c': 'Modern Physics',\n",
|
| 398 |
+
" 'd': 'Astrophysics'},\n",
|
| 399 |
+
" 'correct_answer': 'a'},\n",
|
| 400 |
+
" '3': {'mcq': 'What is the focus of Classical Physics?',\n",
|
| 401 |
+
" 'options': {'a': 'Studying quantum mechanics',\n",
|
| 402 |
+
" 'b': 'Exploring relativity theory',\n",
|
| 403 |
+
" 'c': 'Understanding macroscopic phenomena',\n",
|
| 404 |
+
" 'd': 'Investigating subatomic particles'},\n",
|
| 405 |
+
" 'correct_answer': 'c'},\n",
|
| 406 |
+
" '4': {'mcq': 'Which branch of Physics deals with the Nanoworld to the planets?',\n",
|
| 407 |
+
" 'options': {'a': 'Astrophysics',\n",
|
| 408 |
+
" 'b': 'Quantum Physics',\n",
|
| 409 |
+
" 'c': 'Classical Physics',\n",
|
| 410 |
+
" 'd': 'Modern Physics'},\n",
|
| 411 |
+
" 'correct_answer': 'd'},\n",
|
| 412 |
+
" '5': {'mcq': 'What is the foundation of Physics?',\n",
|
| 413 |
+
" 'options': {'a': 'Observations and experiments',\n",
|
| 414 |
+
" 'b': 'Guessing and assuming',\n",
|
| 415 |
+
" 'c': 'Intuition and opinion',\n",
|
| 416 |
+
" 'd': 'Random selection'},\n",
|
| 417 |
+
" 'correct_answer': 'a'},\n",
|
| 418 |
+
" '6': {'mcq': 'What is the role of mathematical analysis in Physics?',\n",
|
| 419 |
+
" 'options': {'a': 'To confuse scientists',\n",
|
| 420 |
+
" 'b': 'To provide a clearer understanding of physical laws',\n",
|
| 421 |
+
" 'c': 'To create chaos in experiments',\n",
|
| 422 |
+
" 'd': 'To complicate measurements'},\n",
|
| 423 |
+
" 'correct_answer': 'b'}}"
|
| 424 |
+
]
|
| 425 |
+
},
|
| 426 |
+
"execution_count": 21,
|
| 427 |
+
"metadata": {},
|
| 428 |
+
"output_type": "execute_result"
|
| 429 |
+
}
|
| 430 |
+
],
|
| 431 |
+
"source": [
|
| 432 |
+
"json.loads(quiz)"
|
| 433 |
+
]
|
| 434 |
+
},
|
| 435 |
+
{
|
| 436 |
+
"cell_type": "code",
|
| 437 |
+
"execution_count": 25,
|
| 438 |
+
"metadata": {},
|
| 439 |
+
"outputs": [],
|
| 440 |
+
"source": []
|
| 441 |
+
},
|
| 442 |
+
{
|
| 443 |
+
"cell_type": "code",
|
| 444 |
+
"execution_count": 22,
|
| 445 |
+
"metadata": {},
|
| 446 |
+
"outputs": [],
|
| 447 |
+
"source": [
|
| 448 |
+
"quiz_table_data=[]\n",
|
| 449 |
+
"for key,value in json.loads(quiz).items():\n",
|
| 450 |
+
" mcq=value['mcq']\n",
|
| 451 |
+
" options=\"|\".join(\n",
|
| 452 |
+
" [\n",
|
| 453 |
+
" f\"{option}:{option_value}\"\n",
|
| 454 |
+
" for option, option_value in value[\"options\"].items()\n",
|
| 455 |
+
" ]\n",
|
| 456 |
+
"\n",
|
| 457 |
+
" )\n",
|
| 458 |
+
"\n",
|
| 459 |
+
" correct=value['correct_answer']\n",
|
| 460 |
+
" quiz_table_data.append({\"MCQ\":mcq, \"Choices\":options, \"Correct\":correct})"
|
| 461 |
+
]
|
| 462 |
+
},
|
| 463 |
+
{
|
| 464 |
+
"cell_type": "code",
|
| 465 |
+
"execution_count": 23,
|
| 466 |
+
"metadata": {},
|
| 467 |
+
"outputs": [
|
| 468 |
+
{
|
| 469 |
+
"data": {
|
| 470 |
+
"text/plain": [
|
| 471 |
+
"[{'MCQ': 'What is the purpose of Physics?',\n",
|
| 472 |
+
" 'Choices': 'a:To find the best restaurants in town|b:To study the interaction of matter and energy|c:To explore the history of ancient civilizations|d:To learn how to play musical instruments',\n",
|
| 473 |
+
" 'Correct': 'b'},\n",
|
| 474 |
+
" {'MCQ': 'Which of the following is NOT a branch of Physics mentioned in the text?',\n",
|
| 475 |
+
" 'Choices': 'a:Chemical Physics|b:Classical Physics|c:Modern Physics|d:Astrophysics',\n",
|
| 476 |
+
" 'Correct': 'a'},\n",
|
| 477 |
+
" {'MCQ': 'What is the focus of Classical Physics?',\n",
|
| 478 |
+
" 'Choices': 'a:Studying quantum mechanics|b:Exploring relativity theory|c:Understanding macroscopic phenomena|d:Investigating subatomic particles',\n",
|
| 479 |
+
" 'Correct': 'c'},\n",
|
| 480 |
+
" {'MCQ': 'Which branch of Physics deals with the Nanoworld to the planets?',\n",
|
| 481 |
+
" 'Choices': 'a:Astrophysics|b:Quantum Physics|c:Classical Physics|d:Modern Physics',\n",
|
| 482 |
+
" 'Correct': 'd'},\n",
|
| 483 |
+
" {'MCQ': 'What is the foundation of Physics?',\n",
|
| 484 |
+
" 'Choices': 'a:Observations and experiments|b:Guessing and assuming|c:Intuition and opinion|d:Random selection',\n",
|
| 485 |
+
" 'Correct': 'a'},\n",
|
| 486 |
+
" {'MCQ': 'What is the role of mathematical analysis in Physics?',\n",
|
| 487 |
+
" 'Choices': 'a:To confuse scientists|b:To provide a clearer understanding of physical laws|c:To create chaos in experiments|d:To complicate measurements',\n",
|
| 488 |
+
" 'Correct': 'b'}]"
|
| 489 |
+
]
|
| 490 |
+
},
|
| 491 |
+
"execution_count": 23,
|
| 492 |
+
"metadata": {},
|
| 493 |
+
"output_type": "execute_result"
|
| 494 |
+
}
|
| 495 |
+
],
|
| 496 |
+
"source": [
|
| 497 |
+
"quiz_table_data"
|
| 498 |
+
]
|
| 499 |
+
},
|
| 500 |
+
{
|
| 501 |
+
"cell_type": "code",
|
| 502 |
+
"execution_count": 24,
|
| 503 |
+
"metadata": {},
|
| 504 |
+
"outputs": [
|
| 505 |
+
{
|
| 506 |
+
"data": {
|
| 507 |
+
"text/html": [
|
| 508 |
+
"<div>\n",
|
| 509 |
+
"<style scoped>\n",
|
| 510 |
+
" .dataframe tbody tr th:only-of-type {\n",
|
| 511 |
+
" vertical-align: middle;\n",
|
| 512 |
+
" }\n",
|
| 513 |
+
"\n",
|
| 514 |
+
" .dataframe tbody tr th {\n",
|
| 515 |
+
" vertical-align: top;\n",
|
| 516 |
+
" }\n",
|
| 517 |
+
"\n",
|
| 518 |
+
" .dataframe thead th {\n",
|
| 519 |
+
" text-align: right;\n",
|
| 520 |
+
" }\n",
|
| 521 |
+
"</style>\n",
|
| 522 |
+
"<table border=\"1\" class=\"dataframe\">\n",
|
| 523 |
+
" <thead>\n",
|
| 524 |
+
" <tr style=\"text-align: right;\">\n",
|
| 525 |
+
" <th></th>\n",
|
| 526 |
+
" <th>MCQ</th>\n",
|
| 527 |
+
" <th>Choices</th>\n",
|
| 528 |
+
" <th>Correct</th>\n",
|
| 529 |
+
" </tr>\n",
|
| 530 |
+
" </thead>\n",
|
| 531 |
+
" <tbody>\n",
|
| 532 |
+
" <tr>\n",
|
| 533 |
+
" <th>0</th>\n",
|
| 534 |
+
" <td>What is the purpose of Physics?</td>\n",
|
| 535 |
+
" <td>a:To find the best restaurants in town|b:To st...</td>\n",
|
| 536 |
+
" <td>b</td>\n",
|
| 537 |
+
" </tr>\n",
|
| 538 |
+
" <tr>\n",
|
| 539 |
+
" <th>1</th>\n",
|
| 540 |
+
" <td>Which of the following is NOT a branch of Phys...</td>\n",
|
| 541 |
+
" <td>a:Chemical Physics|b:Classical Physics|c:Moder...</td>\n",
|
| 542 |
+
" <td>a</td>\n",
|
| 543 |
+
" </tr>\n",
|
| 544 |
+
" <tr>\n",
|
| 545 |
+
" <th>2</th>\n",
|
| 546 |
+
" <td>What is the focus of Classical Physics?</td>\n",
|
| 547 |
+
" <td>a:Studying quantum mechanics|b:Exploring relat...</td>\n",
|
| 548 |
+
" <td>c</td>\n",
|
| 549 |
+
" </tr>\n",
|
| 550 |
+
" <tr>\n",
|
| 551 |
+
" <th>3</th>\n",
|
| 552 |
+
" <td>Which branch of Physics deals with the Nanowor...</td>\n",
|
| 553 |
+
" <td>a:Astrophysics|b:Quantum Physics|c:Classical P...</td>\n",
|
| 554 |
+
" <td>d</td>\n",
|
| 555 |
+
" </tr>\n",
|
| 556 |
+
" <tr>\n",
|
| 557 |
+
" <th>4</th>\n",
|
| 558 |
+
" <td>What is the foundation of Physics?</td>\n",
|
| 559 |
+
" <td>a:Observations and experiments|b:Guessing and ...</td>\n",
|
| 560 |
+
" <td>a</td>\n",
|
| 561 |
+
" </tr>\n",
|
| 562 |
+
" <tr>\n",
|
| 563 |
+
" <th>5</th>\n",
|
| 564 |
+
" <td>What is the role of mathematical analysis in P...</td>\n",
|
| 565 |
+
" <td>a:To confuse scientists|b:To provide a clearer...</td>\n",
|
| 566 |
+
" <td>b</td>\n",
|
| 567 |
+
" </tr>\n",
|
| 568 |
+
" </tbody>\n",
|
| 569 |
+
"</table>\n",
|
| 570 |
+
"</div>"
|
| 571 |
+
],
|
| 572 |
+
"text/plain": [
|
| 573 |
+
" MCQ \\\n",
|
| 574 |
+
"0 What is the purpose of Physics? \n",
|
| 575 |
+
"1 Which of the following is NOT a branch of Phys... \n",
|
| 576 |
+
"2 What is the focus of Classical Physics? \n",
|
| 577 |
+
"3 Which branch of Physics deals with the Nanowor... \n",
|
| 578 |
+
"4 What is the foundation of Physics? \n",
|
| 579 |
+
"5 What is the role of mathematical analysis in P... \n",
|
| 580 |
+
"\n",
|
| 581 |
+
" Choices Correct \n",
|
| 582 |
+
"0 a:To find the best restaurants in town|b:To st... b \n",
|
| 583 |
+
"1 a:Chemical Physics|b:Classical Physics|c:Moder... a \n",
|
| 584 |
+
"2 a:Studying quantum mechanics|b:Exploring relat... c \n",
|
| 585 |
+
"3 a:Astrophysics|b:Quantum Physics|c:Classical P... d \n",
|
| 586 |
+
"4 a:Observations and experiments|b:Guessing and ... a \n",
|
| 587 |
+
"5 a:To confuse scientists|b:To provide a clearer... b "
|
| 588 |
+
]
|
| 589 |
+
},
|
| 590 |
+
"execution_count": 24,
|
| 591 |
+
"metadata": {},
|
| 592 |
+
"output_type": "execute_result"
|
| 593 |
+
}
|
| 594 |
+
],
|
| 595 |
+
"source": [
|
| 596 |
+
"import pandas as pd\n",
|
| 597 |
+
"pd.DataFrame(quiz_table_data)"
|
| 598 |
+
]
|
| 599 |
+
},
|
| 600 |
+
{
|
| 601 |
+
"cell_type": "code",
|
| 602 |
+
"execution_count": 61,
|
| 603 |
+
"metadata": {},
|
| 604 |
+
"outputs": [],
|
| 605 |
+
"source": [
|
| 606 |
+
"def get_table_data(quiz_str):\n",
|
| 607 |
+
" try:\n",
|
| 608 |
+
" \n",
|
| 609 |
+
" quiz_table_data=[]\n",
|
| 610 |
+
"\n",
|
| 611 |
+
" for key, value in quiz_str.items():\n",
|
| 612 |
+
" mcq=value['mcq']\n",
|
| 613 |
+
" options=\"||\".join(\n",
|
| 614 |
+
" [\n",
|
| 615 |
+
" f\"{option}:{option_value}\"\n",
|
| 616 |
+
" for option, option_value in value[\"options\"].items()\n",
|
| 617 |
+
" ]\n",
|
| 618 |
+
"\n",
|
| 619 |
+
" )\n",
|
| 620 |
+
"\n",
|
| 621 |
+
" correct=value['correct_answer']\n",
|
| 622 |
+
" quiz_table_data.append({\"MCQ\":mcq, \"Choices\":options, \"Correct\":correct})\n",
|
| 623 |
+
" except Exception as e:\n",
|
| 624 |
+
" traceback.print_exception(type(e),e,e.__traceback__)\n",
|
| 625 |
+
" return False"
|
| 626 |
+
]
|
| 627 |
+
},
|
| 628 |
+
{
|
| 629 |
+
"cell_type": "code",
|
| 630 |
+
"execution_count": 66,
|
| 631 |
+
"metadata": {},
|
| 632 |
+
"outputs": [],
|
| 633 |
+
"source": [
|
| 634 |
+
"data_table=get_table_data(quiz)"
|
| 635 |
+
]
|
| 636 |
+
},
|
| 637 |
+
{
|
| 638 |
+
"cell_type": "code",
|
| 639 |
+
"execution_count": 68,
|
| 640 |
+
"metadata": {},
|
| 641 |
+
"outputs": [],
|
| 642 |
+
"source": [
|
| 643 |
+
"data_table"
|
| 644 |
+
]
|
| 645 |
+
},
|
| 646 |
+
{
|
| 647 |
+
"cell_type": "code",
|
| 648 |
+
"execution_count": 67,
|
| 649 |
+
"metadata": {},
|
| 650 |
+
"outputs": [
|
| 651 |
+
{
|
| 652 |
+
"data": {
|
| 653 |
+
"text/html": [
|
| 654 |
+
"<div>\n",
|
| 655 |
+
"<style scoped>\n",
|
| 656 |
+
" .dataframe tbody tr th:only-of-type {\n",
|
| 657 |
+
" vertical-align: middle;\n",
|
| 658 |
+
" }\n",
|
| 659 |
+
"\n",
|
| 660 |
+
" .dataframe tbody tr th {\n",
|
| 661 |
+
" vertical-align: top;\n",
|
| 662 |
+
" }\n",
|
| 663 |
+
"\n",
|
| 664 |
+
" .dataframe thead th {\n",
|
| 665 |
+
" text-align: right;\n",
|
| 666 |
+
" }\n",
|
| 667 |
+
"</style>\n",
|
| 668 |
+
"<table border=\"1\" class=\"dataframe\">\n",
|
| 669 |
+
" <thead>\n",
|
| 670 |
+
" <tr style=\"text-align: right;\">\n",
|
| 671 |
+
" <th></th>\n",
|
| 672 |
+
" </tr>\n",
|
| 673 |
+
" </thead>\n",
|
| 674 |
+
" <tbody>\n",
|
| 675 |
+
" </tbody>\n",
|
| 676 |
+
"</table>\n",
|
| 677 |
+
"</div>"
|
| 678 |
+
],
|
| 679 |
+
"text/plain": [
|
| 680 |
+
"Empty DataFrame\n",
|
| 681 |
+
"Columns: []\n",
|
| 682 |
+
"Index: []"
|
| 683 |
+
]
|
| 684 |
+
},
|
| 685 |
+
"execution_count": 67,
|
| 686 |
+
"metadata": {},
|
| 687 |
+
"output_type": "execute_result"
|
| 688 |
+
}
|
| 689 |
+
],
|
| 690 |
+
"source": [
|
| 691 |
+
"pd.DataFrame(data_table)"
|
| 692 |
+
]
|
| 693 |
+
},
|
| 694 |
+
{
|
| 695 |
+
"cell_type": "code",
|
| 696 |
+
"execution_count": 64,
|
| 697 |
+
"metadata": {},
|
| 698 |
+
"outputs": [],
|
| 699 |
+
"source": [
|
| 700 |
+
"data_table"
|
| 701 |
+
]
|
| 702 |
+
},
|
| 703 |
+
{
|
| 704 |
+
"cell_type": "code",
|
| 705 |
+
"execution_count": 72,
|
| 706 |
+
"metadata": {},
|
| 707 |
+
"outputs": [],
|
| 708 |
+
"source": [
|
| 709 |
+
"import traceback\n",
|
| 710 |
+
"def get_table_data(quiz_str):\n",
|
| 711 |
+
" try:\n",
|
| 712 |
+
" #quiz_dict=json.loads(quiz_str)\n",
|
| 713 |
+
" quiz_table_data=[]\n",
|
| 714 |
+
"\n",
|
| 715 |
+
" for key, value in quiz_str.items():\n",
|
| 716 |
+
" mcq=value['mcq']\n",
|
| 717 |
+
" options=\"||\".join(\n",
|
| 718 |
+
" [\n",
|
| 719 |
+
" f\"{option}:{option_value}\"\n",
|
| 720 |
+
" for option, option_value in value[\"options\"].items()\n",
|
| 721 |
+
" ]\n",
|
| 722 |
+
"\n",
|
| 723 |
+
" )\n",
|
| 724 |
+
" correct=value['correct_answer']\n",
|
| 725 |
+
" quiz_table_data.append({\"MCQ\":mcq, \"Choices\":options, \"Correct\":correct})\n",
|
| 726 |
+
" return quiz_table_data\n",
|
| 727 |
+
" except Exception as e:\n",
|
| 728 |
+
" traceback.print_exception(type(e),e,e.__traceback__)\n",
|
| 729 |
+
" return False"
|
| 730 |
+
]
|
| 731 |
+
},
|
| 732 |
+
{
|
| 733 |
+
"cell_type": "code",
|
| 734 |
+
"execution_count": 76,
|
| 735 |
+
"metadata": {},
|
| 736 |
+
"outputs": [],
|
| 737 |
+
"source": [
|
| 738 |
+
"quiz={\"1\": {\"mcq\": \"What is the purpose of Physics?\", \"options\": {\"a\": \"To find the quantitative physical laws\", \"b\": \"To study history\", \"c\": \"To explore the oceans\", \"d\": \"To learn about astronomy\"}, \"correct_answer\": \"a\"},\n",
|
| 739 |
+
"\n",
|
| 740 |
+
"\"2\": {\"mcq\": \"How many major branches of Physics are there?\", \"options\": {\"a\": \"One\", \"b\": \"Two\", \"c\": \"Three\", \"d\": \"Four\"}, \"correct_answer\": \"b\"},\n",
|
| 741 |
+
"\n",
|
| 742 |
+
"\"3\": {\"mcq\": \"Which branch of Physics deals with the interaction of matter and energy?\", \"options\": {\"a\": \"Biology\", \"b\": \"Chemistry\", \"c\": \"Physics\", \"d\": \"Modern Physics\"}, \"correct_answer\": \"c\"}}"
|
| 743 |
+
]
|
| 744 |
+
},
|
| 745 |
+
{
|
| 746 |
+
"cell_type": "code",
|
| 747 |
+
"execution_count": 75,
|
| 748 |
+
"metadata": {},
|
| 749 |
+
"outputs": [],
|
| 750 |
+
"source": [
|
| 751 |
+
"def get_table_data(quiz_str):\n",
|
| 752 |
+
" try:\n",
|
| 753 |
+
" \n",
|
| 754 |
+
" quiz_table_data=[]\n",
|
| 755 |
+
"\n",
|
| 756 |
+
" for key, value in quiz_str.items():\n",
|
| 757 |
+
" mcq=value['mcq']\n",
|
| 758 |
+
" options=\"||\".join(\n",
|
| 759 |
+
" [\n",
|
| 760 |
+
" f\"{option}:{option_value}\"\n",
|
| 761 |
+
" for option, option_value in value[\"options\"].items()\n",
|
| 762 |
+
" ]\n",
|
| 763 |
+
"\n",
|
| 764 |
+
" )\n",
|
| 765 |
+
" correct=value['correct_answer']\n",
|
| 766 |
+
" quiz_table_data.append({\"MCQ\":mcq, \"Choices\":options, \"Correct\":correct})\n",
|
| 767 |
+
" return quiz_table_data\n",
|
| 768 |
+
" except Exception as e:\n",
|
| 769 |
+
" traceback.print_exception(type(e),e,e.__traceback__)\n",
|
| 770 |
+
" return False"
|
| 771 |
+
]
|
| 772 |
+
},
|
| 773 |
+
{
|
| 774 |
+
"cell_type": "code",
|
| 775 |
+
"execution_count": 78,
|
| 776 |
+
"metadata": {},
|
| 777 |
+
"outputs": [
|
| 778 |
+
{
|
| 779 |
+
"data": {
|
| 780 |
+
"text/html": [
|
| 781 |
+
"<div>\n",
|
| 782 |
+
"<style scoped>\n",
|
| 783 |
+
" .dataframe tbody tr th:only-of-type {\n",
|
| 784 |
+
" vertical-align: middle;\n",
|
| 785 |
+
" }\n",
|
| 786 |
+
"\n",
|
| 787 |
+
" .dataframe tbody tr th {\n",
|
| 788 |
+
" vertical-align: top;\n",
|
| 789 |
+
" }\n",
|
| 790 |
+
"\n",
|
| 791 |
+
" .dataframe thead th {\n",
|
| 792 |
+
" text-align: right;\n",
|
| 793 |
+
" }\n",
|
| 794 |
+
"</style>\n",
|
| 795 |
+
"<table border=\"1\" class=\"dataframe\">\n",
|
| 796 |
+
" <thead>\n",
|
| 797 |
+
" <tr style=\"text-align: right;\">\n",
|
| 798 |
+
" <th></th>\n",
|
| 799 |
+
" <th>MCQ</th>\n",
|
| 800 |
+
" <th>Choices</th>\n",
|
| 801 |
+
" <th>Correct</th>\n",
|
| 802 |
+
" </tr>\n",
|
| 803 |
+
" </thead>\n",
|
| 804 |
+
" <tbody>\n",
|
| 805 |
+
" <tr>\n",
|
| 806 |
+
" <th>0</th>\n",
|
| 807 |
+
" <td>What is the purpose of Physics?</td>\n",
|
| 808 |
+
" <td>a:To find the quantitative physical laws||b:To...</td>\n",
|
| 809 |
+
" <td>a</td>\n",
|
| 810 |
+
" </tr>\n",
|
| 811 |
+
" <tr>\n",
|
| 812 |
+
" <th>1</th>\n",
|
| 813 |
+
" <td>How many major branches of Physics are there?</td>\n",
|
| 814 |
+
" <td>a:One||b:Two||c:Three||d:Four</td>\n",
|
| 815 |
+
" <td>b</td>\n",
|
| 816 |
+
" </tr>\n",
|
| 817 |
+
" <tr>\n",
|
| 818 |
+
" <th>2</th>\n",
|
| 819 |
+
" <td>Which branch of Physics deals with the interac...</td>\n",
|
| 820 |
+
" <td>a:Biology||b:Chemistry||c:Physics||d:Modern Ph...</td>\n",
|
| 821 |
+
" <td>c</td>\n",
|
| 822 |
+
" </tr>\n",
|
| 823 |
+
" </tbody>\n",
|
| 824 |
+
"</table>\n",
|
| 825 |
+
"</div>"
|
| 826 |
+
],
|
| 827 |
+
"text/plain": [
|
| 828 |
+
" MCQ \\\n",
|
| 829 |
+
"0 What is the purpose of Physics? \n",
|
| 830 |
+
"1 How many major branches of Physics are there? \n",
|
| 831 |
+
"2 Which branch of Physics deals with the interac... \n",
|
| 832 |
+
"\n",
|
| 833 |
+
" Choices Correct \n",
|
| 834 |
+
"0 a:To find the quantitative physical laws||b:To... a \n",
|
| 835 |
+
"1 a:One||b:Two||c:Three||d:Four b \n",
|
| 836 |
+
"2 a:Biology||b:Chemistry||c:Physics||d:Modern Ph... c "
|
| 837 |
+
]
|
| 838 |
+
},
|
| 839 |
+
"execution_count": 78,
|
| 840 |
+
"metadata": {},
|
| 841 |
+
"output_type": "execute_result"
|
| 842 |
+
}
|
| 843 |
+
],
|
| 844 |
+
"source": [
|
| 845 |
+
"pd.DataFrame(get_table_data(quiz))"
|
| 846 |
+
]
|
| 847 |
+
},
|
| 848 |
+
{
|
| 849 |
+
"cell_type": "code",
|
| 850 |
+
"execution_count": 59,
|
| 851 |
+
"metadata": {},
|
| 852 |
+
"outputs": [],
|
| 853 |
+
"source": [
|
| 854 |
+
"for key, value in quiz.items():\n",
|
| 855 |
+
" mcq=value['mcq']\n",
|
| 856 |
+
" options=\"||\".join(\n",
|
| 857 |
+
" [\n",
|
| 858 |
+
" f\"{option}:{option_value}\"\n",
|
| 859 |
+
" for option, option_value in value[\"options\"].items()\n",
|
| 860 |
+
" ]\n",
|
| 861 |
+
"\n",
|
| 862 |
+
" )\n",
|
| 863 |
+
" correct=value['correct_answer']\n",
|
| 864 |
+
" quiz_table_data.append({\"MCQ\":mcq, \"Choices\":options, \"Correct\":correct})"
|
| 865 |
+
]
|
| 866 |
+
},
|
| 867 |
+
{
|
| 868 |
+
"cell_type": "code",
|
| 869 |
+
"execution_count": 60,
|
| 870 |
+
"metadata": {},
|
| 871 |
+
"outputs": [
|
| 872 |
+
{
|
| 873 |
+
"data": {
|
| 874 |
+
"text/plain": [
|
| 875 |
+
"[{'MCQ': 'What is the purpose of Physics?',\n",
|
| 876 |
+
" 'Choices': 'a:To find the best restaurants in town||b:To study the interaction of matter and energy||c:To explore the history of ancient civilizations||d:To learn how to play musical instruments',\n",
|
| 877 |
+
" 'Correct': 'b'},\n",
|
| 878 |
+
" {'MCQ': 'Which of the following is NOT a branch of Physics mentioned in the text?',\n",
|
| 879 |
+
" 'Choices': 'a:Chemical Physics||b:Classical Physics||c:Modern Physics||d:Astrophysics',\n",
|
| 880 |
+
" 'Correct': 'a'},\n",
|
| 881 |
+
" {'MCQ': 'What is the focus of Classical Physics?',\n",
|
| 882 |
+
" 'Choices': 'a:Studying quantum mechanics||b:Exploring relativity theory||c:Understanding macroscopic phenomena||d:Investigating subatomic particles',\n",
|
| 883 |
+
" 'Correct': 'c'},\n",
|
| 884 |
+
" {'MCQ': 'Which branch of Physics deals with the Nanoworld to the planets?',\n",
|
| 885 |
+
" 'Choices': 'a:Astrophysics||b:Quantum Physics||c:Classical Physics||d:Modern Physics',\n",
|
| 886 |
+
" 'Correct': 'd'},\n",
|
| 887 |
+
" {'MCQ': 'What is the foundation of Physics?',\n",
|
| 888 |
+
" 'Choices': 'a:Observations and experiments||b:Guessing and assuming||c:Intuition and opinion||d:Random selection',\n",
|
| 889 |
+
" 'Correct': 'a'},\n",
|
| 890 |
+
" {'MCQ': 'What is the role of mathematical analysis in Physics?',\n",
|
| 891 |
+
" 'Choices': 'a:To confuse scientists||b:To provide a clearer understanding of physical laws||c:To create chaos in experiments||d:To complicate measurements',\n",
|
| 892 |
+
" 'Correct': 'b'},\n",
|
| 893 |
+
" {'MCQ': 'What is the purpose of Physics?',\n",
|
| 894 |
+
" 'Choices': 'a:To find the quantitative physical laws||b:To study history||c:To explore the oceans||d:To learn about astronomy',\n",
|
| 895 |
+
" 'Correct': 'a'},\n",
|
| 896 |
+
" {'MCQ': 'How many major branches of Physics are there?',\n",
|
| 897 |
+
" 'Choices': 'a:One||b:Two||c:Three||d:Four',\n",
|
| 898 |
+
" 'Correct': 'b'},\n",
|
| 899 |
+
" {'MCQ': 'Which branch of Physics deals with the interaction of matter and energy?',\n",
|
| 900 |
+
" 'Choices': 'a:Biology||b:Chemistry||c:Physics||d:Modern Physics',\n",
|
| 901 |
+
" 'Correct': 'c'}]"
|
| 902 |
+
]
|
| 903 |
+
},
|
| 904 |
+
"execution_count": 60,
|
| 905 |
+
"metadata": {},
|
| 906 |
+
"output_type": "execute_result"
|
| 907 |
+
}
|
| 908 |
+
],
|
| 909 |
+
"source": [
|
| 910 |
+
"quiz_table_data"
|
| 911 |
+
]
|
| 912 |
+
},
|
| 913 |
+
{
|
| 914 |
+
"cell_type": "code",
|
| 915 |
+
"execution_count": 2,
|
| 916 |
+
"metadata": {},
|
| 917 |
+
"outputs": [],
|
| 918 |
+
"source": [
|
| 919 |
+
"q={\"1\": {\"mcq\": \"What is the purpose of Physics?\", \"options\": {\"a\": \"To find the best recipes\", \"b\": \"To find the quantitative physical laws\", \"c\": \"To predict the weather\", \"d\": \"To study historical events\"}, \"correct_answer\": \"b\"},\n",
|
| 920 |
+
"\n",
|
| 921 |
+
"\"2\": {\"mcq\": \"What are the two major branches of Physics mentioned in the text?\", \"options\": {\"a\": \"Chemistry and Biology\", \"b\": \"Classical Physics and Quantum Physics\", \"c\": \"Physics and Mathematics\", \"d\": \"Modern Physics and Ancient Physics\"}, \"correct_answer\": \"b\"}, \n",
|
| 922 |
+
"\n",
|
| 923 |
+
"\"3\": {\"mcq\": \"What is Physics commonly known as?\", \"options\": {\"a\": \"The study of plants\", \"b\": \"The study of matter and energy interaction\", \"c\": \"The study of history\", \"d\": \"The study of language\"}, \"correct_answer\": \"b\"}}"
|
| 924 |
+
]
|
| 925 |
+
},
|
| 926 |
+
{
|
| 927 |
+
"cell_type": "code",
|
| 928 |
+
"execution_count": 3,
|
| 929 |
+
"metadata": {},
|
| 930 |
+
"outputs": [],
|
| 931 |
+
"source": [
|
| 932 |
+
"def get_table_data(quiz_str):\n",
|
| 933 |
+
" try:\n",
|
| 934 |
+
" \n",
|
| 935 |
+
" quiz_table_data=[]\n",
|
| 936 |
+
"\n",
|
| 937 |
+
" for key, value in quiz_str.items():\n",
|
| 938 |
+
" mcq=value['mcq']\n",
|
| 939 |
+
" options=\"||\".join(\n",
|
| 940 |
+
" [\n",
|
| 941 |
+
" f\"{option}:{option_value}\"\n",
|
| 942 |
+
" for option, option_value in value[\"options\"].items()\n",
|
| 943 |
+
" ]\n",
|
| 944 |
+
"\n",
|
| 945 |
+
" )\n",
|
| 946 |
+
" correct=value['correct_answer']\n",
|
| 947 |
+
" quiz_table_data.append({\"MCQ\":mcq, \"Choices\":options, \"Correct\":correct})\n",
|
| 948 |
+
" return quiz_table_data\n",
|
| 949 |
+
" except Exception as e:\n",
|
| 950 |
+
" traceback.print_exception(type(e),e,e.__traceback__)\n",
|
| 951 |
+
" return False"
|
| 952 |
+
]
|
| 953 |
+
},
|
| 954 |
+
{
|
| 955 |
+
"cell_type": "code",
|
| 956 |
+
"execution_count": 4,
|
| 957 |
+
"metadata": {},
|
| 958 |
+
"outputs": [
|
| 959 |
+
{
|
| 960 |
+
"data": {
|
| 961 |
+
"text/plain": [
|
| 962 |
+
"[{'MCQ': 'What is the purpose of Physics?',\n",
|
| 963 |
+
" 'Choices': 'a:To find the best recipes||b:To find the quantitative physical laws||c:To predict the weather||d:To study historical events',\n",
|
| 964 |
+
" 'Correct': 'b'},\n",
|
| 965 |
+
" {'MCQ': 'What are the two major branches of Physics mentioned in the text?',\n",
|
| 966 |
+
" 'Choices': 'a:Chemistry and Biology||b:Classical Physics and Quantum Physics||c:Physics and Mathematics||d:Modern Physics and Ancient Physics',\n",
|
| 967 |
+
" 'Correct': 'b'},\n",
|
| 968 |
+
" {'MCQ': 'What is Physics commonly known as?',\n",
|
| 969 |
+
" 'Choices': 'a:The study of plants||b:The study of matter and energy interaction||c:The study of history||d:The study of language',\n",
|
| 970 |
+
" 'Correct': 'b'}]"
|
| 971 |
+
]
|
| 972 |
+
},
|
| 973 |
+
"execution_count": 4,
|
| 974 |
+
"metadata": {},
|
| 975 |
+
"output_type": "execute_result"
|
| 976 |
+
}
|
| 977 |
+
],
|
| 978 |
+
"source": [
|
| 979 |
+
"get_table_data(q)"
|
| 980 |
+
]
|
| 981 |
+
},
|
| 982 |
+
{
|
| 983 |
+
"cell_type": "code",
|
| 984 |
+
"execution_count": null,
|
| 985 |
+
"metadata": {},
|
| 986 |
+
"outputs": [],
|
| 987 |
+
"source": []
|
| 988 |
+
}
|
| 989 |
+
],
|
| 990 |
+
"metadata": {
|
| 991 |
+
"kernelspec": {
|
| 992 |
+
"display_name": "myenv",
|
| 993 |
+
"language": "python",
|
| 994 |
+
"name": "python3"
|
| 995 |
+
},
|
| 996 |
+
"language_info": {
|
| 997 |
+
"codemirror_mode": {
|
| 998 |
+
"name": "ipython",
|
| 999 |
+
"version": 3
|
| 1000 |
+
},
|
| 1001 |
+
"file_extension": ".py",
|
| 1002 |
+
"mimetype": "text/x-python",
|
| 1003 |
+
"name": "python",
|
| 1004 |
+
"nbconvert_exporter": "python",
|
| 1005 |
+
"pygments_lexer": "ipython3",
|
| 1006 |
+
"version": "3.9.19"
|
| 1007 |
+
}
|
| 1008 |
+
},
|
| 1009 |
+
"nbformat": 4,
|
| 1010 |
+
"nbformat_minor": 2
|
| 1011 |
+
}
|
logs/04_22_2024_19_37_02.log
ADDED
|
@@ -0,0 +1,4 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
[2024-04-22 19:38:01,574] 1026 httpx - INFO - HTTP Request: POST https://api.openai.com/v1/chat/completions "HTTP/1.1 200 OK"
|
| 2 |
+
[2024-04-22 19:38:03,612] 1026 httpx - INFO - HTTP Request: POST https://api.openai.com/v1/chat/completions "HTTP/1.1 200 OK"
|
| 3 |
+
[2024-04-22 19:42:55,152] 1026 httpx - INFO - HTTP Request: POST https://api.openai.com/v1/chat/completions "HTTP/1.1 200 OK"
|
| 4 |
+
[2024-04-22 19:42:56,786] 1026 httpx - INFO - HTTP Request: POST https://api.openai.com/v1/chat/completions "HTTP/1.1 200 OK"
|
logs/04_22_2024_19_57_57.log
ADDED
|
@@ -0,0 +1,2 @@
|
|
|
|
|
|
|
|
|
|
| 1 |
+
[2024-04-22 19:58:41,122] 1026 httpx - INFO - HTTP Request: POST https://api.openai.com/v1/chat/completions "HTTP/1.1 200 OK"
|
| 2 |
+
[2024-04-22 19:58:43,244] 1026 httpx - INFO - HTTP Request: POST https://api.openai.com/v1/chat/completions "HTTP/1.1 200 OK"
|
logs/04_22_2024_21_36_57.log
ADDED
|
@@ -0,0 +1,2 @@
|
|
|
|
|
|
|
|
|
|
| 1 |
+
[2024-04-22 21:37:44,262] 1026 httpx - INFO - HTTP Request: POST https://api.openai.com/v1/chat/completions "HTTP/1.1 200 OK"
|
| 2 |
+
[2024-04-22 21:37:47,665] 1026 httpx - INFO - HTTP Request: POST https://api.openai.com/v1/chat/completions "HTTP/1.1 200 OK"
|
logs/04_27_2024_17_51_55.log
ADDED
|
File without changes
|
logs/04_27_2024_18_33_22.log
ADDED
|
File without changes
|
requirements.txt
ADDED
|
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
openai
|
| 2 |
+
langchain
|
| 3 |
+
langchain-openai
|
| 4 |
+
streamlit
|
| 5 |
+
python-dotenv
|
| 6 |
+
PyPDF2
|
response.json
ADDED
|
@@ -0,0 +1,33 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"1": {
|
| 3 |
+
"mcq": "multiple choice question",
|
| 4 |
+
"options": {
|
| 5 |
+
"a": "choice here",
|
| 6 |
+
"b": "choice here",
|
| 7 |
+
"c": "choice here",
|
| 8 |
+
"d": "choice here"
|
| 9 |
+
},
|
| 10 |
+
"correct_answer": "correct answer"
|
| 11 |
+
},
|
| 12 |
+
"2": {
|
| 13 |
+
"mcq": "multiple choice question",
|
| 14 |
+
"options": {
|
| 15 |
+
"a": "choice here",
|
| 16 |
+
"b": "choice here",
|
| 17 |
+
"c": "choice here",
|
| 18 |
+
"d": "choice here"
|
| 19 |
+
},
|
| 20 |
+
"correct_answer": "correct answer"
|
| 21 |
+
},
|
| 22 |
+
"3": {
|
| 23 |
+
"mcq": "multiple choice question",
|
| 24 |
+
"options": {
|
| 25 |
+
"a": "choice here",
|
| 26 |
+
"b": "choice here",
|
| 27 |
+
"c": "choice here",
|
| 28 |
+
"d": "choice here"
|
| 29 |
+
},
|
| 30 |
+
"correct_answer": "correct answer"
|
| 31 |
+
}
|
| 32 |
+
}
|
| 33 |
+
|
setup.py
ADDED
|
@@ -0,0 +1,10 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Packaging script: makes src/ packages installable as "mcqgenerator"
# (pip install -e . picks this up).
from setuptools import find_packages,setup

setup(
    name="mcqgenerator",
    version='0.0.0.1',
    author='aviral mittal',
    author_email='avimittal30@gmail.com',
    # Runtime dependencies; mirrors requirements.txt (all unpinned).
    install_requires=['openai','streamlit', 'langchain', 'python-dotenv', 'PyPDF2'],
    # Discover every package under the repository root (src, src.mcqgen).
    packages=find_packages()
)
|
src/__init__.py
ADDED
|
File without changes
|
src/__pycache__/__init__.cpython-39.pyc
ADDED
|
Binary file (153 Bytes). View file
|
|
|
src/mcqgen/__init__.py
ADDED
|
File without changes
|
src/mcqgen/__pycache__/__init__.cpython-39.pyc
ADDED
|
Binary file (166 Bytes). View file
|
|
|
src/mcqgen/__pycache__/logger.cpython-39.pyc
ADDED
|
Binary file (557 Bytes). View file
|
|
|
src/mcqgen/__pycache__/mcqgenerator.cpython-39.pyc
ADDED
|
Binary file (2.1 kB). View file
|
|
|
src/mcqgen/__pycache__/utils.cpython-39.pyc
ADDED
|
Binary file (1.37 kB). View file
|
|
|
src/mcqgen/logger.py
ADDED
|
@@ -0,0 +1,13 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Project-wide logging setup: importing this module configures the root
# logger to append INFO-level records to a timestamped file under ./logs.
import logging
import os
from datetime import datetime

# One log file per process start, named after the launch timestamp
# (e.g. 04_27_2024_18_33_22.log).
LOG_FILE=f"{datetime.now().strftime('%m_%d_%Y_%H_%M_%S')}.log"

# Logs live under the current working directory, not the package directory.
log_path=os.path.join(os.getcwd(),"logs")

# Create the logs directory if missing; no-op when it already exists.
os.makedirs(log_path, exist_ok=True)

log_file_path=os.path.join(log_path, LOG_FILE)

# Root-logger configuration; every module that does
# `from src.mcqgen.logger import logging` shares this handler/format.
logging.basicConfig(level=logging.INFO, filename=log_file_path, format="[%(asctime)s] %(lineno)d %(name)s - %(levelname)s - %(message)s")
|
src/mcqgen/mcqgenerator.py
ADDED
|
@@ -0,0 +1,75 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Builds the MCQ generation + review LangChain pipeline.

Importing this module constructs `generate_evaluate_chain`, a
SequentialChain that (1) generates a quiz from input text and
(2) reviews the quiz's complexity. Callers invoke it with the keys
"text", "number", "subject", "tone" and "response_json".
"""
import os
import json

import pandas as pd
from dotenv import load_dotenv

from langchain_openai import ChatOpenAI
from langchain.prompts import PromptTemplate
from langchain.chains import LLMChain, SequentialChain
from langchain.callbacks import get_openai_callback

from src.mcqgen.utils import read_file, get_table_data
from src.mcqgen.logger import logging

# Load OPENAI_API_KEY from .env so ChatOpenAI can read it from the environment.
load_dotenv()

key = os.getenv("OPENAI_API_KEY")

llm = ChatOpenAI(model="gpt-3.5-turbo", temperature=0.7)

# Prompt for the quiz-generation step. {response_json} carries the JSON
# skeleton (see response.json) the model must follow.
Template = '''
Text:{text}
You are an expert MCQ maker. Given the above text, it is your job to create a quiz of {number} multiple
choice questions for {subject} in {tone} tone. Make sure that the questions are not repeated and check all
question to be confirming the text as well. Make sure to format your responses like {response_json} and use
it as a guide
'''

quiz_generation_prompt = PromptTemplate(
    # Fixed typo: was "respone_json", which did not match the {response_json}
    # placeholder in the template or the SequentialChain input variables.
    input_variables=["text", "number", "subject", "tone", "response_json"],
    template=Template,
)

# Step 1: produce the quiz; its output is exposed under the key "quiz".
quiz_chain = LLMChain(llm=llm, prompt=quiz_generation_prompt, output_key="quiz", verbose=True)

# Prompt for the review step: a short complexity analysis of the quiz.
Template2 = """
You are an expert english grammarian and a writer. Given a multiple choice Quiz for {subject} students,
you need to evaluate the complexity of the quiz and give a complete analysis of the quiz. Only use 50 words
at max. If the quiz is not at par the with the cognitive and the analytical ability of the student, update
the quiz questions and change the tone such that it perfectly fits the student's analytical ability.
Quiz_MCQ:
{quiz}
"""

quiz_evaluation_prompt = PromptTemplate(input_variables=['subject', 'quiz'], template=Template2)

# Step 2: review the generated quiz; output exposed under the key "review".
review_chain = LLMChain(llm=llm, prompt=quiz_evaluation_prompt, output_key="review", verbose=True)

# End-to-end pipeline consumed by app.py.
generate_evaluate_chain = SequentialChain(
    chains=[quiz_chain, review_chain],
    input_variables=["text", "number", "subject", "tone", "response_json"],
    output_variables=['quiz', 'review'],
    verbose=True,
)
| 75 |
+
|
src/mcqgen/utils.py
ADDED
|
@@ -0,0 +1,44 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
import PyPDF2
|
| 3 |
+
import json
|
| 4 |
+
import traceback
|
| 5 |
+
|
| 6 |
+
def read_file(file):
    """Return the text content of an uploaded .pdf or .txt file.

    Args:
        file: a file-like object (e.g. a Streamlit upload) exposing
            ``name`` and ``read()``.

    Returns:
        str: the extracted text.

    Raises:
        Exception: if the PDF cannot be parsed, or the extension is
            neither .pdf nor .txt.
    """
    if file.name.endswith(".pdf"):
        try:
            # PdfReader replaces PdfFileReader, which was removed in PyPDF2 3.x
            # (requirements.txt pins PyPDF2 without a version).
            pdf_reader = PyPDF2.PdfReader(file)
            text = ""
            for page in pdf_reader.pages:
                # The original used `text + page.extract_text()` (missing `=`),
                # a no-op expression, so PDF text was always returned empty.
                # extract_text() may return None for image-only pages.
                text += page.extract_text() or ""
            return text
        except Exception as e:
            # Chain the cause so the underlying parser error stays visible.
            raise Exception("error reading the file") from e

    elif file.name.endswith(".txt"):
        return file.read().decode("utf-8")

    else:
        raise Exception(
            "Unsupported file format, only .pdf and .txt supported"
        )
|
| 24 |
+
|
| 25 |
+
def get_table_data(quiz_str):
    """Flatten a quiz JSON string into rows for tabular display.

    Args:
        quiz_str: JSON text shaped like response.json — a dict of numbered
            entries, each with "mcq", "options" and "correct_answer".

    Returns:
        list[dict]: one {"MCQ", "Choices", "Correct"} dict per question,
        with options joined as "a:text||b:text||...", or False if the
        string cannot be parsed (the traceback is printed to stderr).
    """
    try:
        rows = []
        for entry in json.loads(quiz_str).values():
            question = entry['mcq']
            choices = "||".join(
                f"{letter}:{choice_text}"
                for letter, choice_text in entry["options"].items()
            )
            answer = entry['correct_answer']
            rows.append({"MCQ": question, "Choices": choices, "Correct": answer})
        return rows
    except Exception as err:
        traceback.print_exception(type(err), err, err.__traceback__)
        return False
|
test.py
ADDED
|
@@ -0,0 +1,4 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Smoke test: importing the project logger configures the root logger as a
# side effect, so this one call should land in the dated file under ./logs.
from src.mcqgen.logger import logging


logging.info('Hi!! I am going to start my execution')
|