Datasets:
Add files using upload-large-folder tool
Browse files
This view is limited to 50 files because it contains too many changes.
See raw diff
- M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0000/original/question.json +14 -0
- M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0001/original/question.json +14 -0
- M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0006/original/question.json +14 -0
- M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0007/original/question.json +14 -0
- M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0008/original/question.json +14 -0
- M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0009/original/question.json +14 -0
- M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0012/final_frame.png +3 -0
- M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0012/first_frame.png +3 -0
- M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0012/ground_truth.mp4 +3 -0
- M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0030/original/question.json +14 -0
- M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0031/original/question.json +14 -0
- M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0036/original/question.json +14 -0
- M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0037/original/question.json +14 -0
- M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0038/original/question.json +14 -0
- M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0039/original/question.json +14 -0
- M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0052/original/question.json +14 -0
- M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0053/original/question.json +14 -0
- M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0054/original/question.json +14 -0
- M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0055/original/question.json +14 -0
- M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0062/original/question.json +14 -0
- M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0063/original/question.json +14 -0
- M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0064/original/question.json +14 -0
- M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0079/final_frame.png +3 -0
- M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0079/first_frame.png +3 -0
- M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0079/ground_truth.mp4 +3 -0
- M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0090/original/question.json +14 -0
- M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0091/original/question.json +14 -0
- M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0096/original/question.json +14 -0
- M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0097/original/question.json +14 -0
- M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0098/original/question.json +14 -0
- M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0099/original/question.json +14 -0
- M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0202/original/question.json +14 -0
- M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0203/original/question.json +14 -0
- M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0204/original/question.json +14 -0
- M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0205/original/question.json +14 -0
- M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0220/final_frame.png +3 -0
- M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0220/first_frame.png +3 -0
- M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0220/ground_truth.mp4 +3 -0
- M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0232/original/question.json +14 -0
- M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0233/original/question.json +14 -0
- M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0234/original/question.json +14 -0
- M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0235/original/question.json +14 -0
- M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0250/original/question.json +14 -0
- M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0251/original/question.json +14 -0
- M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0256/original/question.json +14 -0
- M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0258/original/question.json +14 -0
- M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0259/original/question.json +14 -0
- M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0260/original/question.json +14 -0
- M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0261/original/question.json +14 -0
- M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0266/original/question.json +14 -0
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0000/original/question.json
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"dataset": "ScienceQA",
|
| 3 |
+
"source_id": "train_0000000",
|
| 4 |
+
"question": "Which of these states is farthest north?",
|
| 5 |
+
"choices": {
|
| 6 |
+
"A": "West Virginia",
|
| 7 |
+
"B": "Louisiana",
|
| 8 |
+
"C": "Arizona",
|
| 9 |
+
"D": "Oklahoma"
|
| 10 |
+
},
|
| 11 |
+
"answer": "A",
|
| 12 |
+
"original_image_filename": "train_0000000.png",
|
| 13 |
+
"has_embedded_choices": false
|
| 14 |
+
}
|
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0001/original/question.json
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"dataset": "ScienceQA",
|
| 3 |
+
"source_id": "train_0000006",
|
| 4 |
+
"question": "What is the probability that a goat produced by this cross will be homozygous dominant for the myotonia congenita gene?",
|
| 5 |
+
"choices": {
|
| 6 |
+
"A": "1/4",
|
| 7 |
+
"B": "0/4",
|
| 8 |
+
"C": "4/4",
|
| 9 |
+
"D": "2/4"
|
| 10 |
+
},
|
| 11 |
+
"answer": "A",
|
| 12 |
+
"original_image_filename": "train_0000006.png",
|
| 13 |
+
"has_embedded_choices": false
|
| 14 |
+
}
|
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0006/original/question.json
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"dataset": "ScienceQA",
|
| 3 |
+
"source_id": "train_0000032",
|
| 4 |
+
"question": "What is the expected ratio of offspring with a hairy body to offspring with a hairless body? Choose the most likely ratio.",
|
| 5 |
+
"choices": {
|
| 6 |
+
"A": "3:1",
|
| 7 |
+
"B": "4:0",
|
| 8 |
+
"C": "2:2",
|
| 9 |
+
"D": "0:4"
|
| 10 |
+
},
|
| 11 |
+
"answer": "B",
|
| 12 |
+
"original_image_filename": "train_0000032.png",
|
| 13 |
+
"has_embedded_choices": false
|
| 14 |
+
}
|
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0007/original/question.json
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"dataset": "ScienceQA",
|
| 3 |
+
"source_id": "train_0000066",
|
| 4 |
+
"question": "What is the capital of Washington?",
|
| 5 |
+
"choices": {
|
| 6 |
+
"A": "Santa Fe",
|
| 7 |
+
"B": "Seattle",
|
| 8 |
+
"C": "New Orleans",
|
| 9 |
+
"D": "Olympia"
|
| 10 |
+
},
|
| 11 |
+
"answer": "D",
|
| 12 |
+
"original_image_filename": "train_0000066.png",
|
| 13 |
+
"has_embedded_choices": false
|
| 14 |
+
}
|
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0008/original/question.json
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"dataset": "ScienceQA",
|
| 3 |
+
"source_id": "train_0000067",
|
| 4 |
+
"question": "What is the capital of New York?",
|
| 5 |
+
"choices": {
|
| 6 |
+
"A": "Albany",
|
| 7 |
+
"B": "Lansing",
|
| 8 |
+
"C": "Buffalo",
|
| 9 |
+
"D": "Hartford"
|
| 10 |
+
},
|
| 11 |
+
"answer": "A",
|
| 12 |
+
"original_image_filename": "train_0000067.png",
|
| 13 |
+
"has_embedded_choices": false
|
| 14 |
+
}
|
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0009/original/question.json
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"dataset": "ScienceQA",
|
| 3 |
+
"source_id": "train_0000071",
|
| 4 |
+
"question": "What can Debbie and Madelyn trade to each get what they want?",
|
| 5 |
+
"choices": {
|
| 6 |
+
"A": "Madelyn can trade her broccoli for Debbie's oranges.",
|
| 7 |
+
"B": "Debbie can trade her tomatoes for Madelyn's broccoli.",
|
| 8 |
+
"C": "Debbie can trade her tomatoes for Madelyn's sandwich.",
|
| 9 |
+
"D": "Madelyn can trade her almonds for Debbie's tomatoes."
|
| 10 |
+
},
|
| 11 |
+
"answer": "B",
|
| 12 |
+
"original_image_filename": "train_0000071.png",
|
| 13 |
+
"has_embedded_choices": false
|
| 14 |
+
}
|
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0012/final_frame.png
ADDED
|
Git LFS Details
|
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0012/first_frame.png
ADDED
|
Git LFS Details
|
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0012/ground_truth.mp4
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:797ab11e2a2aac95d62686730d56fdf8ff12c251ff6860606dcb449b7cc44b8f
|
| 3 |
+
size 21439
|
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0030/original/question.json
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"dataset": "ScienceQA",
|
| 3 |
+
"source_id": "train_0000167",
|
| 4 |
+
"question": "What is the capital of Massachusetts?",
|
| 5 |
+
"choices": {
|
| 6 |
+
"A": "Cambridge",
|
| 7 |
+
"B": "Boston",
|
| 8 |
+
"C": "Trenton",
|
| 9 |
+
"D": "New Orleans"
|
| 10 |
+
},
|
| 11 |
+
"answer": "B",
|
| 12 |
+
"original_image_filename": "train_0000167.png",
|
| 13 |
+
"has_embedded_choices": false
|
| 14 |
+
}
|
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0031/original/question.json
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"dataset": "ScienceQA",
|
| 3 |
+
"source_id": "train_0000179",
|
| 4 |
+
"question": "What is the capital of Iowa?",
|
| 5 |
+
"choices": {
|
| 6 |
+
"A": "Des Moines",
|
| 7 |
+
"B": "Cedar Rapids",
|
| 8 |
+
"C": "Baltimore",
|
| 9 |
+
"D": "Pierre"
|
| 10 |
+
},
|
| 11 |
+
"answer": "A",
|
| 12 |
+
"original_image_filename": "train_0000179.png",
|
| 13 |
+
"has_embedded_choices": false
|
| 14 |
+
}
|
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0036/original/question.json
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"dataset": "ScienceQA",
|
| 3 |
+
"source_id": "train_0000210",
|
| 4 |
+
"question": "Which is in row C?",
|
| 5 |
+
"choices": {
|
| 6 |
+
"A": "the diner",
|
| 7 |
+
"B": "the grocery store",
|
| 8 |
+
"C": "the library",
|
| 9 |
+
"D": "the school"
|
| 10 |
+
},
|
| 11 |
+
"answer": "D",
|
| 12 |
+
"original_image_filename": "train_0000210.png",
|
| 13 |
+
"has_embedded_choices": false
|
| 14 |
+
}
|
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0037/original/question.json
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"dataset": "ScienceQA",
|
| 3 |
+
"source_id": "train_0000215",
|
| 4 |
+
"question": "What is the capital of Hawaii?",
|
| 5 |
+
"choices": {
|
| 6 |
+
"A": "Helena",
|
| 7 |
+
"B": "Santa Fe",
|
| 8 |
+
"C": "Honolulu",
|
| 9 |
+
"D": "Phoenix"
|
| 10 |
+
},
|
| 11 |
+
"answer": "C",
|
| 12 |
+
"original_image_filename": "train_0000215.png",
|
| 13 |
+
"has_embedded_choices": false
|
| 14 |
+
}
|
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0038/original/question.json
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"dataset": "ScienceQA",
|
| 3 |
+
"source_id": "train_0000217",
|
| 4 |
+
"question": "What is the name of the colony shown?",
|
| 5 |
+
"choices": {
|
| 6 |
+
"A": "Maryland",
|
| 7 |
+
"B": "New Jersey",
|
| 8 |
+
"C": "Rhode Island",
|
| 9 |
+
"D": "Delaware"
|
| 10 |
+
},
|
| 11 |
+
"answer": "B",
|
| 12 |
+
"original_image_filename": "train_0000217.png",
|
| 13 |
+
"has_embedded_choices": false
|
| 14 |
+
}
|
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0039/original/question.json
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"dataset": "ScienceQA",
|
| 3 |
+
"source_id": "train_0000229",
|
| 4 |
+
"question": "What can Percy and Cole trade to each get what they want?",
|
| 5 |
+
"choices": {
|
| 6 |
+
"A": "Percy can trade his tomatoes for Cole's broccoli.",
|
| 7 |
+
"B": "Cole can trade his broccoli for Percy's oranges.",
|
| 8 |
+
"C": "Cole can trade his almonds for Percy's tomatoes.",
|
| 9 |
+
"D": "Percy can trade his tomatoes for Cole's carrots."
|
| 10 |
+
},
|
| 11 |
+
"answer": "A",
|
| 12 |
+
"original_image_filename": "train_0000229.png",
|
| 13 |
+
"has_embedded_choices": false
|
| 14 |
+
}
|
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0052/original/question.json
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"dataset": "ScienceQA",
|
| 3 |
+
"source_id": "train_0000277",
|
| 4 |
+
"question": "What is the expected ratio of offspring with green body feathers to offspring with blue body feathers? Choose the most likely ratio.",
|
| 5 |
+
"choices": {
|
| 6 |
+
"A": "2:2",
|
| 7 |
+
"B": "0:4",
|
| 8 |
+
"C": "1:3",
|
| 9 |
+
"D": "3:1"
|
| 10 |
+
},
|
| 11 |
+
"answer": "A",
|
| 12 |
+
"original_image_filename": "train_0000277.png",
|
| 13 |
+
"has_embedded_choices": false
|
| 14 |
+
}
|
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0053/original/question.json
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"dataset": "ScienceQA",
|
| 3 |
+
"source_id": "train_0000279",
|
| 4 |
+
"question": "Which continent is highlighted?",
|
| 5 |
+
"choices": {
|
| 6 |
+
"A": "Antarctica",
|
| 7 |
+
"B": "North America",
|
| 8 |
+
"C": "South America",
|
| 9 |
+
"D": "Europe"
|
| 10 |
+
},
|
| 11 |
+
"answer": "C",
|
| 12 |
+
"original_image_filename": "train_0000279.png",
|
| 13 |
+
"has_embedded_choices": false
|
| 14 |
+
}
|
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0054/original/question.json
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"dataset": "ScienceQA",
|
| 3 |
+
"source_id": "train_0000285",
|
| 4 |
+
"question": "Which of these cities is marked on the map?",
|
| 5 |
+
"choices": {
|
| 6 |
+
"A": "Salt Lake City",
|
| 7 |
+
"B": "San Jose",
|
| 8 |
+
"C": "Los Angeles",
|
| 9 |
+
"D": "Portland"
|
| 10 |
+
},
|
| 11 |
+
"answer": "D",
|
| 12 |
+
"original_image_filename": "train_0000285.png",
|
| 13 |
+
"has_embedded_choices": false
|
| 14 |
+
}
|
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0055/original/question.json
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"dataset": "ScienceQA",
|
| 3 |
+
"source_id": "train_0000299",
|
| 4 |
+
"question": "Which continent is highlighted?",
|
| 5 |
+
"choices": {
|
| 6 |
+
"A": "Europe",
|
| 7 |
+
"B": "Asia",
|
| 8 |
+
"C": "Africa",
|
| 9 |
+
"D": "North America"
|
| 10 |
+
},
|
| 11 |
+
"answer": "B",
|
| 12 |
+
"original_image_filename": "train_0000299.png",
|
| 13 |
+
"has_embedded_choices": false
|
| 14 |
+
}
|
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0062/original/question.json
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"dataset": "ScienceQA",
|
| 3 |
+
"source_id": "train_0000340",
|
| 4 |
+
"question": "What is the capital of Maryland?",
|
| 5 |
+
"choices": {
|
| 6 |
+
"A": "Harrisburg",
|
| 7 |
+
"B": "Augusta",
|
| 8 |
+
"C": "Norfolk",
|
| 9 |
+
"D": "Annapolis"
|
| 10 |
+
},
|
| 11 |
+
"answer": "D",
|
| 12 |
+
"original_image_filename": "train_0000340.png",
|
| 13 |
+
"has_embedded_choices": false
|
| 14 |
+
}
|
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0063/original/question.json
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"dataset": "ScienceQA",
|
| 3 |
+
"source_id": "train_0000342",
|
| 4 |
+
"question": "What is the capital of California?",
|
| 5 |
+
"choices": {
|
| 6 |
+
"A": "Sacramento",
|
| 7 |
+
"B": "Salem",
|
| 8 |
+
"C": "Dover",
|
| 9 |
+
"D": "Santa Fe"
|
| 10 |
+
},
|
| 11 |
+
"answer": "A",
|
| 12 |
+
"original_image_filename": "train_0000342.png",
|
| 13 |
+
"has_embedded_choices": false
|
| 14 |
+
}
|
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0064/original/question.json
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"dataset": "ScienceQA",
|
| 3 |
+
"source_id": "train_0000356",
|
| 4 |
+
"question": "Which of these states is farthest south?",
|
| 5 |
+
"choices": {
|
| 6 |
+
"A": "North Carolina",
|
| 7 |
+
"B": "Rhode Island",
|
| 8 |
+
"C": "Utah",
|
| 9 |
+
"D": "Minnesota"
|
| 10 |
+
},
|
| 11 |
+
"answer": "A",
|
| 12 |
+
"original_image_filename": "train_0000356.png",
|
| 13 |
+
"has_embedded_choices": false
|
| 14 |
+
}
|
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0079/final_frame.png
ADDED
|
Git LFS Details
|
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0079/first_frame.png
ADDED
|
Git LFS Details
|
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0079/ground_truth.mp4
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:cf2e973f033d15ad4c69286774af6f3fad3d16ca980de5789b74acd34dfc0962
|
| 3 |
+
size 27461
|
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0090/original/question.json
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"dataset": "ScienceQA",
|
| 3 |
+
"source_id": "train_0000510",
|
| 4 |
+
"question": "Which country is highlighted?",
|
| 5 |
+
"choices": {
|
| 6 |
+
"A": "Fiji",
|
| 7 |
+
"B": "Nauru",
|
| 8 |
+
"C": "Solomon Islands",
|
| 9 |
+
"D": "Vanuatu"
|
| 10 |
+
},
|
| 11 |
+
"answer": "D",
|
| 12 |
+
"original_image_filename": "train_0000510.png",
|
| 13 |
+
"has_embedded_choices": false
|
| 14 |
+
}
|
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0091/original/question.json
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"dataset": "ScienceQA",
|
| 3 |
+
"source_id": "train_0000515",
|
| 4 |
+
"question": "What is the capital of North Carolina?",
|
| 5 |
+
"choices": {
|
| 6 |
+
"A": "Little Rock",
|
| 7 |
+
"B": "Oklahoma City",
|
| 8 |
+
"C": "Raleigh",
|
| 9 |
+
"D": "Charlotte"
|
| 10 |
+
},
|
| 11 |
+
"answer": "C",
|
| 12 |
+
"original_image_filename": "train_0000515.png",
|
| 13 |
+
"has_embedded_choices": false
|
| 14 |
+
}
|
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0096/original/question.json
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"dataset": "ScienceQA",
|
| 3 |
+
"source_id": "train_0000549",
|
| 4 |
+
"question": "What is the capital of Missouri?",
|
| 5 |
+
"choices": {
|
| 6 |
+
"A": "Jefferson City",
|
| 7 |
+
"B": "Indianapolis",
|
| 8 |
+
"C": "Bismarck",
|
| 9 |
+
"D": "Springfield"
|
| 10 |
+
},
|
| 11 |
+
"answer": "A",
|
| 12 |
+
"original_image_filename": "train_0000549.png",
|
| 13 |
+
"has_embedded_choices": false
|
| 14 |
+
}
|
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0097/original/question.json
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"dataset": "ScienceQA",
|
| 3 |
+
"source_id": "train_0000550",
|
| 4 |
+
"question": "Which trait did Glyphithyreus have? Select the trait you can observe on the fossil.",
|
| 5 |
+
"choices": {
|
| 6 |
+
"A": "long, thin antennae",
|
| 7 |
+
"B": "red legs with orange tips",
|
| 8 |
+
"C": "eight legs and two claws",
|
| 9 |
+
"D": "hair on its body and legs"
|
| 10 |
+
},
|
| 11 |
+
"answer": "C",
|
| 12 |
+
"original_image_filename": "train_0000550.png",
|
| 13 |
+
"has_embedded_choices": false
|
| 14 |
+
}
|
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0098/original/question.json
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"dataset": "ScienceQA",
|
| 3 |
+
"source_id": "train_0000554",
|
| 4 |
+
"question": "What is the capital of Maine?",
|
| 5 |
+
"choices": {
|
| 6 |
+
"A": "Portland",
|
| 7 |
+
"B": "Augusta",
|
| 8 |
+
"C": "Cambridge",
|
| 9 |
+
"D": "Annapolis"
|
| 10 |
+
},
|
| 11 |
+
"answer": "B",
|
| 12 |
+
"original_image_filename": "train_0000554.png",
|
| 13 |
+
"has_embedded_choices": false
|
| 14 |
+
}
|
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0099/original/question.json
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"dataset": "ScienceQA",
|
| 3 |
+
"source_id": "train_0000558",
|
| 4 |
+
"question": "Which is in column 4?",
|
| 5 |
+
"choices": {
|
| 6 |
+
"A": "the fire department",
|
| 7 |
+
"B": "the library",
|
| 8 |
+
"C": "the gas station",
|
| 9 |
+
"D": "the grocery store"
|
| 10 |
+
},
|
| 11 |
+
"answer": "C",
|
| 12 |
+
"original_image_filename": "train_0000558.png",
|
| 13 |
+
"has_embedded_choices": false
|
| 14 |
+
}
|
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0202/original/question.json
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"dataset": "ScienceQA",
|
| 3 |
+
"source_id": "train_0001075",
|
| 4 |
+
"question": "What is the capital of California?",
|
| 5 |
+
"choices": {
|
| 6 |
+
"A": "Sacramento",
|
| 7 |
+
"B": "Nampa",
|
| 8 |
+
"C": "Salem",
|
| 9 |
+
"D": "Phoenix"
|
| 10 |
+
},
|
| 11 |
+
"answer": "A",
|
| 12 |
+
"original_image_filename": "train_0001075.png",
|
| 13 |
+
"has_embedded_choices": false
|
| 14 |
+
}
|
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0203/original/question.json
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"dataset": "ScienceQA",
|
| 3 |
+
"source_id": "train_0001076",
|
| 4 |
+
"question": "What is the name of the colony shown?",
|
| 5 |
+
"choices": {
|
| 6 |
+
"A": "Virginia",
|
| 7 |
+
"B": "New York",
|
| 8 |
+
"C": "Vermont",
|
| 9 |
+
"D": "Pennsylvania"
|
| 10 |
+
},
|
| 11 |
+
"answer": "B",
|
| 12 |
+
"original_image_filename": "train_0001076.png",
|
| 13 |
+
"has_embedded_choices": false
|
| 14 |
+
}
|
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0204/original/question.json
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"dataset": "ScienceQA",
|
| 3 |
+
"source_id": "train_0001077",
|
| 4 |
+
"question": "Select the reptile below.",
|
| 5 |
+
"choices": {
|
| 6 |
+
"A": "porcupinefish",
|
| 7 |
+
"B": "anchovy",
|
| 8 |
+
"C": "fire salamander",
|
| 9 |
+
"D": "cobra"
|
| 10 |
+
},
|
| 11 |
+
"answer": "D",
|
| 12 |
+
"original_image_filename": "train_0001077.png",
|
| 13 |
+
"has_embedded_choices": false
|
| 14 |
+
}
|
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0205/original/question.json
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"dataset": "ScienceQA",
|
| 3 |
+
"source_id": "train_0001079",
|
| 4 |
+
"question": "Which of these states is farthest west?",
|
| 5 |
+
"choices": {
|
| 6 |
+
"A": "Vermont",
|
| 7 |
+
"B": "Ohio",
|
| 8 |
+
"C": "Washington",
|
| 9 |
+
"D": "Virginia"
|
| 10 |
+
},
|
| 11 |
+
"answer": "C",
|
| 12 |
+
"original_image_filename": "train_0001079.png",
|
| 13 |
+
"has_embedded_choices": false
|
| 14 |
+
}
|
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0220/final_frame.png
ADDED
|
Git LFS Details
|
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0220/first_frame.png
ADDED
|
Git LFS Details
|
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0220/ground_truth.mp4
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:cca76bd98e20adbdf99bff017169cc3f50005a81ed8b317249a7678a7815d9a8
|
| 3 |
+
size 23274
|
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0232/original/question.json
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"dataset": "ScienceQA",
|
| 3 |
+
"source_id": "train_0001231",
|
| 4 |
+
"question": "Which ocean is highlighted?",
|
| 5 |
+
"choices": {
|
| 6 |
+
"A": "the Pacific Ocean",
|
| 7 |
+
"B": "the Indian Ocean",
|
| 8 |
+
"C": "the Southern Ocean",
|
| 9 |
+
"D": "the Atlantic Ocean"
|
| 10 |
+
},
|
| 11 |
+
"answer": "A",
|
| 12 |
+
"original_image_filename": "train_0001231.png",
|
| 13 |
+
"has_embedded_choices": false
|
| 14 |
+
}
|
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0233/original/question.json
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"dataset": "ScienceQA",
|
| 3 |
+
"source_id": "train_0001258",
|
| 4 |
+
"question": "What is the capital of South Dakota?",
|
| 5 |
+
"choices": {
|
| 6 |
+
"A": "Sioux Falls",
|
| 7 |
+
"B": "Rapid City",
|
| 8 |
+
"C": "Bismarck",
|
| 9 |
+
"D": "Pierre"
|
| 10 |
+
},
|
| 11 |
+
"answer": "D",
|
| 12 |
+
"original_image_filename": "train_0001258.png",
|
| 13 |
+
"has_embedded_choices": false
|
| 14 |
+
}
|
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0234/original/question.json
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"dataset": "ScienceQA",
|
| 3 |
+
"source_id": "train_0001270",
|
| 4 |
+
"question": "What is the capital of Massachusetts?",
|
| 5 |
+
"choices": {
|
| 6 |
+
"A": "Concord",
|
| 7 |
+
"B": "Cambridge",
|
| 8 |
+
"C": "Boston",
|
| 9 |
+
"D": "Missoula"
|
| 10 |
+
},
|
| 11 |
+
"answer": "C",
|
| 12 |
+
"original_image_filename": "train_0001270.png",
|
| 13 |
+
"has_embedded_choices": false
|
| 14 |
+
}
|
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0235/original/question.json
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"dataset": "ScienceQA",
|
| 3 |
+
"source_id": "train_0001278",
|
| 4 |
+
"question": "Which country is highlighted?",
|
| 5 |
+
"choices": {
|
| 6 |
+
"A": "Papua New Guinea",
|
| 7 |
+
"B": "New Zealand",
|
| 8 |
+
"C": "Solomon Islands",
|
| 9 |
+
"D": "Australia"
|
| 10 |
+
},
|
| 11 |
+
"answer": "D",
|
| 12 |
+
"original_image_filename": "train_0001278.png",
|
| 13 |
+
"has_embedded_choices": false
|
| 14 |
+
}
|
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0250/original/question.json
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"dataset": "ScienceQA",
|
| 3 |
+
"source_id": "train_0001357",
|
| 4 |
+
"question": "Which country is highlighted?",
|
| 5 |
+
"choices": {
|
| 6 |
+
"A": "Haiti",
|
| 7 |
+
"B": "the Dominican Republic",
|
| 8 |
+
"C": "Jamaica",
|
| 9 |
+
"D": "Cuba"
|
| 10 |
+
},
|
| 11 |
+
"answer": "A",
|
| 12 |
+
"original_image_filename": "train_0001357.png",
|
| 13 |
+
"has_embedded_choices": false
|
| 14 |
+
}
|
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0251/original/question.json
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"dataset": "ScienceQA",
|
| 3 |
+
"source_id": "train_0001359",
|
| 4 |
+
"question": "What is the capital of Colorado?",
|
| 5 |
+
"choices": {
|
| 6 |
+
"A": "Boise",
|
| 7 |
+
"B": "Olympia",
|
| 8 |
+
"C": "Denver",
|
| 9 |
+
"D": "Colorado Springs"
|
| 10 |
+
},
|
| 11 |
+
"answer": "C",
|
| 12 |
+
"original_image_filename": "train_0001359.png",
|
| 13 |
+
"has_embedded_choices": false
|
| 14 |
+
}
|
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0256/original/question.json
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"dataset": "ScienceQA",
|
| 3 |
+
"source_id": "train_0001385",
|
| 4 |
+
"question": "What is the name of the colony shown?",
|
| 5 |
+
"choices": {
|
| 6 |
+
"A": "North Carolina",
|
| 7 |
+
"B": "Washington, D.C.",
|
| 8 |
+
"C": "Iowa",
|
| 9 |
+
"D": "Maryland"
|
| 10 |
+
},
|
| 11 |
+
"answer": "D",
|
| 12 |
+
"original_image_filename": "train_0001385.png",
|
| 13 |
+
"has_embedded_choices": false
|
| 14 |
+
}
|
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0258/original/question.json
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"dataset": "ScienceQA",
|
| 3 |
+
"source_id": "train_0001389",
|
| 4 |
+
"question": "Which of these states is farthest north?",
|
| 5 |
+
"choices": {
|
| 6 |
+
"A": "West Virginia",
|
| 7 |
+
"B": "New Mexico",
|
| 8 |
+
"C": "South Carolina",
|
| 9 |
+
"D": "Louisiana"
|
| 10 |
+
},
|
| 11 |
+
"answer": "A",
|
| 12 |
+
"original_image_filename": "train_0001389.png",
|
| 13 |
+
"has_embedded_choices": false
|
| 14 |
+
}
|
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0259/original/question.json
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"dataset": "ScienceQA",
|
| 3 |
+
"source_id": "train_0001397",
|
| 4 |
+
"question": "Select the fish below.",
|
| 5 |
+
"choices": {
|
| 6 |
+
"A": "tortoise",
|
| 7 |
+
"B": "ostrich",
|
| 8 |
+
"C": "piranha",
|
| 9 |
+
"D": "African bullfrog"
|
| 10 |
+
},
|
| 11 |
+
"answer": "C",
|
| 12 |
+
"original_image_filename": "train_0001397.png",
|
| 13 |
+
"has_embedded_choices": false
|
| 14 |
+
}
|
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0260/original/question.json
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"dataset": "ScienceQA",
|
| 3 |
+
"source_id": "train_0001400",
|
| 4 |
+
"question": "What is the capital of Wisconsin?",
|
| 5 |
+
"choices": {
|
| 6 |
+
"A": "Columbus",
|
| 7 |
+
"B": "Milwaukee",
|
| 8 |
+
"C": "Madison",
|
| 9 |
+
"D": "Green Bay"
|
| 10 |
+
},
|
| 11 |
+
"answer": "C",
|
| 12 |
+
"original_image_filename": "train_0001400.png",
|
| 13 |
+
"has_embedded_choices": false
|
| 14 |
+
}
|
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0261/original/question.json
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"dataset": "ScienceQA",
|
| 3 |
+
"source_id": "train_0001401",
|
| 4 |
+
"question": "What is the capital of Louisiana?",
|
| 5 |
+
"choices": {
|
| 6 |
+
"A": "Baton Rouge",
|
| 7 |
+
"B": "New Orleans",
|
| 8 |
+
"C": "Richmond",
|
| 9 |
+
"D": "Jefferson City"
|
| 10 |
+
},
|
| 11 |
+
"answer": "A",
|
| 12 |
+
"original_image_filename": "train_0001401.png",
|
| 13 |
+
"has_embedded_choices": false
|
| 14 |
+
}
|
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0266/original/question.json
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"dataset": "ScienceQA",
|
| 3 |
+
"source_id": "train_0001432",
|
| 4 |
+
"question": "Which country is highlighted?",
|
| 5 |
+
"choices": {
|
| 6 |
+
"A": "Antigua and Barbuda",
|
| 7 |
+
"B": "Jamaica",
|
| 8 |
+
"C": "Grenada",
|
| 9 |
+
"D": "Trinidad and Tobago"
|
| 10 |
+
},
|
| 11 |
+
"answer": "D",
|
| 12 |
+
"original_image_filename": "train_0001432.png",
|
| 13 |
+
"has_embedded_choices": false
|
| 14 |
+
}
|