Hokin committed on
Commit
bb5fda8
·
verified ·
1 Parent(s): 4071526

Add files using upload-large-folder tool

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0011/original/question.json +14 -0
  2. M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0016/original/question.json +14 -0
  3. M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0018/original/question.json +14 -0
  4. M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0020/original/question.json +14 -0
  5. M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0027/original/question.json +14 -0
  6. M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0029/original/question.json +14 -0
  7. M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0042/original/question.json +14 -0
  8. M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0042/prompt.txt +8 -0
  9. M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0045/original/question.json +14 -0
  10. M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0072/original/question.json +14 -0
  11. M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0073/original/question.json +14 -0
  12. M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0074/original/question.json +14 -0
  13. M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0075/original/question.json +14 -0
  14. M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0080/original/question.json +14 -0
  15. M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0081/original/question.json +14 -0
  16. M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0087/original/question.json +14 -0
  17. M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0089/original/question.json +14 -0
  18. M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0089/prompt.txt +8 -0
  19. M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0102/final_frame.png +3 -0
  20. M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0102/first_frame.png +3 -0
  21. M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0102/ground_truth.mp4 +3 -0
  22. M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0159/final_frame.png +3 -0
  23. M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0159/first_frame.png +3 -0
  24. M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0159/ground_truth.mp4 +3 -0
  25. M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0161/final_frame.png +3 -0
  26. M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0161/first_frame.png +3 -0
  27. M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0161/ground_truth.mp4 +3 -0
  28. M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0166/final_frame.png +3 -0
  29. M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0166/first_frame.png +3 -0
  30. M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0166/ground_truth.mp4 +3 -0
  31. M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0192/final_frame.png +3 -0
  32. M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0192/first_frame.png +3 -0
  33. M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0192/ground_truth.mp4 +3 -0
  34. M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0195/final_frame.png +3 -0
  35. M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0195/first_frame.png +3 -0
  36. M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0195/ground_truth.mp4 +3 -0
  37. M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0212/original/question.json +14 -0
  38. M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0215/original/question.json +14 -0
  39. M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0223/original/question.json +14 -0
  40. M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0224/original/question.json +14 -0
  41. M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0240/original/question.json +14 -0
  42. M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0241/original/question.json +14 -0
  43. M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0246/original/question.json +14 -0
  44. M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0247/original/question.json +14 -0
  45. M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0248/original/question.json +14 -0
  46. M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0270/original/question.json +14 -0
  47. M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0270/prompt.txt +8 -0
  48. M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0277/original/question.json +14 -0
  49. M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0278/original/question.json +14 -0
  50. M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0279/original/question.json +14 -0
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0011/original/question.json ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset": "ScienceQA",
3
+ "source_id": "train_0000088",
4
+ "question": "Which country is highlighted?",
5
+ "choices": {
6
+ "A": "Saint Lucia",
7
+ "B": "Grenada",
8
+ "C": "Haiti",
9
+ "D": "Cuba"
10
+ },
11
+ "answer": "C",
12
+ "original_image_filename": "train_0000088.png",
13
+ "has_embedded_choices": false
14
+ }
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0016/original/question.json ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset": "ScienceQA",
3
+ "source_id": "train_0000112",
4
+ "question": "What can Akira and Brooke trade to each get what they want?",
5
+ "choices": {
6
+ "A": "Brooke can trade her broccoli for Akira's oranges.",
7
+ "B": "Brooke can trade her almonds for Akira's tomatoes.",
8
+ "C": "Akira can trade her tomatoes for Brooke's sandwich.",
9
+ "D": "Akira can trade her tomatoes for Brooke's broccoli."
10
+ },
11
+ "answer": "D",
12
+ "original_image_filename": "train_0000112.png",
13
+ "has_embedded_choices": false
14
+ }
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0018/original/question.json ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset": "ScienceQA",
3
+ "source_id": "train_0000123",
4
+ "question": "Which of these states is farthest north?",
5
+ "choices": {
6
+ "A": "West Virginia",
7
+ "B": "Mississippi",
8
+ "C": "Nebraska",
9
+ "D": "Oklahoma"
10
+ },
11
+ "answer": "C",
12
+ "original_image_filename": "train_0000123.png",
13
+ "has_embedded_choices": false
14
+ }
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0020/original/question.json ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset": "ScienceQA",
3
+ "source_id": "train_0000128",
4
+ "question": "What is the capital of Washington?",
5
+ "choices": {
6
+ "A": "Louisville",
7
+ "B": "Seattle",
8
+ "C": "Olympia",
9
+ "D": "Spokane"
10
+ },
11
+ "answer": "C",
12
+ "original_image_filename": "train_0000128.png",
13
+ "has_embedded_choices": false
14
+ }
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0027/original/question.json ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset": "ScienceQA",
3
+ "source_id": "train_0000155",
4
+ "question": "What is the capital of Kentucky?",
5
+ "choices": {
6
+ "A": "Frankfort",
7
+ "B": "Nashville",
8
+ "C": "Louisville",
9
+ "D": "Lexington"
10
+ },
11
+ "answer": "A",
12
+ "original_image_filename": "train_0000155.png",
13
+ "has_embedded_choices": false
14
+ }
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0029/original/question.json ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset": "ScienceQA",
3
+ "source_id": "train_0000160",
4
+ "question": "What is the capital of Pennsylvania?",
5
+ "choices": {
6
+ "A": "Harrisburg",
7
+ "B": "Trenton",
8
+ "C": "Pittsburgh",
9
+ "D": "Philadelphia"
10
+ },
11
+ "answer": "A",
12
+ "original_image_filename": "train_0000160.png",
13
+ "has_embedded_choices": false
14
+ }
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0042/original/question.json ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset": "ScienceQA",
3
+ "source_id": "train_0000236",
4
+ "question": "Which ocean is highlighted?",
5
+ "choices": {
6
+ "A": "the Arctic Ocean",
7
+ "B": "the Southern Ocean",
8
+ "C": "the Pacific Ocean",
9
+ "D": "the Atlantic Ocean"
10
+ },
11
+ "answer": "C",
12
+ "original_image_filename": "train_0000236.png",
13
+ "has_embedded_choices": false
14
+ }
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0042/prompt.txt ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
+ Which ocean is highlighted?
2
+
3
+ A: the Arctic Ocean
4
+ B: the Southern Ocean
5
+ C: the Pacific Ocean
6
+ D: the Atlantic Ocean
7
+
8
+ Answer: C
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0045/original/question.json ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset": "ScienceQA",
3
+ "source_id": "train_0000252",
4
+ "question": "What is the capital of Maryland?",
5
+ "choices": {
6
+ "A": "Boston",
7
+ "B": "Omaha",
8
+ "C": "Dover",
9
+ "D": "Annapolis"
10
+ },
11
+ "answer": "D",
12
+ "original_image_filename": "train_0000252.png",
13
+ "has_embedded_choices": false
14
+ }
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0072/original/question.json ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset": "ScienceQA",
3
+ "source_id": "train_0000433",
4
+ "question": "What is the name of the colony shown?",
5
+ "choices": {
6
+ "A": "Florida",
7
+ "B": "New Jersey",
8
+ "C": "Pennsylvania",
9
+ "D": "Delaware"
10
+ },
11
+ "answer": "C",
12
+ "original_image_filename": "train_0000433.png",
13
+ "has_embedded_choices": false
14
+ }
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0073/original/question.json ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset": "ScienceQA",
3
+ "source_id": "train_0000445",
4
+ "question": "What can Sharon and Ernesto trade to each get what they want?",
5
+ "choices": {
6
+ "A": "Ernesto can trade his almonds for Sharon's tomatoes.",
7
+ "B": "Sharon can trade her tomatoes for Ernesto's broccoli.",
8
+ "C": "Ernesto can trade his broccoli for Sharon's oranges.",
9
+ "D": "Sharon can trade her tomatoes for Ernesto's carrots."
10
+ },
11
+ "answer": "B",
12
+ "original_image_filename": "train_0000445.png",
13
+ "has_embedded_choices": false
14
+ }
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0074/original/question.json ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset": "ScienceQA",
3
+ "source_id": "train_0000446",
4
+ "question": "What is the capital of Washington?",
5
+ "choices": {
6
+ "A": "Denver",
7
+ "B": "Olympia",
8
+ "C": "Topeka",
9
+ "D": "Des Moines"
10
+ },
11
+ "answer": "B",
12
+ "original_image_filename": "train_0000446.png",
13
+ "has_embedded_choices": false
14
+ }
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0075/original/question.json ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset": "ScienceQA",
3
+ "source_id": "train_0000448",
4
+ "question": "What is the capital of North Dakota?",
5
+ "choices": {
6
+ "A": "Frankfort",
7
+ "B": "Lincoln",
8
+ "C": "Harrisburg",
9
+ "D": "Bismarck"
10
+ },
11
+ "answer": "D",
12
+ "original_image_filename": "train_0000448.png",
13
+ "has_embedded_choices": false
14
+ }
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0080/original/question.json ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset": "ScienceQA",
3
+ "source_id": "train_0000476",
4
+ "question": "What is the capital of Utah?",
5
+ "choices": {
6
+ "A": "Salem",
7
+ "B": "Salt Lake City",
8
+ "C": "Provo",
9
+ "D": "Helena"
10
+ },
11
+ "answer": "B",
12
+ "original_image_filename": "train_0000476.png",
13
+ "has_embedded_choices": false
14
+ }
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0081/original/question.json ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset": "ScienceQA",
3
+ "source_id": "train_0000477",
4
+ "question": "Which of these cities is marked on the map?",
5
+ "choices": {
6
+ "A": "St. Louis",
7
+ "B": "New Orleans",
8
+ "C": "Houston",
9
+ "D": "New York City"
10
+ },
11
+ "answer": "B",
12
+ "original_image_filename": "train_0000477.png",
13
+ "has_embedded_choices": false
14
+ }
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0087/original/question.json ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset": "ScienceQA",
3
+ "source_id": "train_0000499",
4
+ "question": "What is the name of the colony shown?",
5
+ "choices": {
6
+ "A": "Connecticut",
7
+ "B": "New Hampshire",
8
+ "C": "Massachusetts",
9
+ "D": "Wisconsin"
10
+ },
11
+ "answer": "A",
12
+ "original_image_filename": "train_0000499.png",
13
+ "has_embedded_choices": false
14
+ }
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0089/original/question.json ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset": "ScienceQA",
3
+ "source_id": "train_0000502",
4
+ "question": "What is the capital of Minnesota?",
5
+ "choices": {
6
+ "A": "Bismarck",
7
+ "B": "Springfield",
8
+ "C": "Saint Paul",
9
+ "D": "Minneapolis"
10
+ },
11
+ "answer": "C",
12
+ "original_image_filename": "train_0000502.png",
13
+ "has_embedded_choices": false
14
+ }
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0089/prompt.txt ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
+ What is the capital of Minnesota?
2
+
3
+ A: Bismarck
4
+ B: Springfield
5
+ C: Saint Paul
6
+ D: Minneapolis
7
+
8
+ Answer: C
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0102/final_frame.png ADDED

Git LFS Details

  • SHA256: e1aad928d28657eb71c5b545effa36bdfed09d94594a9d83fc858a0989634e4c
  • Pointer size: 130 Bytes
  • Size of remote file: 34.4 kB
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0102/first_frame.png ADDED

Git LFS Details

  • SHA256: 32db12569eaefcb98bd06b2a997e0f7c4ace907aa55ae1553885996f59d973f0
  • Pointer size: 130 Bytes
  • Size of remote file: 34.4 kB
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0102/ground_truth.mp4 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:602526dc283b89df4ff659f03517fda732f1f65c68587799d6fc11c7d113d0ca
3
+ size 22168
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0159/final_frame.png ADDED

Git LFS Details

  • SHA256: aa39a765c43f50b25e599c5eff713db2be5edcfc7d41307e4153eb3551267eea
  • Pointer size: 130 Bytes
  • Size of remote file: 78.8 kB
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0159/first_frame.png ADDED

Git LFS Details

  • SHA256: bac4b79dfaa7d69740e6fc095197fd3936de0f813f9206973001cd1b59b14055
  • Pointer size: 130 Bytes
  • Size of remote file: 78.7 kB
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0159/ground_truth.mp4 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:be887945ade65203394ea8b26e59d14dd7d6183e59ffb8dbc2bc35b5a7fb339b
3
+ size 22215
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0161/final_frame.png ADDED

Git LFS Details

  • SHA256: a982e4a9349dbef3a66698f52ed328b9ae0d4fbd2b0fdfa1d487fcb28d4ed7a0
  • Pointer size: 130 Bytes
  • Size of remote file: 42.3 kB
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0161/first_frame.png ADDED

Git LFS Details

  • SHA256: 9df4f1e36608081731315e3a45aad3245286b0eefb2891866d0b9102d9b5ec14
  • Pointer size: 130 Bytes
  • Size of remote file: 42.3 kB
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0161/ground_truth.mp4 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:3ffe9a794a2c47f86c50a48f959b773f84169eaa08bb1ffd08cbff4709390a4e
3
+ size 23022
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0166/final_frame.png ADDED

Git LFS Details

  • SHA256: 3e8d056fe82fee6114b0c6f45f33166afc60cbdee1c6833c41340ed8ecb14c6b
  • Pointer size: 130 Bytes
  • Size of remote file: 42.6 kB
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0166/first_frame.png ADDED

Git LFS Details

  • SHA256: 3a6663143e3ec043b435be5afded35b2429bd7ac931e8a8a859291929369984c
  • Pointer size: 130 Bytes
  • Size of remote file: 42.6 kB
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0166/ground_truth.mp4 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:a3511d59dbe4b105b8e668d9eeeaa097a581c1feafccf0a9b087bb52004cd485
3
+ size 24682
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0192/final_frame.png ADDED

Git LFS Details

  • SHA256: 25bb32c98424560dc9df9105bc6a60edcbf39fb0106732285e9cd421df83cde0
  • Pointer size: 130 Bytes
  • Size of remote file: 45.9 kB
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0192/first_frame.png ADDED

Git LFS Details

  • SHA256: 94dc38ec8238ce2b13888a1c83994cfcc274c047c3502125e9e6f29fdbbbf8f0
  • Pointer size: 130 Bytes
  • Size of remote file: 45.9 kB
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0192/ground_truth.mp4 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:876c21250e7153b3f5770fad4db82e7cd6bfc2ed9d36a50ba4f894868db6c97c
3
+ size 25937
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0195/final_frame.png ADDED

Git LFS Details

  • SHA256: c0815d9f1f8a3dc12c2f91df8366df7f187cfc06654d832108fd6a1237620f30
  • Pointer size: 131 Bytes
  • Size of remote file: 104 kB
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0195/first_frame.png ADDED

Git LFS Details

  • SHA256: 23b6cbef5164d7e463ad83318f13df52d216a77b4472fb984b235cc4bead5346
  • Pointer size: 131 Bytes
  • Size of remote file: 104 kB
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0195/ground_truth.mp4 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:5c545c1cf87fc897a47b345e09a5b74d8802953a6acf9e309c94b9c40b677a49
3
+ size 36952
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0212/original/question.json ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset": "ScienceQA",
3
+ "source_id": "train_0001121",
4
+ "question": "Which of these states is farthest east?",
5
+ "choices": {
6
+ "A": "Kansas",
7
+ "B": "California",
8
+ "C": "Montana",
9
+ "D": "New Mexico"
10
+ },
11
+ "answer": "A",
12
+ "original_image_filename": "train_0001121.png",
13
+ "has_embedded_choices": false
14
+ }
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0215/original/question.json ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset": "ScienceQA",
3
+ "source_id": "train_0001130",
4
+ "question": "What is the capital of Washington?",
5
+ "choices": {
6
+ "A": "Orlando",
7
+ "B": "Olympia",
8
+ "C": "Denver",
9
+ "D": "Spokane"
10
+ },
11
+ "answer": "B",
12
+ "original_image_filename": "train_0001130.png",
13
+ "has_embedded_choices": false
14
+ }
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0223/original/question.json ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset": "ScienceQA",
3
+ "source_id": "train_0001179",
4
+ "question": "What is the capital of Massachusetts?",
5
+ "choices": {
6
+ "A": "Boston",
7
+ "B": "Plymouth",
8
+ "C": "Atlanta",
9
+ "D": "Providence"
10
+ },
11
+ "answer": "A",
12
+ "original_image_filename": "train_0001179.png",
13
+ "has_embedded_choices": false
14
+ }
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0224/original/question.json ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset": "ScienceQA",
3
+ "source_id": "train_0001181",
4
+ "question": "Which of these states is farthest west?",
5
+ "choices": {
6
+ "A": "Wisconsin",
7
+ "B": "New Hampshire",
8
+ "C": "North Carolina",
9
+ "D": "Florida"
10
+ },
11
+ "answer": "A",
12
+ "original_image_filename": "train_0001181.png",
13
+ "has_embedded_choices": false
14
+ }
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0240/original/question.json ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset": "ScienceQA",
3
+ "source_id": "train_0001287",
4
+ "question": "What is the capital of Oregon?",
5
+ "choices": {
6
+ "A": "Portland",
7
+ "B": "Salem",
8
+ "C": "Tulsa",
9
+ "D": "Honolulu"
10
+ },
11
+ "answer": "B",
12
+ "original_image_filename": "train_0001287.png",
13
+ "has_embedded_choices": false
14
+ }
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0241/original/question.json ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset": "ScienceQA",
3
+ "source_id": "train_0001303",
4
+ "question": "Which of these cities is marked on the map?",
5
+ "choices": {
6
+ "A": "Chicago",
7
+ "B": "Cleveland",
8
+ "C": "Indianapolis",
9
+ "D": "St. Louis"
10
+ },
11
+ "answer": "B",
12
+ "original_image_filename": "train_0001303.png",
13
+ "has_embedded_choices": false
14
+ }
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0246/original/question.json ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset": "ScienceQA",
3
+ "source_id": "train_0001347",
4
+ "question": "Which country is highlighted?",
5
+ "choices": {
6
+ "A": "Tonga",
7
+ "B": "Vanuatu",
8
+ "C": "Solomon Islands",
9
+ "D": "Fiji"
10
+ },
11
+ "answer": "D",
12
+ "original_image_filename": "train_0001347.png",
13
+ "has_embedded_choices": false
14
+ }
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0247/original/question.json ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset": "ScienceQA",
3
+ "source_id": "train_0001349",
4
+ "question": "What can Damon and Bert trade to each get what they want?",
5
+ "choices": {
6
+ "A": "Bert can trade his broccoli for Damon's oranges.",
7
+ "B": "Damon can trade his tomatoes for Bert's sandwich.",
8
+ "C": "Damon can trade his tomatoes for Bert's broccoli.",
9
+ "D": "Bert can trade his almonds for Damon's tomatoes."
10
+ },
11
+ "answer": "C",
12
+ "original_image_filename": "train_0001349.png",
13
+ "has_embedded_choices": false
14
+ }
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0248/original/question.json ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset": "ScienceQA",
3
+ "source_id": "train_0001354",
4
+ "question": "Which of these states is farthest north?",
5
+ "choices": {
6
+ "A": "Oregon",
7
+ "B": "Utah",
8
+ "C": "Louisiana",
9
+ "D": "Georgia"
10
+ },
11
+ "answer": "A",
12
+ "original_image_filename": "train_0001354.png",
13
+ "has_embedded_choices": false
14
+ }
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0270/original/question.json ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset": "ScienceQA",
3
+ "source_id": "train_0001451",
4
+ "question": "What is the name of the colony shown?",
5
+ "choices": {
6
+ "A": "Vermont",
7
+ "B": "Ohio",
8
+ "C": "North Carolina",
9
+ "D": "Massachusetts"
10
+ },
11
+ "answer": "C",
12
+ "original_image_filename": "train_0001451.png",
13
+ "has_embedded_choices": false
14
+ }
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0270/prompt.txt ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
+ What is the name of the colony shown?
2
+
3
+ A: Vermont
4
+ B: Ohio
5
+ C: North Carolina
6
+ D: Massachusetts
7
+
8
+ Answer: C
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0277/original/question.json ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset": "ScienceQA",
3
+ "source_id": "train_0001498",
4
+ "question": "What is the name of the colony shown?",
5
+ "choices": {
6
+ "A": "New Hampshire",
7
+ "B": "Alabama",
8
+ "C": "South Carolina",
9
+ "D": "New Jersey"
10
+ },
11
+ "answer": "C",
12
+ "original_image_filename": "train_0001498.png",
13
+ "has_embedded_choices": false
14
+ }
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0278/original/question.json ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset": "ScienceQA",
3
+ "source_id": "train_0001502",
4
+ "question": "Which of these states is farthest south?",
5
+ "choices": {
6
+ "A": "Delaware",
7
+ "B": "Oregon",
8
+ "C": "Maine",
9
+ "D": "Massachusetts"
10
+ },
11
+ "answer": "A",
12
+ "original_image_filename": "train_0001502.png",
13
+ "has_embedded_choices": false
14
+ }
M-2_scienceqa_data-generator/scienceqa_task/scienceqa_0279/original/question.json ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset": "ScienceQA",
3
+ "source_id": "train_0001515",
4
+ "question": "Select the mammal below.",
5
+ "choices": {
6
+ "A": "Amazon tree boa",
7
+ "B": "Madagascar day gecko",
8
+ "C": "sugar glider",
9
+ "D": "woodpecker"
10
+ },
11
+ "answer": "C",
12
+ "original_image_filename": "train_0001515.png",
13
+ "has_embedded_choices": false
14
+ }