Datasets:
Add files using upload-large-folder tool
Browse files- batch_1/event_01_gt.yaml +3 -3
- batch_1/event_02_gt.yaml +3 -3
- batch_2/event_01_gt.yaml +3 -3
- batch_2/event_02_gt.yaml +3 -3
- batch_2/event_03_gt.yaml +3 -3
- batch_2/event_04_gt.yaml +3 -3
- batch_2/event_05_gt.yaml +3 -3
- batch_2/event_06_gt.yaml +3 -3
- batch_2/event_07_gt.yaml +3 -3
- batch_2/event_08_gt.yaml +3 -3
- batch_2/event_09_gt.yaml +3 -3
- batch_2/event_10_gt.yaml +3 -3
- batch_3/event_01_gt.yaml +3 -3
- batch_3/event_02_gt.yaml +3 -3
- batch_3/event_03_gt.yaml +3 -3
- batch_3/event_04_gt.yaml +3 -3
- batch_3/event_05_gt.yaml +3 -3
- batch_3/event_06_gt.yaml +3 -3
- batch_4/event_01_gt.yaml +3 -3
- batch_4/event_02_gt.yaml +3 -3
- batch_4/event_03_gt.yaml +3 -3
- batch_4/event_04_gt.yaml +3 -3
- batch_4/event_05_gt.yaml +3 -3
- batch_5/event_01_gt.yaml +3 -3
- batch_5/event_02_gt.yaml +3 -3
- batch_5/event_03_gt.yaml +3 -3
- batch_5/event_04_gt.yaml +3 -3
- batch_5/event_05_gt.yaml +3 -3
- batch_6/event_01_gt.yaml +3 -3
- batch_6/event_02_gt.yaml +3 -3
- batch_6/event_03_gt.yaml +3 -3
- batch_6/event_04_gt.yaml +3 -3
- batch_7/event_01_gt.yaml +3 -3
- batch_7/event_02_gt.yaml +3 -3
- batch_7/event_03_gt.yaml +3 -3
- upload.py +8 -0
batch_1/event_01_gt.yaml
CHANGED
|
@@ -1,7 +1,7 @@
|
|
| 1 |
---
|
| 2 |
-
clip_path: "../
|
| 3 |
-
image_path: ".
|
| 4 |
-
image_odometry_file: ".
|
| 5 |
from_frame: 675
|
| 6 |
to_frame: 943
|
| 7 |
event_description: "The person in the video washed a cup with water and detergent, then rinsed it with water."
|
|
|
|
| 1 |
---
|
| 2 |
+
clip_path: "../videos/clip_01.mp4"
|
| 3 |
+
image_path: "."
|
| 4 |
+
image_odometry_file: "./image_odometry_data.json"
|
| 5 |
from_frame: 675
|
| 6 |
to_frame: 943
|
| 7 |
event_description: "The person in the video washed a cup with water and detergent, then rinsed it with water."
|
batch_1/event_02_gt.yaml
CHANGED
|
@@ -1,7 +1,7 @@
|
|
| 1 |
---
|
| 2 |
-
clip_path: "../
|
| 3 |
-
image_path: ".
|
| 4 |
-
image_odometry_file: ".
|
| 5 |
from_frame: 1881
|
| 6 |
to_frame: 1959
|
| 7 |
event_description: "The person picked up the mug from the counter, placed it in the cabinet, and then closed the cabinet door."
|
|
|
|
| 1 |
---
|
| 2 |
+
clip_path: "../videos/clip_02.mp4"
|
| 3 |
+
image_path: "."
|
| 4 |
+
image_odometry_file: "./image_odometry_data.json"
|
| 5 |
from_frame: 1881
|
| 6 |
to_frame: 1959
|
| 7 |
event_description: "The person picked up the mug from the counter, placed it in the cabinet, and then closed the cabinet door."
|
batch_2/event_01_gt.yaml
CHANGED
|
@@ -1,7 +1,7 @@
|
|
| 1 |
---
|
| 2 |
-
clip_path: "../
|
| 3 |
-
image_path: ".
|
| 4 |
-
image_odometry_file: ".
|
| 5 |
from_frame: 259
|
| 6 |
to_frame: 320
|
| 7 |
event_description: "The person opens the cabinet, takes out a mug and place it on the counter, then closes the cabinet."
|
|
|
|
| 1 |
---
|
| 2 |
+
clip_path: "../videos/clip_03.mp4"
|
| 3 |
+
image_path: "."
|
| 4 |
+
image_odometry_file: "./image_odometry_data.json"
|
| 5 |
from_frame: 259
|
| 6 |
to_frame: 320
|
| 7 |
event_description: "The person opens the cabinet, takes out a mug and place it on the counter, then closes the cabinet."
|
batch_2/event_02_gt.yaml
CHANGED
|
@@ -1,7 +1,7 @@
|
|
| 1 |
---
|
| 2 |
-
clip_path: "../
|
| 3 |
-
image_path: ".
|
| 4 |
-
image_odometry_file: ".
|
| 5 |
from_frame: 370
|
| 6 |
to_frame: 635
|
| 7 |
event_description: "The person fills the kettle with water from the faucet, then turns on the kettle. After a while, they manually turn off the kettle prematurely."
|
|
|
|
| 1 |
---
|
| 2 |
+
clip_path: "../videos/clip_04.mp4"
|
| 3 |
+
image_path: "."
|
| 4 |
+
image_odometry_file: "./image_odometry_data.json"
|
| 5 |
from_frame: 370
|
| 6 |
to_frame: 635
|
| 7 |
event_description: "The person fills the kettle with water from the faucet, then turns on the kettle. After a while, they manually turn off the kettle prematurely."
|
batch_2/event_03_gt.yaml
CHANGED
|
@@ -1,7 +1,7 @@
|
|
| 1 |
---
|
| 2 |
-
clip_path: "../
|
| 3 |
-
image_path: ".
|
| 4 |
-
image_odometry_file: ".
|
| 5 |
from_frame: 663
|
| 6 |
to_frame: 712
|
| 7 |
event_description: "The person fills the mug with water from the kettle."
|
|
|
|
| 1 |
---
|
| 2 |
+
clip_path: "../videos/clip_05.mp4"
|
| 3 |
+
image_path: "."
|
| 4 |
+
image_odometry_file: "./image_odometry_data.json"
|
| 5 |
from_frame: 663
|
| 6 |
to_frame: 712
|
| 7 |
event_description: "The person fills the mug with water from the kettle."
|
batch_2/event_04_gt.yaml
CHANGED
|
@@ -1,7 +1,7 @@
|
|
| 1 |
---
|
| 2 |
-
clip_path: "../
|
| 3 |
-
image_path: ".
|
| 4 |
-
image_odometry_file: ".
|
| 5 |
from_frame: 726
|
| 6 |
to_frame: 795
|
| 7 |
event_description: "The person drinks from a mug, then placed it back on the counter."
|
|
|
|
| 1 |
---
|
| 2 |
+
clip_path: "../videos/clip_06.mp4"
|
| 3 |
+
image_path: "."
|
| 4 |
+
image_odometry_file: "./image_odometry_data.json"
|
| 5 |
from_frame: 726
|
| 6 |
to_frame: 795
|
| 7 |
event_description: "The person drinks from a mug, then placed it back on the counter."
|
batch_2/event_05_gt.yaml
CHANGED
|
@@ -1,7 +1,7 @@
|
|
| 1 |
---
|
| 2 |
-
clip_path: "../
|
| 3 |
-
image_path: ".
|
| 4 |
-
image_odometry_file: ".
|
| 5 |
from_frame: 1640
|
| 6 |
to_frame: 1752
|
| 7 |
event_description: "The person picks up the mug from the counter and places it inside the dishwasher, then closes the dishwasher."
|
|
|
|
| 1 |
---
|
| 2 |
+
clip_path: "../videos/clip_07.mp4"
|
| 3 |
+
image_path: "."
|
| 4 |
+
image_odometry_file: "./image_odometry_data.json"
|
| 5 |
from_frame: 1640
|
| 6 |
to_frame: 1752
|
| 7 |
event_description: "The person picks up the mug from the counter and places it inside the dishwasher, then closes the dishwasher."
|
batch_2/event_06_gt.yaml
CHANGED
|
@@ -1,7 +1,7 @@
|
|
| 1 |
---
|
| 2 |
-
clip_path: "../
|
| 3 |
-
image_path: ".
|
| 4 |
-
image_odometry_file: ".
|
| 5 |
from_frame: 1986
|
| 6 |
to_frame: 2056
|
| 7 |
event_description: "The person picks up the mug from the cabinet and placed it on the counter."
|
|
|
|
| 1 |
---
|
| 2 |
+
clip_path: "../videos/clip_08.mp4"
|
| 3 |
+
image_path: "."
|
| 4 |
+
image_odometry_file: "./image_odometry_data.json"
|
| 5 |
from_frame: 1986
|
| 6 |
to_frame: 2056
|
| 7 |
event_description: "The person picks up the mug from the cabinet and placed it on the counter."
|
batch_2/event_07_gt.yaml
CHANGED
|
@@ -1,7 +1,7 @@
|
|
| 1 |
---
|
| 2 |
-
clip_path: "../
|
| 3 |
-
image_path: ".
|
| 4 |
-
image_odometry_file: ".
|
| 5 |
from_frame: 2481
|
| 6 |
to_frame: 2778
|
| 7 |
event_description: "The person uses the coffee machine to make a coffee, then walks away."
|
|
|
|
| 1 |
---
|
| 2 |
+
clip_path: "../videos/clip_09.mp4"
|
| 3 |
+
image_path: "./"
|
| 4 |
+
image_odometry_file: "./image_odometry_data.json"
|
| 5 |
from_frame: 2481
|
| 6 |
to_frame: 2778
|
| 7 |
event_description: "The person uses the coffee machine to make a coffee, then walks away."
|
batch_2/event_08_gt.yaml
CHANGED
|
@@ -1,7 +1,7 @@
|
|
| 1 |
---
|
| 2 |
-
clip_path: "../
|
| 3 |
-
image_path: ".
|
| 4 |
-
image_odometry_file: ".
|
| 5 |
from_frame: 3044
|
| 6 |
to_frame: 3122
|
| 7 |
event_description: "The person sits down and works on the laptop. They drink from the mug."
|
|
|
|
| 1 |
---
|
| 2 |
+
clip_path: "../videos/clip_10.mp4"
|
| 3 |
+
image_path: "."
|
| 4 |
+
image_odometry_file: "./image_odometry_data.json"
|
| 5 |
from_frame: 3044
|
| 6 |
to_frame: 3122
|
| 7 |
event_description: "The person sits down and works on the laptop. They drink from the mug."
|
batch_2/event_09_gt.yaml
CHANGED
|
@@ -1,7 +1,7 @@
|
|
| 1 |
---
|
| 2 |
-
clip_path: "../
|
| 3 |
-
image_path: ".
|
| 4 |
-
image_odometry_file: ".
|
| 5 |
from_frame: 3637
|
| 6 |
to_frame: 3816
|
| 7 |
event_description: "The person washes a cup with water and dish soap."
|
|
|
|
| 1 |
---
|
| 2 |
+
clip_path: "../videos/clip_11.mp4"
|
| 3 |
+
image_path: "."
|
| 4 |
+
image_odometry_file: "./image_odometry_data.json"
|
| 5 |
from_frame: 3637
|
| 6 |
to_frame: 3816
|
| 7 |
event_description: "The person washes a cup with water and dish soap."
|
batch_2/event_10_gt.yaml
CHANGED
|
@@ -1,7 +1,7 @@
|
|
| 1 |
---
|
| 2 |
-
clip_path: "../
|
| 3 |
-
image_path: ".
|
| 4 |
-
image_odometry_file: ".
|
| 5 |
from_frame: 3896
|
| 6 |
to_frame: 3945
|
| 7 |
event_description: "The person picked up the mug from the counter, placed it in the cabinet, and then closed the cabinet door."
|
|
|
|
| 1 |
---
|
| 2 |
+
clip_path: "../videos/clip_12.mp4"
|
| 3 |
+
image_path: "."
|
| 4 |
+
image_odometry_file: "./image_odometry_data.json"
|
| 5 |
from_frame: 3896
|
| 6 |
to_frame: 3945
|
| 7 |
event_description: "The person picked up the mug from the counter, placed it in the cabinet, and then closed the cabinet door."
|
batch_3/event_01_gt.yaml
CHANGED
|
@@ -1,7 +1,7 @@
|
|
| 1 |
---
|
| 2 |
-
clip_path: "../
|
| 3 |
-
image_path: ".
|
| 4 |
-
image_odometry_file: ".
|
| 5 |
from_frame: 208
|
| 6 |
to_frame: 399
|
| 7 |
event_description: "The person washes a mug with water and dish soap."
|
|
|
|
| 1 |
---
|
| 2 |
+
clip_path: "../videos/clip_13.mp4"
|
| 3 |
+
image_path: "."
|
| 4 |
+
image_odometry_file: "./image_odometry_data.json"
|
| 5 |
from_frame: 208
|
| 6 |
to_frame: 399
|
| 7 |
event_description: "The person washes a mug with water and dish soap."
|
batch_3/event_02_gt.yaml
CHANGED
|
@@ -1,7 +1,7 @@
|
|
| 1 |
---
|
| 2 |
-
clip_path: "../
|
| 3 |
-
image_path: ".
|
| 4 |
-
image_odometry_file: ".
|
| 5 |
from_frame: 424
|
| 6 |
to_frame: 451
|
| 7 |
event_description: "The person opens the cabinet and then closes it."
|
|
|
|
| 1 |
---
|
| 2 |
+
clip_path: "../videos/clip_14.mp4"
|
| 3 |
+
image_path: "."
|
| 4 |
+
image_odometry_file: "./image_odometry_data.json"
|
| 5 |
from_frame: 424
|
| 6 |
to_frame: 451
|
| 7 |
event_description: "The person opens the cabinet and then closes it."
|
batch_3/event_03_gt.yaml
CHANGED
|
@@ -1,7 +1,7 @@
|
|
| 1 |
---
|
| 2 |
-
clip_path: "../
|
| 3 |
-
image_path: ".
|
| 4 |
-
image_odometry_file: ".
|
| 5 |
from_frame: 583
|
| 6 |
to_frame: 653
|
| 7 |
event_description: "The person opens the dishwasher, takes out a mug and then places it on the counter."
|
|
|
|
| 1 |
---
|
| 2 |
+
clip_path: "../videos/clip_15.mp4"
|
| 3 |
+
image_path: "."
|
| 4 |
+
image_odometry_file: "./image_odometry_data.json"
|
| 5 |
from_frame: 583
|
| 6 |
to_frame: 653
|
| 7 |
event_description: "The person opens the dishwasher, takes out a mug and then places it on the counter."
|
batch_3/event_04_gt.yaml
CHANGED
|
@@ -1,7 +1,7 @@
|
|
| 1 |
---
|
| 2 |
-
clip_path: "../
|
| 3 |
-
image_path: ".
|
| 4 |
-
image_odometry_file: ".
|
| 5 |
from_frame: 696
|
| 6 |
to_frame: 757
|
| 7 |
event_description: "The person rinses the mug with water."
|
|
|
|
| 1 |
---
|
| 2 |
+
clip_path: "../videos/clip_16.mp4"
|
| 3 |
+
image_path: "."
|
| 4 |
+
image_odometry_file: "./image_odometry_data.json"
|
| 5 |
from_frame: 696
|
| 6 |
to_frame: 757
|
| 7 |
event_description: "The person rinses the mug with water."
|
batch_3/event_05_gt.yaml
CHANGED
|
@@ -1,7 +1,7 @@
|
|
| 1 |
---
|
| 2 |
-
clip_path: "../
|
| 3 |
-
image_path: ".
|
| 4 |
-
image_odometry_file: ".
|
| 5 |
from_frame: 759
|
| 6 |
to_frame: 826
|
| 7 |
event_description: "The person places the mug into the dishwasher."
|
|
|
|
| 1 |
---
|
| 2 |
+
clip_path: "../videos/clip_17.mp4"
|
| 3 |
+
image_path: "."
|
| 4 |
+
image_odometry_file: "./image_odometry_data.json"
|
| 5 |
from_frame: 759
|
| 6 |
to_frame: 826
|
| 7 |
event_description: "The person places the mug into the dishwasher."
|
batch_3/event_06_gt.yaml
CHANGED
|
@@ -1,7 +1,7 @@
|
|
| 1 |
---
|
| 2 |
-
clip_path: "../
|
| 3 |
-
image_path: ".
|
| 4 |
-
image_odometry_file: ".
|
| 5 |
from_frame: 959
|
| 6 |
to_frame: 1156
|
| 7 |
event_description: "The person places the mug on the coffee machine and makes a coffee."
|
|
|
|
| 1 |
---
|
| 2 |
+
clip_path: "../videos/clip_18.mp4"
|
| 3 |
+
image_path: "."
|
| 4 |
+
image_odometry_file: "./image_odometry_data.json"
|
| 5 |
from_frame: 959
|
| 6 |
to_frame: 1156
|
| 7 |
event_description: "The person places the mug on the coffee machine and makes a coffee."
|
batch_4/event_01_gt.yaml
CHANGED
|
@@ -1,7 +1,7 @@
|
|
| 1 |
---
|
| 2 |
-
clip_path: "../
|
| 3 |
-
image_path: ".
|
| 4 |
-
image_odometry_file: ".
|
| 5 |
from_frame: 371
|
| 6 |
to_frame: 530
|
| 7 |
event_description: "The person works on the laptop. He checks his phone and then puts it into his pocket. He then takes the laptop and the mouse and walks away"
|
|
|
|
| 1 |
---
|
| 2 |
+
clip_path: "../videos/clip_19.mp4"
|
| 3 |
+
image_path: "."
|
| 4 |
+
image_odometry_file: "./image_odometry_data.json"
|
| 5 |
from_frame: 371
|
| 6 |
to_frame: 530
|
| 7 |
event_description: "The person works on the laptop. He checks his phone and then puts it into his pocket. He then takes the laptop and the mouse and walks away"
|
batch_4/event_02_gt.yaml
CHANGED
|
@@ -1,7 +1,7 @@
|
|
| 1 |
---
|
| 2 |
-
clip_path: "../
|
| 3 |
-
image_path: ".
|
| 4 |
-
image_odometry_file: ".
|
| 5 |
from_frame: 1007
|
| 6 |
to_frame: 1172
|
| 7 |
event_description: "The person works on the laptop while taking sips from the mug. He takes out his phone from his pocket and placed it on the table."
|
|
|
|
| 1 |
---
|
| 2 |
+
clip_path: "../videos/clip_20.mp4"
|
| 3 |
+
image_path: "."
|
| 4 |
+
image_odometry_file: "./image_odometry_data.json"
|
| 5 |
from_frame: 1007
|
| 6 |
to_frame: 1172
|
| 7 |
event_description: "The person works on the laptop while taking sips from the mug. He takes out his phone from his pocket and placed it on the table."
|
batch_4/event_03_gt.yaml
CHANGED
|
@@ -1,7 +1,7 @@
|
|
| 1 |
---
|
| 2 |
-
clip_path: "../
|
| 3 |
-
image_path: ".
|
| 4 |
-
image_odometry_file: ".
|
| 5 |
from_frame: 1529
|
| 6 |
to_frame: 1596
|
| 7 |
event_description: "The person refills the water bottle at the faucet, and then drink from it."
|
|
|
|
| 1 |
---
|
| 2 |
+
clip_path: "../videos/clip_21.mp4"
|
| 3 |
+
image_path: "."
|
| 4 |
+
image_odometry_file: "./image_odometry_data.json"
|
| 5 |
from_frame: 1529
|
| 6 |
to_frame: 1596
|
| 7 |
event_description: "The person refills the water bottle at the faucet, and then drink from it."
|
batch_4/event_04_gt.yaml
CHANGED
|
@@ -1,7 +1,7 @@
|
|
| 1 |
---
|
| 2 |
-
clip_path: "../
|
| 3 |
-
image_path: ".
|
| 4 |
-
image_odometry_file: ".
|
| 5 |
from_frame: 1599
|
| 6 |
to_frame: 1657
|
| 7 |
event_description: "The person rinses the mug with water from the faucet."
|
|
|
|
| 1 |
---
|
| 2 |
+
clip_path: "../videos/clip_22.mp4"
|
| 3 |
+
image_path: "."
|
| 4 |
+
image_odometry_file: "./image_odometry_data.json"
|
| 5 |
from_frame: 1599
|
| 6 |
to_frame: 1657
|
| 7 |
event_description: "The person rinses the mug with water from the faucet."
|
batch_4/event_05_gt.yaml
CHANGED
|
@@ -1,7 +1,7 @@
|
|
| 1 |
---
|
| 2 |
-
clip_path: "../
|
| 3 |
-
image_path: ".
|
| 4 |
-
image_odometry_file: ".
|
| 5 |
from_frame: 1873
|
| 6 |
to_frame: 1941
|
| 7 |
event_description: "The person takes the laptop, mouse and mug from the table and walks away. He leaves the phone on the table"
|
|
|
|
| 1 |
---
|
| 2 |
+
clip_path: "../videos/clip_23.mp4"
|
| 3 |
+
image_path: "."
|
| 4 |
+
image_odometry_file: "./image_odometry_data.json"
|
| 5 |
from_frame: 1873
|
| 6 |
to_frame: 1941
|
| 7 |
event_description: "The person takes the laptop, mouse and mug from the table and walks away. He leaves the phone on the table"
|
batch_5/event_01_gt.yaml
CHANGED
|
@@ -1,7 +1,7 @@
|
|
| 1 |
---
|
| 2 |
-
clip_path: "../
|
| 3 |
-
image_path: ".
|
| 4 |
-
image_odometry_file: ".
|
| 5 |
from_frame: 258
|
| 6 |
to_frame: 412
|
| 7 |
event_description: "The person is cleaning the mug with water and detergent"
|
|
|
|
| 1 |
---
|
| 2 |
+
clip_path: "../videos/clip_24.mp4"
|
| 3 |
+
image_path: "."
|
| 4 |
+
image_odometry_file: "./image_odometry_data.json"
|
| 5 |
from_frame: 258
|
| 6 |
to_frame: 412
|
| 7 |
event_description: "The person is cleaning the mug with water and detergent"
|
batch_5/event_02_gt.yaml
CHANGED
|
@@ -1,7 +1,7 @@
|
|
| 1 |
---
|
| 2 |
-
clip_path: "../
|
| 3 |
-
image_path: ".
|
| 4 |
-
image_odometry_file: ".
|
| 5 |
from_frame: 431
|
| 6 |
to_frame: 480
|
| 7 |
event_description: "The person opened the cabinet, picked up a mug and placed it on the counter."
|
|
|
|
| 1 |
---
|
| 2 |
+
clip_path: "../videos/clip_25.mp4"
|
| 3 |
+
image_path: "."
|
| 4 |
+
image_odometry_file: "./image_odometry_data.json"
|
| 5 |
from_frame: 431
|
| 6 |
to_frame: 480
|
| 7 |
event_description: "The person opened the cabinet, picked up a mug and placed it on the counter."
|
batch_5/event_03_gt.yaml
CHANGED
|
@@ -1,7 +1,7 @@
|
|
| 1 |
---
|
| 2 |
-
clip_path: "../
|
| 3 |
-
image_path: ".
|
| 4 |
-
image_odometry_file: ".
|
| 5 |
from_frame: 555
|
| 6 |
to_frame: 603
|
| 7 |
event_description: "The person fills the mug with water from the faucet then placed it back on the counter."
|
|
|
|
| 1 |
---
|
| 2 |
+
clip_path: "../videos/clip_26.mp4"
|
| 3 |
+
image_path: "."
|
| 4 |
+
image_odometry_file: "./image_odometry_data.json"
|
| 5 |
from_frame: 555
|
| 6 |
to_frame: 603
|
| 7 |
event_description: "The person fills the mug with water from the faucet then placed it back on the counter."
|
batch_5/event_04_gt.yaml
CHANGED
|
@@ -1,7 +1,7 @@
|
|
| 1 |
---
|
| 2 |
-
clip_path: "../
|
| 3 |
-
image_path: ".
|
| 4 |
-
image_odometry_file: ".
|
| 5 |
from_frame: 805
|
| 6 |
to_frame: 1028
|
| 7 |
event_description: "The person made a coffee with the coffee machine."
|
|
|
|
| 1 |
---
|
| 2 |
+
clip_path: "../videos/clip_27.mp4"
|
| 3 |
+
image_path: "."
|
| 4 |
+
image_odometry_file: "./image_odometry_data.json"
|
| 5 |
from_frame: 805
|
| 6 |
to_frame: 1028
|
| 7 |
event_description: "The person made a coffee with the coffee machine."
|
batch_5/event_05_gt.yaml
CHANGED
|
@@ -1,7 +1,7 @@
|
|
| 1 |
---
|
| 2 |
-
clip_path: "../
|
| 3 |
-
image_path: ".
|
| 4 |
-
image_odometry_file: ".
|
| 5 |
from_frame: 1256
|
| 6 |
to_frame: 1268
|
| 7 |
event_description: "The person picked up the mug from the counter and walked away."
|
|
|
|
| 1 |
---
|
| 2 |
+
clip_path: "../videos/clip_28.mp4"
|
| 3 |
+
image_path: "."
|
| 4 |
+
image_odometry_file: "./image_odometry_data.json"
|
| 5 |
from_frame: 1256
|
| 6 |
to_frame: 1268
|
| 7 |
event_description: "The person picked up the mug from the counter and walked away."
|
batch_6/event_01_gt.yaml
CHANGED
|
@@ -1,7 +1,7 @@
|
|
| 1 |
---
|
| 2 |
-
clip_path: "../
|
| 3 |
-
image_path: ".
|
| 4 |
-
image_odometry_file: ".
|
| 5 |
from_frame: 419
|
| 6 |
to_frame: 581
|
| 7 |
event_description: "The person sat down and worked on their laptop, occasionally taking sips from two cups."
|
|
|
|
| 1 |
---
|
| 2 |
+
clip_path: "../videos/clip_29.mp4"
|
| 3 |
+
image_path: "."
|
| 4 |
+
image_odometry_file: "./image_odometry_data.json"
|
| 5 |
from_frame: 419
|
| 6 |
to_frame: 581
|
| 7 |
event_description: "The person sat down and worked on their laptop, occasionally taking sips from two cups."
|
batch_6/event_02_gt.yaml
CHANGED
|
@@ -1,7 +1,7 @@
|
|
| 1 |
---
|
| 2 |
-
clip_path: "../
|
| 3 |
-
image_path: ".
|
| 4 |
-
image_odometry_file: ".
|
| 5 |
from_frame: 654
|
| 6 |
to_frame: 681
|
| 7 |
event_description: "The person picked up the mug from the table and walked away."
|
|
|
|
| 1 |
---
|
| 2 |
+
clip_path: "../videos/clip_30.mp4"
|
| 3 |
+
image_path: "."
|
| 4 |
+
image_odometry_file: "./image_odometry_data.json"
|
| 5 |
from_frame: 654
|
| 6 |
to_frame: 681
|
| 7 |
event_description: "The person picked up the mug from the table and walked away."
|
batch_6/event_03_gt.yaml
CHANGED
|
@@ -1,7 +1,7 @@
|
|
| 1 |
---
|
| 2 |
-
clip_path: "../
|
| 3 |
-
image_path: ".
|
| 4 |
-
image_odometry_file: ".
|
| 5 |
from_frame: 834
|
| 6 |
to_frame: 894
|
| 7 |
event_description: "The person picked up the mug, the laptop and the mouse and walked away."
|
|
|
|
| 1 |
---
|
| 2 |
+
clip_path: "../videos/clip_31.mp4"
|
| 3 |
+
image_path: "."
|
| 4 |
+
image_odometry_file: "./image_odometry_data.json"
|
| 5 |
from_frame: 834
|
| 6 |
to_frame: 894
|
| 7 |
event_description: "The person picked up the mug, the laptop and the mouse and walked away."
|
batch_6/event_04_gt.yaml
CHANGED
|
@@ -1,7 +1,7 @@
|
|
| 1 |
---
|
| 2 |
-
clip_path: "../
|
| 3 |
-
image_path: ".
|
| 4 |
-
image_odometry_file: ".
|
| 5 |
from_frame: 1414
|
| 6 |
to_frame: 1608
|
| 7 |
event_description: "The person worked on the laptop, occasionally took sips from the two mugs. He pulls out his phone from his jacket and checked it, then put it back into his jacket."
|
|
|
|
| 1 |
---
|
| 2 |
+
clip_path: "../videos/clip_32.mp4"
|
| 3 |
+
image_path: "."
|
| 4 |
+
image_odometry_file: "./image_odometry_data.json"
|
| 5 |
from_frame: 1414
|
| 6 |
to_frame: 1608
|
| 7 |
event_description: "The person worked on the laptop, occasionally took sips from the two mugs. He pulls out his phone from his jacket and checked it, then put it back into his jacket."
|
batch_7/event_01_gt.yaml
CHANGED
|
@@ -1,7 +1,7 @@
|
|
| 1 |
---
|
| 2 |
-
clip_path: "../
|
| 3 |
-
image_path: ".
|
| 4 |
-
image_odometry_file: ".
|
| 5 |
from_frame: 238
|
| 6 |
to_frame: 262
|
| 7 |
event_description: "The person stood up, picked up the two mugs and walked away."
|
|
|
|
| 1 |
---
|
| 2 |
+
clip_path: "../videos/clip_33.mp4"
|
| 3 |
+
image_path: "."
|
| 4 |
+
image_odometry_file: "./image_odometry_data.json"
|
| 5 |
from_frame: 238
|
| 6 |
to_frame: 262
|
| 7 |
event_description: "The person stood up, picked up the two mugs and walked away."
|
batch_7/event_02_gt.yaml
CHANGED
|
@@ -1,7 +1,7 @@
|
|
| 1 |
---
|
| 2 |
-
clip_path: "../
|
| 3 |
-
image_path: ".
|
| 4 |
-
image_odometry_file: ".
|
| 5 |
from_frame: 406
|
| 6 |
to_frame: 667
|
| 7 |
event_description: "The person washes two mugs with water and dish soap."
|
|
|
|
| 1 |
---
|
| 2 |
+
clip_path: "../videos/clip_34.mp4"
|
| 3 |
+
image_path: "."
|
| 4 |
+
image_odometry_file: "./image_odometry_data.json"
|
| 5 |
from_frame: 406
|
| 6 |
to_frame: 667
|
| 7 |
event_description: "The person washes two mugs with water and dish soap."
|
batch_7/event_03_gt.yaml
CHANGED
|
@@ -1,7 +1,7 @@
|
|
| 1 |
---
|
| 2 |
-
clip_path: "../
|
| 3 |
-
image_path: ".
|
| 4 |
-
image_odometry_file: ".
|
| 5 |
from_frame: 674
|
| 6 |
to_frame: 750
|
| 7 |
event_description: "The person picked up one mug from the counter, then put it into the cabinet. Then they picked up the other mug and walked away."
|
|
|
|
| 1 |
---
|
| 2 |
+
clip_path: "../videos/clip_35.mp4"
|
| 3 |
+
image_path: "."
|
| 4 |
+
image_odometry_file: "./image_odometry_data.json"
|
| 5 |
from_frame: 674
|
| 6 |
to_frame: 750
|
| 7 |
event_description: "The person picked up one mug from the counter, then put it into the cabinet. Then they picked up the other mug and walked away."
|
upload.py
ADDED
|
@@ -0,0 +1,8 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
"""Upload the current working directory to the Hugging Face Hub as a dataset.

Authenticates interactively (or via a cached token) and pushes the whole
folder — batch_*/event_*_gt.yaml files and all — to the phuoc101/EGG
dataset repository.
"""
from huggingface_hub import HfApi, login


def main() -> None:
    """Log in and upload the local folder to the dataset repo."""
    # Prompts for a token on first use; reuses the cached credential after.
    login()

    api = HfApi()
    # upload_large_folder is resumable and parallelized, which suits a
    # multi-batch tree of many small files like this dataset.
    api.upload_large_folder(
        folder_path=".",
        repo_id="phuoc101/EGG",
        repo_type="dataset",
    )


# Guard so importing this module never triggers a login prompt or an upload.
if __name__ == "__main__":
    main()